text
stringlengths
8
4.13M
// Globals declared by whack_vars! below use lowercase .dat-file names.
#![allow(non_upper_case_globals)]
use libc::c_void;

/// Header describing one .dat table: a raw pointer to the entry data plus
/// the size of a single entry and the number of entries.
///
/// `#[repr(C, packed)]` matches the in-memory layout of the game binary;
/// do not reorder or pad these fields.
#[repr(C, packed)]
pub struct DatTable {
    pub data: *mut c_void,  // raw pointer to the table's entry data
    pub entry_size: u32,    // bytes per entry
    pub entries: u32,       // number of entries in the table
}

// Map fixed addresses in the executable (preferred base 0x00400000) to typed
// views of the .dat table arrays.
// NOTE(review): the array lengths (0x35, 0xb, 0x8, 0x17) presumably equal the
// field counts of each .dat file for the targeted binary version — confirm
// against the whack crate's documentation before changing.
whack_vars!(init_vars, 0x00400000,
    0x00513C30 => units_dat: [DatTable; 0x35];
    0x005136E0 => upgrades_dat: [DatTable; 0xb];
    0x005137D8 => techdata_dat: [DatTable; 0x8];
    0x00513868 => weapons_dat: [DatTable; 0x17];
);
use std::error::Error;
use reqwest::blocking::multipart;

/// Endpoint of the esummarizer.com text-summarization service.
// `'static` is implied for a `const` reference; spelling it out triggers
// clippy::redundant_static_lifetimes.
const URL: &str = "http://esummarizer.com/main/getsummary";

/// Sends `text` to the esummarizer web service and returns its summary.
///
/// The request is a multipart form carrying the raw text and a fixed
/// `nbsentences=5` (desired summary length in sentences).
///
/// # Errors
/// Returns an error if the request cannot be sent, the server responds
/// with a non-success HTTP status, or the response body cannot be decoded.
pub fn summarize_text(text: &str) -> Result<String, Box<dyn Error>> {
    let form = multipart::Form::new()
        .text("text", text.to_string())
        .text("nbsentences", "5");
    let client = reqwest::blocking::Client::new();
    let resp = client
        .post(URL)
        .multipart(form)
        .send()?
        // Previously a 4xx/5xx response silently returned the error-page
        // body as the "summary"; surface it as an Err instead.
        .error_for_status()?;
    Ok(resp.text()?)
}
use std::fmt::{Display, Formatter, Result};

/// Permission required to run a command; derived ordering lets callers
/// compare levels (`User < Superuser`).
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum PermissionLevel {
    User,
    Superuser,
}

/// A bot command: its name, argument placeholders, help text, and the
/// permission level required to invoke it.
#[derive(Debug)]
pub struct Command {
    pub name: &'static str,
    pub args: &'static [&'static str],
    pub message: &'static str,
    pub permission_level: PermissionLevel,
}

/// Convenience constructor for `Command` literals.
#[macro_export]
macro_rules! command {
    ($name: expr, $args: expr, $message: expr, $level: expr) => {
        Command {
            name: $name,
            args: $args,
            message: $message,
            permission_level: $level,
        }
    };
}

impl Display for Command {
    /// Renders the command as a help line, e.g. `/start <arg> - description`.
    /// Underscores in the name are escaped (`\_`) for Markdown output.
    fn fmt(&self, f: &mut Formatter) -> Result {
        let name = self.name.replace("_", "\\_");
        // `is_empty()` over `len() == 0` (clippy::len_zero).
        if self.args.is_empty() {
            write!(f, "/{} - {}", name, self.message)
        } else {
            // Stdlib slice join replaces the itertools `Itertools::join`,
            // dropping this file's only external dependency.
            let args = self
                .args
                .iter()
                .map(|arg| format!("<{}>", arg))
                .collect::<Vec<_>>()
                .join(" ");
            write!(f, "/{} {} - {}", name, args, self.message)
        }
    }
}
pub use bson::Bson; pub use mongodb::{Client, ThreadedClient}; pub use mongodb::db::ThreadedDatabase; pub fn testone() -> String { let client = Client::connect("localhost", 27017) .expect("Failed to initialize standalone client."); let coll = client.db("test").collection("movies"); let doc = doc! { "title" => "Jaws", "array" => [ 1, 2, 3 ] }; // Insert document into 'test.movies' collection coll.insert_one(doc.clone(), None) .ok().expect("Failed to insert document."); // Find the document and receive a cursor let mut cursor = coll.find(Some(doc.clone()), None) .ok().expect("Failed to execute find."); let item = cursor.next(); // cursor.next() returns an Option<Result<Document>> let mut ptitle = "".to_string(); match item { Some(Ok(doc)) => match doc.get("_id") { Some(&Bson::ObjectId(ref _id)) => ptitle = ptitle+&(_id.to_hex()), _ => panic!("Expected title to be a string!"), }, Some(Err(_)) => panic!("Failed to get next from server!"), None => panic!("Server returned no results!"), } ptitle }
// vim: tw=80 //! A library of [`Futures`]-aware locking primitives. These locks can safely //! be used in asynchronous environments like [`Tokio`]. When they block, //! they'll only block a single task, not the entire reactor. //! //! These primitives generally work much like their counterparts from the //! standard library. But instead of blocking, they return a `Future` that //! completes when the lock has been acquired. //! //! # Examples //! //! ``` //! # use futures_locks::*; //! # use futures::executor::ThreadPool; //! # use futures::task::SpawnExt; //! # use futures::FutureExt; //! # fn main() { //! let mut executor = ThreadPool::new().unwrap(); //! //! let mtx = Mutex::<u32>::new(0); //! let fut = mtx.lock().map(|mut guard| { *guard += 5; }); //! executor.run(fut); //! assert_eq!(mtx.try_unwrap().unwrap(), 5); //! # } //! ``` //! //! [`Futures`]: https://github.com/rust-lang-nursery/futures-rs //! [`Tokio`]: https:/tokio.rs #![cfg_attr(feature = "nightly-docs", feature(doc_cfg))] mod mutex; mod rwlock; pub use crate::mutex::{Mutex, MutexFut, MutexGuard, MutexWeak}; pub use rwlock::{RwLock, RwLockReadFut, RwLockWriteFut, RwLockReadGuard, RwLockWriteGuard}; use futures::channel::oneshot; /// Poll state of all Futures in this crate. enum FutState { New, Pending(oneshot::Receiver<()>), Acquired }
/// An enum to represent all characters in the SupplementalSymbolsandPictographs block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum SupplementalSymbolsandPictographs { /// \u{1f900}: '๐Ÿค€' CircledCrossFormeeWithFourDots, /// \u{1f901}: '๐Ÿค' CircledCrossFormeeWithTwoDots, /// \u{1f902}: '๐Ÿค‚' CircledCrossFormee, /// \u{1f903}: '๐Ÿคƒ' LeftHalfCircleWithFourDots, /// \u{1f904}: '๐Ÿค„' LeftHalfCircleWithThreeDots, /// \u{1f905}: '๐Ÿค…' LeftHalfCircleWithTwoDots, /// \u{1f906}: '๐Ÿค†' LeftHalfCircleWithDot, /// \u{1f907}: '๐Ÿค‡' LeftHalfCircle, /// \u{1f908}: '๐Ÿคˆ' DownwardFacingHook, /// \u{1f909}: '๐Ÿค‰' DownwardFacingNotchedHook, /// \u{1f90a}: '๐ŸคŠ' DownwardFacingHookWithDot, /// \u{1f90b}: '๐Ÿค‹' DownwardFacingNotchedHookWithDot, /// \u{1f90d}: '๐Ÿค' WhiteHeart, /// \u{1f90e}: '๐ŸคŽ' BrownHeart, /// \u{1f90f}: '๐Ÿค' PinchingHand, /// \u{1f910}: '๐Ÿค' ZipperDashMouthFace, /// \u{1f911}: '๐Ÿค‘' MoneyDashMouthFace, /// \u{1f912}: '๐Ÿค’' FaceWithThermometer, /// \u{1f913}: '๐Ÿค“' NerdFace, /// \u{1f914}: '๐Ÿค”' ThinkingFace, /// \u{1f915}: '๐Ÿค•' FaceWithHeadDashBandage, /// \u{1f916}: '๐Ÿค–' RobotFace, /// \u{1f917}: '๐Ÿค—' HuggingFace, /// \u{1f918}: '๐Ÿค˜' SignOfTheHorns, /// \u{1f919}: '๐Ÿค™' CallMeHand, /// \u{1f91a}: '๐Ÿคš' RaisedBackOfHand, /// \u{1f91b}: '๐Ÿค›' LeftDashFacingFist, /// \u{1f91c}: '๐Ÿคœ' RightDashFacingFist, /// \u{1f91d}: '๐Ÿค' Handshake, /// \u{1f91e}: '๐Ÿคž' HandWithIndexAndMiddleFingersCrossed, /// \u{1f91f}: '๐ŸคŸ' ILoveYouHandSign, /// \u{1f920}: '๐Ÿค ' FaceWithCowboyHat, /// \u{1f921}: '๐Ÿคก' ClownFace, /// \u{1f922}: '๐Ÿคข' NauseatedFace, /// \u{1f923}: '๐Ÿคฃ' RollingOnTheFloorLaughing, /// \u{1f924}: '๐Ÿคค' DroolingFace, /// \u{1f925}: '๐Ÿคฅ' LyingFace, /// \u{1f926}: '๐Ÿคฆ' FacePalm, /// \u{1f927}: '๐Ÿคง' SneezingFace, /// \u{1f928}: '๐Ÿคจ' FaceWithOneEyebrowRaised, /// \u{1f929}: '๐Ÿคฉ' GrinningFaceWithStarEyes, /// \u{1f92a}: '๐Ÿคช' GrinningFaceWithOneLargeAndOneSmallEye, /// \u{1f92b}: '๐Ÿคซ' 
FaceWithFingerCoveringClosedLips, /// \u{1f92c}: '๐Ÿคฌ' SeriousFaceWithSymbolsCoveringMouth, /// \u{1f92d}: '๐Ÿคญ' SmilingFaceWithSmilingEyesAndHandCoveringMouth, /// \u{1f92e}: '๐Ÿคฎ' FaceWithOpenMouthVomiting, /// \u{1f92f}: '๐Ÿคฏ' ShockedFaceWithExplodingHead, /// \u{1f930}: '๐Ÿคฐ' PregnantWoman, /// \u{1f931}: '๐Ÿคฑ' BreastDashFeeding, /// \u{1f932}: '๐Ÿคฒ' PalmsUpTogether, /// \u{1f933}: '๐Ÿคณ' Selfie, /// \u{1f934}: '๐Ÿคด' Prince, /// \u{1f935}: '๐Ÿคต' ManInTuxedo, /// \u{1f936}: '๐Ÿคถ' MotherChristmas, /// \u{1f937}: '๐Ÿคท' Shrug, /// \u{1f938}: '๐Ÿคธ' PersonDoingCartwheel, /// \u{1f939}: '๐Ÿคน' Juggling, /// \u{1f93a}: '๐Ÿคบ' Fencer, /// \u{1f93b}: '๐Ÿคป' ModernPentathlon, /// \u{1f93c}: '๐Ÿคผ' Wrestlers, /// \u{1f93d}: '๐Ÿคฝ' WaterPolo, /// \u{1f93e}: '๐Ÿคพ' Handball, /// \u{1f93f}: '๐Ÿคฟ' DivingMask, /// \u{1f940}: '๐Ÿฅ€' WiltedFlower, /// \u{1f941}: '๐Ÿฅ' DrumWithDrumsticks, /// \u{1f942}: '๐Ÿฅ‚' ClinkingGlasses, /// \u{1f943}: '๐Ÿฅƒ' TumblerGlass, /// \u{1f944}: '๐Ÿฅ„' Spoon, /// \u{1f945}: '๐Ÿฅ…' GoalNet, /// \u{1f946}: '๐Ÿฅ†' Rifle, /// \u{1f947}: '๐Ÿฅ‡' FirstPlaceMedal, /// \u{1f948}: '๐Ÿฅˆ' SecondPlaceMedal, /// \u{1f949}: '๐Ÿฅ‰' ThirdPlaceMedal, /// \u{1f94a}: '๐ŸฅŠ' BoxingGlove, /// \u{1f94b}: '๐Ÿฅ‹' MartialArtsUniform, /// \u{1f94c}: '๐ŸฅŒ' CurlingStone, /// \u{1f94d}: '๐Ÿฅ' LacrosseStickAndBall, /// \u{1f94e}: '๐ŸฅŽ' Softball, /// \u{1f94f}: '๐Ÿฅ' FlyingDisc, /// \u{1f950}: '๐Ÿฅ' Croissant, /// \u{1f951}: '๐Ÿฅ‘' Avocado, /// \u{1f952}: '๐Ÿฅ’' Cucumber, /// \u{1f953}: '๐Ÿฅ“' Bacon, /// \u{1f954}: '๐Ÿฅ”' Potato, /// \u{1f955}: '๐Ÿฅ•' Carrot, /// \u{1f956}: '๐Ÿฅ–' BaguetteBread, /// \u{1f957}: '๐Ÿฅ—' GreenSalad, /// \u{1f958}: '๐Ÿฅ˜' ShallowPanOfFood, /// \u{1f959}: '๐Ÿฅ™' StuffedFlatbread, /// \u{1f95a}: '๐Ÿฅš' Egg, /// \u{1f95b}: '๐Ÿฅ›' GlassOfMilk, /// \u{1f95c}: '๐Ÿฅœ' Peanuts, /// \u{1f95d}: '๐Ÿฅ' Kiwifruit, /// \u{1f95e}: '๐Ÿฅž' Pancakes, /// \u{1f95f}: '๐ŸฅŸ' Dumpling, /// \u{1f960}: '๐Ÿฅ ' FortuneCookie, /// \u{1f961}: '๐Ÿฅก' 
TakeoutBox, /// \u{1f962}: '๐Ÿฅข' Chopsticks, /// \u{1f963}: '๐Ÿฅฃ' BowlWithSpoon, /// \u{1f964}: '๐Ÿฅค' CupWithStraw, /// \u{1f965}: '๐Ÿฅฅ' Coconut, /// \u{1f966}: '๐Ÿฅฆ' Broccoli, /// \u{1f967}: '๐Ÿฅง' Pie, /// \u{1f968}: '๐Ÿฅจ' Pretzel, /// \u{1f969}: '๐Ÿฅฉ' CutOfMeat, /// \u{1f96a}: '๐Ÿฅช' Sandwich, /// \u{1f96b}: '๐Ÿฅซ' CannedFood, /// \u{1f96c}: '๐Ÿฅฌ' LeafyGreen, /// \u{1f96d}: '๐Ÿฅญ' Mango, /// \u{1f96e}: '๐Ÿฅฎ' MoonCake, /// \u{1f96f}: '๐Ÿฅฏ' Bagel, /// \u{1f970}: '๐Ÿฅฐ' SmilingFaceWithSmilingEyesAndThreeHearts, /// \u{1f971}: '๐Ÿฅฑ' YawningFace, /// \u{1f973}: '๐Ÿฅณ' FaceWithPartyHornAndPartyHat, /// \u{1f974}: '๐Ÿฅด' FaceWithUnevenEyesAndWavyMouth, /// \u{1f975}: '๐Ÿฅต' OverheatedFace, /// \u{1f976}: '๐Ÿฅถ' FreezingFace, /// \u{1f97a}: '๐Ÿฅบ' FaceWithPleadingEyes, /// \u{1f97b}: '๐Ÿฅป' Sari, /// \u{1f97c}: '๐Ÿฅผ' LabCoat, /// \u{1f97d}: '๐Ÿฅฝ' Goggles, /// \u{1f97e}: '๐Ÿฅพ' HikingBoot, /// \u{1f97f}: '๐Ÿฅฟ' FlatShoe, /// \u{1f980}: '๐Ÿฆ€' Crab, /// \u{1f981}: '๐Ÿฆ' LionFace, /// \u{1f982}: '๐Ÿฆ‚' Scorpion, /// \u{1f983}: '๐Ÿฆƒ' Turkey, /// \u{1f984}: '๐Ÿฆ„' UnicornFace, /// \u{1f985}: '๐Ÿฆ…' Eagle, /// \u{1f986}: '๐Ÿฆ†' Duck, /// \u{1f987}: '๐Ÿฆ‡' Bat, /// \u{1f988}: '๐Ÿฆˆ' Shark, /// \u{1f989}: '๐Ÿฆ‰' Owl, /// \u{1f98a}: '๐ŸฆŠ' FoxFace, /// \u{1f98b}: '๐Ÿฆ‹' Butterfly, /// \u{1f98c}: '๐ŸฆŒ' Deer, /// \u{1f98d}: '๐Ÿฆ' Gorilla, /// \u{1f98e}: '๐ŸฆŽ' Lizard, /// \u{1f98f}: '๐Ÿฆ' Rhinoceros, /// \u{1f990}: '๐Ÿฆ' Shrimp, /// \u{1f991}: '๐Ÿฆ‘' Squid, /// \u{1f992}: '๐Ÿฆ’' GiraffeFace, /// \u{1f993}: '๐Ÿฆ“' ZebraFace, /// \u{1f994}: '๐Ÿฆ”' Hedgehog, /// \u{1f995}: '๐Ÿฆ•' Sauropod, /// \u{1f996}: '๐Ÿฆ–' TDashRex, /// \u{1f997}: '๐Ÿฆ—' Cricket, /// \u{1f998}: '๐Ÿฆ˜' Kangaroo, /// \u{1f999}: '๐Ÿฆ™' Llama, /// \u{1f99a}: '๐Ÿฆš' Peacock, /// \u{1f99b}: '๐Ÿฆ›' Hippopotamus, /// \u{1f99c}: '๐Ÿฆœ' Parrot, /// \u{1f99d}: '๐Ÿฆ' Raccoon, /// \u{1f99e}: '๐Ÿฆž' Lobster, /// \u{1f99f}: '๐ŸฆŸ' Mosquito, /// \u{1f9a0}: '๐Ÿฆ ' Microbe, /// \u{1f9a1}: '๐Ÿฆก' Badger, /// 
\u{1f9a2}: '๐Ÿฆข' Swan, /// \u{1f9a5}: '๐Ÿฆฅ' Sloth, /// \u{1f9a6}: '๐Ÿฆฆ' Otter, /// \u{1f9a7}: '๐Ÿฆง' Orangutan, /// \u{1f9a8}: '๐Ÿฆจ' Skunk, /// \u{1f9a9}: '๐Ÿฆฉ' Flamingo, /// \u{1f9aa}: '๐Ÿฆช' Oyster, /// \u{1f9ae}: '๐Ÿฆฎ' GuideDog, /// \u{1f9af}: '๐Ÿฆฏ' ProbingCane, /// \u{1f9b0}: '๐Ÿฆฐ' EmojiComponentRedHair, /// \u{1f9b1}: '๐Ÿฆฑ' EmojiComponentCurlyHair, /// \u{1f9b2}: '๐Ÿฆฒ' EmojiComponentBald, /// \u{1f9b3}: '๐Ÿฆณ' EmojiComponentWhiteHair, /// \u{1f9b4}: '๐Ÿฆด' Bone, /// \u{1f9b5}: '๐Ÿฆต' Leg, /// \u{1f9b6}: '๐Ÿฆถ' Foot, /// \u{1f9b7}: '๐Ÿฆท' Tooth, /// \u{1f9b8}: '๐Ÿฆธ' Superhero, /// \u{1f9b9}: '๐Ÿฆน' Supervillain, /// \u{1f9ba}: '๐Ÿฆบ' SafetyVest, /// \u{1f9bb}: '๐Ÿฆป' EarWithHearingAid, /// \u{1f9bc}: '๐Ÿฆผ' MotorizedWheelchair, /// \u{1f9bd}: '๐Ÿฆฝ' ManualWheelchair, /// \u{1f9be}: '๐Ÿฆพ' MechanicalArm, /// \u{1f9bf}: '๐Ÿฆฟ' MechanicalLeg, /// \u{1f9c0}: '๐Ÿง€' CheeseWedge, /// \u{1f9c1}: '๐Ÿง' Cupcake, /// \u{1f9c2}: '๐Ÿง‚' SaltShaker, /// \u{1f9c3}: '๐Ÿงƒ' BeverageBox, /// \u{1f9c4}: '๐Ÿง„' Garlic, /// \u{1f9c5}: '๐Ÿง…' Onion, /// \u{1f9c6}: '๐Ÿง†' Falafel, /// \u{1f9c7}: '๐Ÿง‡' Waffle, /// \u{1f9c8}: '๐Ÿงˆ' Butter, /// \u{1f9c9}: '๐Ÿง‰' MateDrink, /// \u{1f9ca}: '๐ŸงŠ' IceCube, /// \u{1f9cd}: '๐Ÿง' StandingPerson, /// \u{1f9ce}: '๐ŸงŽ' KneelingPerson, /// \u{1f9cf}: '๐Ÿง' DeafPerson, /// \u{1f9d0}: '๐Ÿง' FaceWithMonocle, /// \u{1f9d1}: '๐Ÿง‘' Adult, /// \u{1f9d2}: '๐Ÿง’' Child, /// \u{1f9d3}: '๐Ÿง“' OlderAdult, /// \u{1f9d4}: '๐Ÿง”' BeardedPerson, /// \u{1f9d5}: '๐Ÿง•' PersonWithHeadscarf, /// \u{1f9d6}: '๐Ÿง–' PersonInSteamyRoom, /// \u{1f9d7}: '๐Ÿง—' PersonClimbing, /// \u{1f9d8}: '๐Ÿง˜' PersonInLotusPosition, /// \u{1f9d9}: '๐Ÿง™' Mage, /// \u{1f9da}: '๐Ÿงš' Fairy, /// \u{1f9db}: '๐Ÿง›' Vampire, /// \u{1f9dc}: '๐Ÿงœ' Merperson, /// \u{1f9dd}: '๐Ÿง' Elf, /// \u{1f9de}: '๐Ÿงž' Genie, /// \u{1f9df}: '๐ŸงŸ' Zombie, /// \u{1f9e0}: '๐Ÿง ' Brain, /// \u{1f9e1}: '๐Ÿงก' OrangeHeart, /// \u{1f9e2}: '๐Ÿงข' BilledCap, /// \u{1f9e3}: '๐Ÿงฃ' Scarf, /// 
\u{1f9e4}: '๐Ÿงค' Gloves, /// \u{1f9e5}: '๐Ÿงฅ' Coat, /// \u{1f9e6}: '๐Ÿงฆ' Socks, /// \u{1f9e7}: '๐Ÿงง' RedGiftEnvelope, /// \u{1f9e8}: '๐Ÿงจ' Firecracker, /// \u{1f9e9}: '๐Ÿงฉ' JigsawPuzzlePiece, /// \u{1f9ea}: '๐Ÿงช' TestTube, /// \u{1f9eb}: '๐Ÿงซ' PetriDish, /// \u{1f9ec}: '๐Ÿงฌ' DnaDoubleHelix, /// \u{1f9ed}: '๐Ÿงญ' Compass, /// \u{1f9ee}: '๐Ÿงฎ' Abacus, /// \u{1f9ef}: '๐Ÿงฏ' FireExtinguisher, /// \u{1f9f0}: '๐Ÿงฐ' Toolbox, /// \u{1f9f1}: '๐Ÿงฑ' Brick, /// \u{1f9f2}: '๐Ÿงฒ' Magnet, /// \u{1f9f3}: '๐Ÿงณ' Luggage, /// \u{1f9f4}: '๐Ÿงด' LotionBottle, /// \u{1f9f5}: '๐Ÿงต' SpoolOfThread, /// \u{1f9f6}: '๐Ÿงถ' BallOfYarn, /// \u{1f9f7}: '๐Ÿงท' SafetyPin, /// \u{1f9f8}: '๐Ÿงธ' TeddyBear, /// \u{1f9f9}: '๐Ÿงน' Broom, /// \u{1f9fa}: '๐Ÿงบ' Basket, /// \u{1f9fb}: '๐Ÿงป' RollOfPaper, /// \u{1f9fc}: '๐Ÿงผ' BarOfSoap, /// \u{1f9fd}: '๐Ÿงฝ' Sponge, /// \u{1f9fe}: '๐Ÿงพ' Receipt, } impl Into<char> for SupplementalSymbolsandPictographs { fn into(self) -> char { match self { SupplementalSymbolsandPictographs::CircledCrossFormeeWithFourDots => '๐Ÿค€', SupplementalSymbolsandPictographs::CircledCrossFormeeWithTwoDots => '๐Ÿค', SupplementalSymbolsandPictographs::CircledCrossFormee => '๐Ÿค‚', SupplementalSymbolsandPictographs::LeftHalfCircleWithFourDots => '๐Ÿคƒ', SupplementalSymbolsandPictographs::LeftHalfCircleWithThreeDots => '๐Ÿค„', SupplementalSymbolsandPictographs::LeftHalfCircleWithTwoDots => '๐Ÿค…', SupplementalSymbolsandPictographs::LeftHalfCircleWithDot => '๐Ÿค†', SupplementalSymbolsandPictographs::LeftHalfCircle => '๐Ÿค‡', SupplementalSymbolsandPictographs::DownwardFacingHook => '๐Ÿคˆ', SupplementalSymbolsandPictographs::DownwardFacingNotchedHook => '๐Ÿค‰', SupplementalSymbolsandPictographs::DownwardFacingHookWithDot => '๐ŸคŠ', SupplementalSymbolsandPictographs::DownwardFacingNotchedHookWithDot => '๐Ÿค‹', SupplementalSymbolsandPictographs::WhiteHeart => '๐Ÿค', SupplementalSymbolsandPictographs::BrownHeart => '๐ŸคŽ', SupplementalSymbolsandPictographs::PinchingHand => 
'๐Ÿค', SupplementalSymbolsandPictographs::ZipperDashMouthFace => '๐Ÿค', SupplementalSymbolsandPictographs::MoneyDashMouthFace => '๐Ÿค‘', SupplementalSymbolsandPictographs::FaceWithThermometer => '๐Ÿค’', SupplementalSymbolsandPictographs::NerdFace => '๐Ÿค“', SupplementalSymbolsandPictographs::ThinkingFace => '๐Ÿค”', SupplementalSymbolsandPictographs::FaceWithHeadDashBandage => '๐Ÿค•', SupplementalSymbolsandPictographs::RobotFace => '๐Ÿค–', SupplementalSymbolsandPictographs::HuggingFace => '๐Ÿค—', SupplementalSymbolsandPictographs::SignOfTheHorns => '๐Ÿค˜', SupplementalSymbolsandPictographs::CallMeHand => '๐Ÿค™', SupplementalSymbolsandPictographs::RaisedBackOfHand => '๐Ÿคš', SupplementalSymbolsandPictographs::LeftDashFacingFist => '๐Ÿค›', SupplementalSymbolsandPictographs::RightDashFacingFist => '๐Ÿคœ', SupplementalSymbolsandPictographs::Handshake => '๐Ÿค', SupplementalSymbolsandPictographs::HandWithIndexAndMiddleFingersCrossed => '๐Ÿคž', SupplementalSymbolsandPictographs::ILoveYouHandSign => '๐ŸคŸ', SupplementalSymbolsandPictographs::FaceWithCowboyHat => '๐Ÿค ', SupplementalSymbolsandPictographs::ClownFace => '๐Ÿคก', SupplementalSymbolsandPictographs::NauseatedFace => '๐Ÿคข', SupplementalSymbolsandPictographs::RollingOnTheFloorLaughing => '๐Ÿคฃ', SupplementalSymbolsandPictographs::DroolingFace => '๐Ÿคค', SupplementalSymbolsandPictographs::LyingFace => '๐Ÿคฅ', SupplementalSymbolsandPictographs::FacePalm => '๐Ÿคฆ', SupplementalSymbolsandPictographs::SneezingFace => '๐Ÿคง', SupplementalSymbolsandPictographs::FaceWithOneEyebrowRaised => '๐Ÿคจ', SupplementalSymbolsandPictographs::GrinningFaceWithStarEyes => '๐Ÿคฉ', SupplementalSymbolsandPictographs::GrinningFaceWithOneLargeAndOneSmallEye => '๐Ÿคช', SupplementalSymbolsandPictographs::FaceWithFingerCoveringClosedLips => '๐Ÿคซ', SupplementalSymbolsandPictographs::SeriousFaceWithSymbolsCoveringMouth => '๐Ÿคฌ', SupplementalSymbolsandPictographs::SmilingFaceWithSmilingEyesAndHandCoveringMouth => '๐Ÿคญ', 
SupplementalSymbolsandPictographs::FaceWithOpenMouthVomiting => '๐Ÿคฎ', SupplementalSymbolsandPictographs::ShockedFaceWithExplodingHead => '๐Ÿคฏ', SupplementalSymbolsandPictographs::PregnantWoman => '๐Ÿคฐ', SupplementalSymbolsandPictographs::BreastDashFeeding => '๐Ÿคฑ', SupplementalSymbolsandPictographs::PalmsUpTogether => '๐Ÿคฒ', SupplementalSymbolsandPictographs::Selfie => '๐Ÿคณ', SupplementalSymbolsandPictographs::Prince => '๐Ÿคด', SupplementalSymbolsandPictographs::ManInTuxedo => '๐Ÿคต', SupplementalSymbolsandPictographs::MotherChristmas => '๐Ÿคถ', SupplementalSymbolsandPictographs::Shrug => '๐Ÿคท', SupplementalSymbolsandPictographs::PersonDoingCartwheel => '๐Ÿคธ', SupplementalSymbolsandPictographs::Juggling => '๐Ÿคน', SupplementalSymbolsandPictographs::Fencer => '๐Ÿคบ', SupplementalSymbolsandPictographs::ModernPentathlon => '๐Ÿคป', SupplementalSymbolsandPictographs::Wrestlers => '๐Ÿคผ', SupplementalSymbolsandPictographs::WaterPolo => '๐Ÿคฝ', SupplementalSymbolsandPictographs::Handball => '๐Ÿคพ', SupplementalSymbolsandPictographs::DivingMask => '๐Ÿคฟ', SupplementalSymbolsandPictographs::WiltedFlower => '๐Ÿฅ€', SupplementalSymbolsandPictographs::DrumWithDrumsticks => '๐Ÿฅ', SupplementalSymbolsandPictographs::ClinkingGlasses => '๐Ÿฅ‚', SupplementalSymbolsandPictographs::TumblerGlass => '๐Ÿฅƒ', SupplementalSymbolsandPictographs::Spoon => '๐Ÿฅ„', SupplementalSymbolsandPictographs::GoalNet => '๐Ÿฅ…', SupplementalSymbolsandPictographs::Rifle => '๐Ÿฅ†', SupplementalSymbolsandPictographs::FirstPlaceMedal => '๐Ÿฅ‡', SupplementalSymbolsandPictographs::SecondPlaceMedal => '๐Ÿฅˆ', SupplementalSymbolsandPictographs::ThirdPlaceMedal => '๐Ÿฅ‰', SupplementalSymbolsandPictographs::BoxingGlove => '๐ŸฅŠ', SupplementalSymbolsandPictographs::MartialArtsUniform => '๐Ÿฅ‹', SupplementalSymbolsandPictographs::CurlingStone => '๐ŸฅŒ', SupplementalSymbolsandPictographs::LacrosseStickAndBall => '๐Ÿฅ', SupplementalSymbolsandPictographs::Softball => '๐ŸฅŽ', 
SupplementalSymbolsandPictographs::FlyingDisc => '๐Ÿฅ', SupplementalSymbolsandPictographs::Croissant => '๐Ÿฅ', SupplementalSymbolsandPictographs::Avocado => '๐Ÿฅ‘', SupplementalSymbolsandPictographs::Cucumber => '๐Ÿฅ’', SupplementalSymbolsandPictographs::Bacon => '๐Ÿฅ“', SupplementalSymbolsandPictographs::Potato => '๐Ÿฅ”', SupplementalSymbolsandPictographs::Carrot => '๐Ÿฅ•', SupplementalSymbolsandPictographs::BaguetteBread => '๐Ÿฅ–', SupplementalSymbolsandPictographs::GreenSalad => '๐Ÿฅ—', SupplementalSymbolsandPictographs::ShallowPanOfFood => '๐Ÿฅ˜', SupplementalSymbolsandPictographs::StuffedFlatbread => '๐Ÿฅ™', SupplementalSymbolsandPictographs::Egg => '๐Ÿฅš', SupplementalSymbolsandPictographs::GlassOfMilk => '๐Ÿฅ›', SupplementalSymbolsandPictographs::Peanuts => '๐Ÿฅœ', SupplementalSymbolsandPictographs::Kiwifruit => '๐Ÿฅ', SupplementalSymbolsandPictographs::Pancakes => '๐Ÿฅž', SupplementalSymbolsandPictographs::Dumpling => '๐ŸฅŸ', SupplementalSymbolsandPictographs::FortuneCookie => '๐Ÿฅ ', SupplementalSymbolsandPictographs::TakeoutBox => '๐Ÿฅก', SupplementalSymbolsandPictographs::Chopsticks => '๐Ÿฅข', SupplementalSymbolsandPictographs::BowlWithSpoon => '๐Ÿฅฃ', SupplementalSymbolsandPictographs::CupWithStraw => '๐Ÿฅค', SupplementalSymbolsandPictographs::Coconut => '๐Ÿฅฅ', SupplementalSymbolsandPictographs::Broccoli => '๐Ÿฅฆ', SupplementalSymbolsandPictographs::Pie => '๐Ÿฅง', SupplementalSymbolsandPictographs::Pretzel => '๐Ÿฅจ', SupplementalSymbolsandPictographs::CutOfMeat => '๐Ÿฅฉ', SupplementalSymbolsandPictographs::Sandwich => '๐Ÿฅช', SupplementalSymbolsandPictographs::CannedFood => '๐Ÿฅซ', SupplementalSymbolsandPictographs::LeafyGreen => '๐Ÿฅฌ', SupplementalSymbolsandPictographs::Mango => '๐Ÿฅญ', SupplementalSymbolsandPictographs::MoonCake => '๐Ÿฅฎ', SupplementalSymbolsandPictographs::Bagel => '๐Ÿฅฏ', SupplementalSymbolsandPictographs::SmilingFaceWithSmilingEyesAndThreeHearts => '๐Ÿฅฐ', SupplementalSymbolsandPictographs::YawningFace => '๐Ÿฅฑ', 
SupplementalSymbolsandPictographs::FaceWithPartyHornAndPartyHat => '๐Ÿฅณ', SupplementalSymbolsandPictographs::FaceWithUnevenEyesAndWavyMouth => '๐Ÿฅด', SupplementalSymbolsandPictographs::OverheatedFace => '๐Ÿฅต', SupplementalSymbolsandPictographs::FreezingFace => '๐Ÿฅถ', SupplementalSymbolsandPictographs::FaceWithPleadingEyes => '๐Ÿฅบ', SupplementalSymbolsandPictographs::Sari => '๐Ÿฅป', SupplementalSymbolsandPictographs::LabCoat => '๐Ÿฅผ', SupplementalSymbolsandPictographs::Goggles => '๐Ÿฅฝ', SupplementalSymbolsandPictographs::HikingBoot => '๐Ÿฅพ', SupplementalSymbolsandPictographs::FlatShoe => '๐Ÿฅฟ', SupplementalSymbolsandPictographs::Crab => '๐Ÿฆ€', SupplementalSymbolsandPictographs::LionFace => '๐Ÿฆ', SupplementalSymbolsandPictographs::Scorpion => '๐Ÿฆ‚', SupplementalSymbolsandPictographs::Turkey => '๐Ÿฆƒ', SupplementalSymbolsandPictographs::UnicornFace => '๐Ÿฆ„', SupplementalSymbolsandPictographs::Eagle => '๐Ÿฆ…', SupplementalSymbolsandPictographs::Duck => '๐Ÿฆ†', SupplementalSymbolsandPictographs::Bat => '๐Ÿฆ‡', SupplementalSymbolsandPictographs::Shark => '๐Ÿฆˆ', SupplementalSymbolsandPictographs::Owl => '๐Ÿฆ‰', SupplementalSymbolsandPictographs::FoxFace => '๐ŸฆŠ', SupplementalSymbolsandPictographs::Butterfly => '๐Ÿฆ‹', SupplementalSymbolsandPictographs::Deer => '๐ŸฆŒ', SupplementalSymbolsandPictographs::Gorilla => '๐Ÿฆ', SupplementalSymbolsandPictographs::Lizard => '๐ŸฆŽ', SupplementalSymbolsandPictographs::Rhinoceros => '๐Ÿฆ', SupplementalSymbolsandPictographs::Shrimp => '๐Ÿฆ', SupplementalSymbolsandPictographs::Squid => '๐Ÿฆ‘', SupplementalSymbolsandPictographs::GiraffeFace => '๐Ÿฆ’', SupplementalSymbolsandPictographs::ZebraFace => '๐Ÿฆ“', SupplementalSymbolsandPictographs::Hedgehog => '๐Ÿฆ”', SupplementalSymbolsandPictographs::Sauropod => '๐Ÿฆ•', SupplementalSymbolsandPictographs::TDashRex => '๐Ÿฆ–', SupplementalSymbolsandPictographs::Cricket => '๐Ÿฆ—', SupplementalSymbolsandPictographs::Kangaroo => '๐Ÿฆ˜', SupplementalSymbolsandPictographs::Llama => 
'๐Ÿฆ™', SupplementalSymbolsandPictographs::Peacock => '๐Ÿฆš', SupplementalSymbolsandPictographs::Hippopotamus => '๐Ÿฆ›', SupplementalSymbolsandPictographs::Parrot => '๐Ÿฆœ', SupplementalSymbolsandPictographs::Raccoon => '๐Ÿฆ', SupplementalSymbolsandPictographs::Lobster => '๐Ÿฆž', SupplementalSymbolsandPictographs::Mosquito => '๐ŸฆŸ', SupplementalSymbolsandPictographs::Microbe => '๐Ÿฆ ', SupplementalSymbolsandPictographs::Badger => '๐Ÿฆก', SupplementalSymbolsandPictographs::Swan => '๐Ÿฆข', SupplementalSymbolsandPictographs::Sloth => '๐Ÿฆฅ', SupplementalSymbolsandPictographs::Otter => '๐Ÿฆฆ', SupplementalSymbolsandPictographs::Orangutan => '๐Ÿฆง', SupplementalSymbolsandPictographs::Skunk => '๐Ÿฆจ', SupplementalSymbolsandPictographs::Flamingo => '๐Ÿฆฉ', SupplementalSymbolsandPictographs::Oyster => '๐Ÿฆช', SupplementalSymbolsandPictographs::GuideDog => '๐Ÿฆฎ', SupplementalSymbolsandPictographs::ProbingCane => '๐Ÿฆฏ', SupplementalSymbolsandPictographs::EmojiComponentRedHair => '๐Ÿฆฐ', SupplementalSymbolsandPictographs::EmojiComponentCurlyHair => '๐Ÿฆฑ', SupplementalSymbolsandPictographs::EmojiComponentBald => '๐Ÿฆฒ', SupplementalSymbolsandPictographs::EmojiComponentWhiteHair => '๐Ÿฆณ', SupplementalSymbolsandPictographs::Bone => '๐Ÿฆด', SupplementalSymbolsandPictographs::Leg => '๐Ÿฆต', SupplementalSymbolsandPictographs::Foot => '๐Ÿฆถ', SupplementalSymbolsandPictographs::Tooth => '๐Ÿฆท', SupplementalSymbolsandPictographs::Superhero => '๐Ÿฆธ', SupplementalSymbolsandPictographs::Supervillain => '๐Ÿฆน', SupplementalSymbolsandPictographs::SafetyVest => '๐Ÿฆบ', SupplementalSymbolsandPictographs::EarWithHearingAid => '๐Ÿฆป', SupplementalSymbolsandPictographs::MotorizedWheelchair => '๐Ÿฆผ', SupplementalSymbolsandPictographs::ManualWheelchair => '๐Ÿฆฝ', SupplementalSymbolsandPictographs::MechanicalArm => '๐Ÿฆพ', SupplementalSymbolsandPictographs::MechanicalLeg => '๐Ÿฆฟ', SupplementalSymbolsandPictographs::CheeseWedge => '๐Ÿง€', SupplementalSymbolsandPictographs::Cupcake => 
'๐Ÿง', SupplementalSymbolsandPictographs::SaltShaker => '๐Ÿง‚', SupplementalSymbolsandPictographs::BeverageBox => '๐Ÿงƒ', SupplementalSymbolsandPictographs::Garlic => '๐Ÿง„', SupplementalSymbolsandPictographs::Onion => '๐Ÿง…', SupplementalSymbolsandPictographs::Falafel => '๐Ÿง†', SupplementalSymbolsandPictographs::Waffle => '๐Ÿง‡', SupplementalSymbolsandPictographs::Butter => '๐Ÿงˆ', SupplementalSymbolsandPictographs::MateDrink => '๐Ÿง‰', SupplementalSymbolsandPictographs::IceCube => '๐ŸงŠ', SupplementalSymbolsandPictographs::StandingPerson => '๐Ÿง', SupplementalSymbolsandPictographs::KneelingPerson => '๐ŸงŽ', SupplementalSymbolsandPictographs::DeafPerson => '๐Ÿง', SupplementalSymbolsandPictographs::FaceWithMonocle => '๐Ÿง', SupplementalSymbolsandPictographs::Adult => '๐Ÿง‘', SupplementalSymbolsandPictographs::Child => '๐Ÿง’', SupplementalSymbolsandPictographs::OlderAdult => '๐Ÿง“', SupplementalSymbolsandPictographs::BeardedPerson => '๐Ÿง”', SupplementalSymbolsandPictographs::PersonWithHeadscarf => '๐Ÿง•', SupplementalSymbolsandPictographs::PersonInSteamyRoom => '๐Ÿง–', SupplementalSymbolsandPictographs::PersonClimbing => '๐Ÿง—', SupplementalSymbolsandPictographs::PersonInLotusPosition => '๐Ÿง˜', SupplementalSymbolsandPictographs::Mage => '๐Ÿง™', SupplementalSymbolsandPictographs::Fairy => '๐Ÿงš', SupplementalSymbolsandPictographs::Vampire => '๐Ÿง›', SupplementalSymbolsandPictographs::Merperson => '๐Ÿงœ', SupplementalSymbolsandPictographs::Elf => '๐Ÿง', SupplementalSymbolsandPictographs::Genie => '๐Ÿงž', SupplementalSymbolsandPictographs::Zombie => '๐ŸงŸ', SupplementalSymbolsandPictographs::Brain => '๐Ÿง ', SupplementalSymbolsandPictographs::OrangeHeart => '๐Ÿงก', SupplementalSymbolsandPictographs::BilledCap => '๐Ÿงข', SupplementalSymbolsandPictographs::Scarf => '๐Ÿงฃ', SupplementalSymbolsandPictographs::Gloves => '๐Ÿงค', SupplementalSymbolsandPictographs::Coat => '๐Ÿงฅ', SupplementalSymbolsandPictographs::Socks => '๐Ÿงฆ', 
SupplementalSymbolsandPictographs::RedGiftEnvelope => '๐Ÿงง', SupplementalSymbolsandPictographs::Firecracker => '๐Ÿงจ', SupplementalSymbolsandPictographs::JigsawPuzzlePiece => '๐Ÿงฉ', SupplementalSymbolsandPictographs::TestTube => '๐Ÿงช', SupplementalSymbolsandPictographs::PetriDish => '๐Ÿงซ', SupplementalSymbolsandPictographs::DnaDoubleHelix => '๐Ÿงฌ', SupplementalSymbolsandPictographs::Compass => '๐Ÿงญ', SupplementalSymbolsandPictographs::Abacus => '๐Ÿงฎ', SupplementalSymbolsandPictographs::FireExtinguisher => '๐Ÿงฏ', SupplementalSymbolsandPictographs::Toolbox => '๐Ÿงฐ', SupplementalSymbolsandPictographs::Brick => '๐Ÿงฑ', SupplementalSymbolsandPictographs::Magnet => '๐Ÿงฒ', SupplementalSymbolsandPictographs::Luggage => '๐Ÿงณ', SupplementalSymbolsandPictographs::LotionBottle => '๐Ÿงด', SupplementalSymbolsandPictographs::SpoolOfThread => '๐Ÿงต', SupplementalSymbolsandPictographs::BallOfYarn => '๐Ÿงถ', SupplementalSymbolsandPictographs::SafetyPin => '๐Ÿงท', SupplementalSymbolsandPictographs::TeddyBear => '๐Ÿงธ', SupplementalSymbolsandPictographs::Broom => '๐Ÿงน', SupplementalSymbolsandPictographs::Basket => '๐Ÿงบ', SupplementalSymbolsandPictographs::RollOfPaper => '๐Ÿงป', SupplementalSymbolsandPictographs::BarOfSoap => '๐Ÿงผ', SupplementalSymbolsandPictographs::Sponge => '๐Ÿงฝ', SupplementalSymbolsandPictographs::Receipt => '๐Ÿงพ', } } } impl std::convert::TryFrom<char> for SupplementalSymbolsandPictographs { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '๐Ÿค€' => Ok(SupplementalSymbolsandPictographs::CircledCrossFormeeWithFourDots), '๐Ÿค' => Ok(SupplementalSymbolsandPictographs::CircledCrossFormeeWithTwoDots), '๐Ÿค‚' => Ok(SupplementalSymbolsandPictographs::CircledCrossFormee), '๐Ÿคƒ' => Ok(SupplementalSymbolsandPictographs::LeftHalfCircleWithFourDots), '๐Ÿค„' => Ok(SupplementalSymbolsandPictographs::LeftHalfCircleWithThreeDots), '๐Ÿค…' => Ok(SupplementalSymbolsandPictographs::LeftHalfCircleWithTwoDots), '๐Ÿค†' => 
Ok(SupplementalSymbolsandPictographs::LeftHalfCircleWithDot), '๐Ÿค‡' => Ok(SupplementalSymbolsandPictographs::LeftHalfCircle), '๐Ÿคˆ' => Ok(SupplementalSymbolsandPictographs::DownwardFacingHook), '๐Ÿค‰' => Ok(SupplementalSymbolsandPictographs::DownwardFacingNotchedHook), '๐ŸคŠ' => Ok(SupplementalSymbolsandPictographs::DownwardFacingHookWithDot), '๐Ÿค‹' => Ok(SupplementalSymbolsandPictographs::DownwardFacingNotchedHookWithDot), '๐Ÿค' => Ok(SupplementalSymbolsandPictographs::WhiteHeart), '๐ŸคŽ' => Ok(SupplementalSymbolsandPictographs::BrownHeart), '๐Ÿค' => Ok(SupplementalSymbolsandPictographs::PinchingHand), '๐Ÿค' => Ok(SupplementalSymbolsandPictographs::ZipperDashMouthFace), '๐Ÿค‘' => Ok(SupplementalSymbolsandPictographs::MoneyDashMouthFace), '๐Ÿค’' => Ok(SupplementalSymbolsandPictographs::FaceWithThermometer), '๐Ÿค“' => Ok(SupplementalSymbolsandPictographs::NerdFace), '๐Ÿค”' => Ok(SupplementalSymbolsandPictographs::ThinkingFace), '๐Ÿค•' => Ok(SupplementalSymbolsandPictographs::FaceWithHeadDashBandage), '๐Ÿค–' => Ok(SupplementalSymbolsandPictographs::RobotFace), '๐Ÿค—' => Ok(SupplementalSymbolsandPictographs::HuggingFace), '๐Ÿค˜' => Ok(SupplementalSymbolsandPictographs::SignOfTheHorns), '๐Ÿค™' => Ok(SupplementalSymbolsandPictographs::CallMeHand), '๐Ÿคš' => Ok(SupplementalSymbolsandPictographs::RaisedBackOfHand), '๐Ÿค›' => Ok(SupplementalSymbolsandPictographs::LeftDashFacingFist), '๐Ÿคœ' => Ok(SupplementalSymbolsandPictographs::RightDashFacingFist), '๐Ÿค' => Ok(SupplementalSymbolsandPictographs::Handshake), '๐Ÿคž' => Ok(SupplementalSymbolsandPictographs::HandWithIndexAndMiddleFingersCrossed), '๐ŸคŸ' => Ok(SupplementalSymbolsandPictographs::ILoveYouHandSign), '๐Ÿค ' => Ok(SupplementalSymbolsandPictographs::FaceWithCowboyHat), '๐Ÿคก' => Ok(SupplementalSymbolsandPictographs::ClownFace), '๐Ÿคข' => Ok(SupplementalSymbolsandPictographs::NauseatedFace), '๐Ÿคฃ' => Ok(SupplementalSymbolsandPictographs::RollingOnTheFloorLaughing), '๐Ÿคค' => 
Ok(SupplementalSymbolsandPictographs::DroolingFace), '๐Ÿคฅ' => Ok(SupplementalSymbolsandPictographs::LyingFace), '๐Ÿคฆ' => Ok(SupplementalSymbolsandPictographs::FacePalm), '๐Ÿคง' => Ok(SupplementalSymbolsandPictographs::SneezingFace), '๐Ÿคจ' => Ok(SupplementalSymbolsandPictographs::FaceWithOneEyebrowRaised), '๐Ÿคฉ' => Ok(SupplementalSymbolsandPictographs::GrinningFaceWithStarEyes), '๐Ÿคช' => Ok(SupplementalSymbolsandPictographs::GrinningFaceWithOneLargeAndOneSmallEye), '๐Ÿคซ' => Ok(SupplementalSymbolsandPictographs::FaceWithFingerCoveringClosedLips), '๐Ÿคฌ' => Ok(SupplementalSymbolsandPictographs::SeriousFaceWithSymbolsCoveringMouth), '๐Ÿคญ' => Ok(SupplementalSymbolsandPictographs::SmilingFaceWithSmilingEyesAndHandCoveringMouth), '๐Ÿคฎ' => Ok(SupplementalSymbolsandPictographs::FaceWithOpenMouthVomiting), '๐Ÿคฏ' => Ok(SupplementalSymbolsandPictographs::ShockedFaceWithExplodingHead), '๐Ÿคฐ' => Ok(SupplementalSymbolsandPictographs::PregnantWoman), '๐Ÿคฑ' => Ok(SupplementalSymbolsandPictographs::BreastDashFeeding), '๐Ÿคฒ' => Ok(SupplementalSymbolsandPictographs::PalmsUpTogether), '๐Ÿคณ' => Ok(SupplementalSymbolsandPictographs::Selfie), '๐Ÿคด' => Ok(SupplementalSymbolsandPictographs::Prince), '๐Ÿคต' => Ok(SupplementalSymbolsandPictographs::ManInTuxedo), '๐Ÿคถ' => Ok(SupplementalSymbolsandPictographs::MotherChristmas), '๐Ÿคท' => Ok(SupplementalSymbolsandPictographs::Shrug), '๐Ÿคธ' => Ok(SupplementalSymbolsandPictographs::PersonDoingCartwheel), '๐Ÿคน' => Ok(SupplementalSymbolsandPictographs::Juggling), '๐Ÿคบ' => Ok(SupplementalSymbolsandPictographs::Fencer), '๐Ÿคป' => Ok(SupplementalSymbolsandPictographs::ModernPentathlon), '๐Ÿคผ' => Ok(SupplementalSymbolsandPictographs::Wrestlers), '๐Ÿคฝ' => Ok(SupplementalSymbolsandPictographs::WaterPolo), '๐Ÿคพ' => Ok(SupplementalSymbolsandPictographs::Handball), '๐Ÿคฟ' => Ok(SupplementalSymbolsandPictographs::DivingMask), '๐Ÿฅ€' => Ok(SupplementalSymbolsandPictographs::WiltedFlower), '๐Ÿฅ' => 
Ok(SupplementalSymbolsandPictographs::DrumWithDrumsticks), '๐Ÿฅ‚' => Ok(SupplementalSymbolsandPictographs::ClinkingGlasses), '๐Ÿฅƒ' => Ok(SupplementalSymbolsandPictographs::TumblerGlass), '๐Ÿฅ„' => Ok(SupplementalSymbolsandPictographs::Spoon), '๐Ÿฅ…' => Ok(SupplementalSymbolsandPictographs::GoalNet), '๐Ÿฅ†' => Ok(SupplementalSymbolsandPictographs::Rifle), '๐Ÿฅ‡' => Ok(SupplementalSymbolsandPictographs::FirstPlaceMedal), '๐Ÿฅˆ' => Ok(SupplementalSymbolsandPictographs::SecondPlaceMedal), '๐Ÿฅ‰' => Ok(SupplementalSymbolsandPictographs::ThirdPlaceMedal), '๐ŸฅŠ' => Ok(SupplementalSymbolsandPictographs::BoxingGlove), '๐Ÿฅ‹' => Ok(SupplementalSymbolsandPictographs::MartialArtsUniform), '๐ŸฅŒ' => Ok(SupplementalSymbolsandPictographs::CurlingStone), '๐Ÿฅ' => Ok(SupplementalSymbolsandPictographs::LacrosseStickAndBall), '๐ŸฅŽ' => Ok(SupplementalSymbolsandPictographs::Softball), '๐Ÿฅ' => Ok(SupplementalSymbolsandPictographs::FlyingDisc), '๐Ÿฅ' => Ok(SupplementalSymbolsandPictographs::Croissant), '๐Ÿฅ‘' => Ok(SupplementalSymbolsandPictographs::Avocado), '๐Ÿฅ’' => Ok(SupplementalSymbolsandPictographs::Cucumber), '๐Ÿฅ“' => Ok(SupplementalSymbolsandPictographs::Bacon), '๐Ÿฅ”' => Ok(SupplementalSymbolsandPictographs::Potato), '๐Ÿฅ•' => Ok(SupplementalSymbolsandPictographs::Carrot), '๐Ÿฅ–' => Ok(SupplementalSymbolsandPictographs::BaguetteBread), '๐Ÿฅ—' => Ok(SupplementalSymbolsandPictographs::GreenSalad), '๐Ÿฅ˜' => Ok(SupplementalSymbolsandPictographs::ShallowPanOfFood), '๐Ÿฅ™' => Ok(SupplementalSymbolsandPictographs::StuffedFlatbread), '๐Ÿฅš' => Ok(SupplementalSymbolsandPictographs::Egg), '๐Ÿฅ›' => Ok(SupplementalSymbolsandPictographs::GlassOfMilk), '๐Ÿฅœ' => Ok(SupplementalSymbolsandPictographs::Peanuts), '๐Ÿฅ' => Ok(SupplementalSymbolsandPictographs::Kiwifruit), '๐Ÿฅž' => Ok(SupplementalSymbolsandPictographs::Pancakes), '๐ŸฅŸ' => Ok(SupplementalSymbolsandPictographs::Dumpling), '๐Ÿฅ ' => Ok(SupplementalSymbolsandPictographs::FortuneCookie), '๐Ÿฅก' => 
Ok(SupplementalSymbolsandPictographs::TakeoutBox), '๐Ÿฅข' => Ok(SupplementalSymbolsandPictographs::Chopsticks), '๐Ÿฅฃ' => Ok(SupplementalSymbolsandPictographs::BowlWithSpoon), '๐Ÿฅค' => Ok(SupplementalSymbolsandPictographs::CupWithStraw), '๐Ÿฅฅ' => Ok(SupplementalSymbolsandPictographs::Coconut), '๐Ÿฅฆ' => Ok(SupplementalSymbolsandPictographs::Broccoli), '๐Ÿฅง' => Ok(SupplementalSymbolsandPictographs::Pie), '๐Ÿฅจ' => Ok(SupplementalSymbolsandPictographs::Pretzel), '๐Ÿฅฉ' => Ok(SupplementalSymbolsandPictographs::CutOfMeat), '๐Ÿฅช' => Ok(SupplementalSymbolsandPictographs::Sandwich), '๐Ÿฅซ' => Ok(SupplementalSymbolsandPictographs::CannedFood), '๐Ÿฅฌ' => Ok(SupplementalSymbolsandPictographs::LeafyGreen), '๐Ÿฅญ' => Ok(SupplementalSymbolsandPictographs::Mango), '๐Ÿฅฎ' => Ok(SupplementalSymbolsandPictographs::MoonCake), '๐Ÿฅฏ' => Ok(SupplementalSymbolsandPictographs::Bagel), '๐Ÿฅฐ' => Ok(SupplementalSymbolsandPictographs::SmilingFaceWithSmilingEyesAndThreeHearts), '๐Ÿฅฑ' => Ok(SupplementalSymbolsandPictographs::YawningFace), '๐Ÿฅณ' => Ok(SupplementalSymbolsandPictographs::FaceWithPartyHornAndPartyHat), '๐Ÿฅด' => Ok(SupplementalSymbolsandPictographs::FaceWithUnevenEyesAndWavyMouth), '๐Ÿฅต' => Ok(SupplementalSymbolsandPictographs::OverheatedFace), '๐Ÿฅถ' => Ok(SupplementalSymbolsandPictographs::FreezingFace), '๐Ÿฅบ' => Ok(SupplementalSymbolsandPictographs::FaceWithPleadingEyes), '๐Ÿฅป' => Ok(SupplementalSymbolsandPictographs::Sari), '๐Ÿฅผ' => Ok(SupplementalSymbolsandPictographs::LabCoat), '๐Ÿฅฝ' => Ok(SupplementalSymbolsandPictographs::Goggles), '๐Ÿฅพ' => Ok(SupplementalSymbolsandPictographs::HikingBoot), '๐Ÿฅฟ' => Ok(SupplementalSymbolsandPictographs::FlatShoe), '๐Ÿฆ€' => Ok(SupplementalSymbolsandPictographs::Crab), '๐Ÿฆ' => Ok(SupplementalSymbolsandPictographs::LionFace), '๐Ÿฆ‚' => Ok(SupplementalSymbolsandPictographs::Scorpion), '๐Ÿฆƒ' => Ok(SupplementalSymbolsandPictographs::Turkey), '๐Ÿฆ„' => Ok(SupplementalSymbolsandPictographs::UnicornFace), '๐Ÿฆ…' => 
Ok(SupplementalSymbolsandPictographs::Eagle), '๐Ÿฆ†' => Ok(SupplementalSymbolsandPictographs::Duck), '๐Ÿฆ‡' => Ok(SupplementalSymbolsandPictographs::Bat), '๐Ÿฆˆ' => Ok(SupplementalSymbolsandPictographs::Shark), '๐Ÿฆ‰' => Ok(SupplementalSymbolsandPictographs::Owl), '๐ŸฆŠ' => Ok(SupplementalSymbolsandPictographs::FoxFace), '๐Ÿฆ‹' => Ok(SupplementalSymbolsandPictographs::Butterfly), '๐ŸฆŒ' => Ok(SupplementalSymbolsandPictographs::Deer), '๐Ÿฆ' => Ok(SupplementalSymbolsandPictographs::Gorilla), '๐ŸฆŽ' => Ok(SupplementalSymbolsandPictographs::Lizard), '๐Ÿฆ' => Ok(SupplementalSymbolsandPictographs::Rhinoceros), '๐Ÿฆ' => Ok(SupplementalSymbolsandPictographs::Shrimp), '๐Ÿฆ‘' => Ok(SupplementalSymbolsandPictographs::Squid), '๐Ÿฆ’' => Ok(SupplementalSymbolsandPictographs::GiraffeFace), '๐Ÿฆ“' => Ok(SupplementalSymbolsandPictographs::ZebraFace), '๐Ÿฆ”' => Ok(SupplementalSymbolsandPictographs::Hedgehog), '๐Ÿฆ•' => Ok(SupplementalSymbolsandPictographs::Sauropod), '๐Ÿฆ–' => Ok(SupplementalSymbolsandPictographs::TDashRex), '๐Ÿฆ—' => Ok(SupplementalSymbolsandPictographs::Cricket), '๐Ÿฆ˜' => Ok(SupplementalSymbolsandPictographs::Kangaroo), '๐Ÿฆ™' => Ok(SupplementalSymbolsandPictographs::Llama), '๐Ÿฆš' => Ok(SupplementalSymbolsandPictographs::Peacock), '๐Ÿฆ›' => Ok(SupplementalSymbolsandPictographs::Hippopotamus), '๐Ÿฆœ' => Ok(SupplementalSymbolsandPictographs::Parrot), '๐Ÿฆ' => Ok(SupplementalSymbolsandPictographs::Raccoon), '๐Ÿฆž' => Ok(SupplementalSymbolsandPictographs::Lobster), '๐ŸฆŸ' => Ok(SupplementalSymbolsandPictographs::Mosquito), '๐Ÿฆ ' => Ok(SupplementalSymbolsandPictographs::Microbe), '๐Ÿฆก' => Ok(SupplementalSymbolsandPictographs::Badger), '๐Ÿฆข' => Ok(SupplementalSymbolsandPictographs::Swan), '๐Ÿฆฅ' => Ok(SupplementalSymbolsandPictographs::Sloth), '๐Ÿฆฆ' => Ok(SupplementalSymbolsandPictographs::Otter), '๐Ÿฆง' => Ok(SupplementalSymbolsandPictographs::Orangutan), '๐Ÿฆจ' => Ok(SupplementalSymbolsandPictographs::Skunk), '๐Ÿฆฉ' => 
Ok(SupplementalSymbolsandPictographs::Flamingo), '๐Ÿฆช' => Ok(SupplementalSymbolsandPictographs::Oyster), '๐Ÿฆฎ' => Ok(SupplementalSymbolsandPictographs::GuideDog), '๐Ÿฆฏ' => Ok(SupplementalSymbolsandPictographs::ProbingCane), '๐Ÿฆฐ' => Ok(SupplementalSymbolsandPictographs::EmojiComponentRedHair), '๐Ÿฆฑ' => Ok(SupplementalSymbolsandPictographs::EmojiComponentCurlyHair), '๐Ÿฆฒ' => Ok(SupplementalSymbolsandPictographs::EmojiComponentBald), '๐Ÿฆณ' => Ok(SupplementalSymbolsandPictographs::EmojiComponentWhiteHair), '๐Ÿฆด' => Ok(SupplementalSymbolsandPictographs::Bone), '๐Ÿฆต' => Ok(SupplementalSymbolsandPictographs::Leg), '๐Ÿฆถ' => Ok(SupplementalSymbolsandPictographs::Foot), '๐Ÿฆท' => Ok(SupplementalSymbolsandPictographs::Tooth), '๐Ÿฆธ' => Ok(SupplementalSymbolsandPictographs::Superhero), '๐Ÿฆน' => Ok(SupplementalSymbolsandPictographs::Supervillain), '๐Ÿฆบ' => Ok(SupplementalSymbolsandPictographs::SafetyVest), '๐Ÿฆป' => Ok(SupplementalSymbolsandPictographs::EarWithHearingAid), '๐Ÿฆผ' => Ok(SupplementalSymbolsandPictographs::MotorizedWheelchair), '๐Ÿฆฝ' => Ok(SupplementalSymbolsandPictographs::ManualWheelchair), '๐Ÿฆพ' => Ok(SupplementalSymbolsandPictographs::MechanicalArm), '๐Ÿฆฟ' => Ok(SupplementalSymbolsandPictographs::MechanicalLeg), '๐Ÿง€' => Ok(SupplementalSymbolsandPictographs::CheeseWedge), '๐Ÿง' => Ok(SupplementalSymbolsandPictographs::Cupcake), '๐Ÿง‚' => Ok(SupplementalSymbolsandPictographs::SaltShaker), '๐Ÿงƒ' => Ok(SupplementalSymbolsandPictographs::BeverageBox), '๐Ÿง„' => Ok(SupplementalSymbolsandPictographs::Garlic), '๐Ÿง…' => Ok(SupplementalSymbolsandPictographs::Onion), '๐Ÿง†' => Ok(SupplementalSymbolsandPictographs::Falafel), '๐Ÿง‡' => Ok(SupplementalSymbolsandPictographs::Waffle), '๐Ÿงˆ' => Ok(SupplementalSymbolsandPictographs::Butter), '๐Ÿง‰' => Ok(SupplementalSymbolsandPictographs::MateDrink), '๐ŸงŠ' => Ok(SupplementalSymbolsandPictographs::IceCube), '๐Ÿง' => Ok(SupplementalSymbolsandPictographs::StandingPerson), '๐ŸงŽ' => 
Ok(SupplementalSymbolsandPictographs::KneelingPerson), '๐Ÿง' => Ok(SupplementalSymbolsandPictographs::DeafPerson), '๐Ÿง' => Ok(SupplementalSymbolsandPictographs::FaceWithMonocle), '๐Ÿง‘' => Ok(SupplementalSymbolsandPictographs::Adult), '๐Ÿง’' => Ok(SupplementalSymbolsandPictographs::Child), '๐Ÿง“' => Ok(SupplementalSymbolsandPictographs::OlderAdult), '๐Ÿง”' => Ok(SupplementalSymbolsandPictographs::BeardedPerson), '๐Ÿง•' => Ok(SupplementalSymbolsandPictographs::PersonWithHeadscarf), '๐Ÿง–' => Ok(SupplementalSymbolsandPictographs::PersonInSteamyRoom), '๐Ÿง—' => Ok(SupplementalSymbolsandPictographs::PersonClimbing), '๐Ÿง˜' => Ok(SupplementalSymbolsandPictographs::PersonInLotusPosition), '๐Ÿง™' => Ok(SupplementalSymbolsandPictographs::Mage), '๐Ÿงš' => Ok(SupplementalSymbolsandPictographs::Fairy), '๐Ÿง›' => Ok(SupplementalSymbolsandPictographs::Vampire), '๐Ÿงœ' => Ok(SupplementalSymbolsandPictographs::Merperson), '๐Ÿง' => Ok(SupplementalSymbolsandPictographs::Elf), '๐Ÿงž' => Ok(SupplementalSymbolsandPictographs::Genie), '๐ŸงŸ' => Ok(SupplementalSymbolsandPictographs::Zombie), '๐Ÿง ' => Ok(SupplementalSymbolsandPictographs::Brain), '๐Ÿงก' => Ok(SupplementalSymbolsandPictographs::OrangeHeart), '๐Ÿงข' => Ok(SupplementalSymbolsandPictographs::BilledCap), '๐Ÿงฃ' => Ok(SupplementalSymbolsandPictographs::Scarf), '๐Ÿงค' => Ok(SupplementalSymbolsandPictographs::Gloves), '๐Ÿงฅ' => Ok(SupplementalSymbolsandPictographs::Coat), '๐Ÿงฆ' => Ok(SupplementalSymbolsandPictographs::Socks), '๐Ÿงง' => Ok(SupplementalSymbolsandPictographs::RedGiftEnvelope), '๐Ÿงจ' => Ok(SupplementalSymbolsandPictographs::Firecracker), '๐Ÿงฉ' => Ok(SupplementalSymbolsandPictographs::JigsawPuzzlePiece), '๐Ÿงช' => Ok(SupplementalSymbolsandPictographs::TestTube), '๐Ÿงซ' => Ok(SupplementalSymbolsandPictographs::PetriDish), '๐Ÿงฌ' => Ok(SupplementalSymbolsandPictographs::DnaDoubleHelix), '๐Ÿงญ' => Ok(SupplementalSymbolsandPictographs::Compass), '๐Ÿงฎ' => Ok(SupplementalSymbolsandPictographs::Abacus), '๐Ÿงฏ' => 
Ok(SupplementalSymbolsandPictographs::FireExtinguisher), '๐Ÿงฐ' => Ok(SupplementalSymbolsandPictographs::Toolbox), '๐Ÿงฑ' => Ok(SupplementalSymbolsandPictographs::Brick), '๐Ÿงฒ' => Ok(SupplementalSymbolsandPictographs::Magnet), '๐Ÿงณ' => Ok(SupplementalSymbolsandPictographs::Luggage), '๐Ÿงด' => Ok(SupplementalSymbolsandPictographs::LotionBottle), '๐Ÿงต' => Ok(SupplementalSymbolsandPictographs::SpoolOfThread), '๐Ÿงถ' => Ok(SupplementalSymbolsandPictographs::BallOfYarn), '๐Ÿงท' => Ok(SupplementalSymbolsandPictographs::SafetyPin), '๐Ÿงธ' => Ok(SupplementalSymbolsandPictographs::TeddyBear), '๐Ÿงน' => Ok(SupplementalSymbolsandPictographs::Broom), '๐Ÿงบ' => Ok(SupplementalSymbolsandPictographs::Basket), '๐Ÿงป' => Ok(SupplementalSymbolsandPictographs::RollOfPaper), '๐Ÿงผ' => Ok(SupplementalSymbolsandPictographs::BarOfSoap), '๐Ÿงฝ' => Ok(SupplementalSymbolsandPictographs::Sponge), '๐Ÿงพ' => Ok(SupplementalSymbolsandPictographs::Receipt), _ => Err(()), } } } impl Into<u32> for SupplementalSymbolsandPictographs { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for SupplementalSymbolsandPictographs { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for SupplementalSymbolsandPictographs { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl SupplementalSymbolsandPictographs { /// The character with the lowest index in this unicode block pub fn new() -> Self { SupplementalSymbolsandPictographs::CircledCrossFormeeWithFourDots } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("SupplementalSymbolsandPictographs{:#?}", self); string_morph::to_sentence_case(&s) } }
//use propagators::network::{ Network }; //#[test] //fn test_network_f64_add() { //let mut network : Network<f64> = Network::new(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //network.write_cell(a, 1.); //network.write_cell(b, 2.); //network.propagator_add(a, b, c); //network.run(); //let expected = 3.; //let actual = network.read_cell(c).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_multiply() { //let mut network : Network<f64> = Network::new(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //network.write_cell(a, 1.5); //network.write_cell(b, 2.); //network.propagator_multiply(a, b, c); //network.run(); //let expected = 3.; //let actual = network.read_cell(c).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_constraint_add() { //let mut network:Network<f64> = Network::new(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //network.write_cell(b, 2.); //network.write_cell(c, 3.); //network.constraint_add(a, b, c); //network.run(); //let expected = 1.; //let actual = network.read_cell(a).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_constraint_product_a() { //let mut network:Network<f64> = Network::new(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //network.write_cell(a, 1.5); //network.write_cell(b, 2.); //network.constraint_product(a, b, c); //network.run(); //let expected = 3.; //let actual = network.read_cell(c).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_constraint_product_b() { //let mut network:Network<f64> = Network::new(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //network.write_cell(a, 1.5); //network.write_cell(c, 3.); //network.constraint_product(a, b, c); //network.run(); //let expected = 
2.; //let actual = network.read_cell(b).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_constraint_product_c() { //let mut network:Network<f64> = Network::new(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //network.write_cell(b, 2.); //network.write_cell(c, 3.); //network.constraint_product(a, b, c); //network.run(); //let expected = 1.5; //let actual = network.read_cell(a).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_constraint_product_triangle() { //let mut network:Network<f64> = Network::new(); //let ratio = network.make_cell(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //let d = network.make_cell(); //network.write_cell(a, 2.); //network.write_cell(b, 4.); //network.write_cell(c, 3.); //network.write_cell(d, 6.); //network.constraint_product(a, ratio, b); //network.constraint_product(c, ratio, d); //network.run(); //let expected = 2.; //let actual = network.read_cell(ratio).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_constraint_product_triangle_2() { //let mut network:Network<f64> = Network::new(); //let ratio = network.make_cell(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //let d = network.make_cell(); //network.write_cell(ratio, 2.); //network.write_cell(a, 2.); //network.write_cell(b, 4.); //network.write_cell(c, 3.); //network.constraint_product(a, ratio, b); //network.constraint_product(c, ratio, d); //network.run(); //let expected = 6.; //let actual = network.read_cell(d).unwrap(); //assert_eq!(expected, actual); //} //#[test] //fn test_network_f64_constraint_similar_triangles() { //let mut network:Network<f64> = Network::new(); //let a = network.make_cell(); //let b = network.make_cell(); //let c = network.make_cell(); //let d = network.make_cell(); //network.write_cell(a, 2.); //network.write_cell(b, 4.); 
//network.write_cell(c, 3.); //network.constraint_similar_triangles(a, b, c, d); //network.run(); //let expected = 6.; //let actual = network.read_cell(d).unwrap(); //assert_eq!(expected, actual); //} // TODO: test contradictions
extern crate roaring;

use roaring::RoaringBitmap;

// Each test builds a (sup, sub) pair that exercises one combination of
// array-backed and bitmap-backed containers, and checks `is_subset`
// together with its optimized variant `is_subset_opt`.
//
// Idiom fix: `assert!(..)` / `assert!(!..)` instead of
// `assert_eq!(.., true/false)` (clippy::bool_assert_comparison).

#[test]
fn array_not() {
    let sup: RoaringBitmap<u32> = (0..2000u32).collect();
    let sub: RoaringBitmap<u32> = (1000..3000u32).collect();
    assert!(!sub.is_subset(&sup));
    assert!(!sub.is_subset_opt(&sup));
}

#[test]
fn array() {
    let sup: RoaringBitmap<u32> = (0..4000u32).collect();
    let sub: RoaringBitmap<u32> = (2000..3000u32).collect();
    assert!(sub.is_subset(&sup));
    assert!(sub.is_subset_opt(&sup));
}

#[test]
fn array_bitmap_not() {
    let sup: RoaringBitmap<u32> = (0..2000u32).collect();
    let sub: RoaringBitmap<u32> = (1000..15000u32).collect();
    assert!(!sub.is_subset(&sup));
    assert!(!sub.is_subset_opt(&sup));
}

#[test]
fn bitmap_not() {
    let sup: RoaringBitmap<u32> = (0..6000u32).collect();
    let sub: RoaringBitmap<u32> = (4000..10000u32).collect();
    assert!(!sub.is_subset(&sup));
    assert!(!sub.is_subset_opt(&sup));
}

#[test]
fn bitmap() {
    let sup: RoaringBitmap<u32> = (0..20000u32).collect();
    let sub: RoaringBitmap<u32> = (5000..15000u32).collect();
    assert!(sub.is_subset(&sup));
    assert!(sub.is_subset_opt(&sup));
}

#[test]
fn bitmap_array_not() {
    let sup: RoaringBitmap<u32> = (0..20000u32).collect();
    let sub: RoaringBitmap<u32> = (19000..21000u32).collect();
    assert!(!sub.is_subset(&sup));
    assert!(!sub.is_subset_opt(&sup));
}

#[test]
fn bitmap_array() {
    let sup: RoaringBitmap<u32> = (0..20000u32).collect();
    let sub: RoaringBitmap<u32> = (18000..20000u32).collect();
    assert!(sub.is_subset(&sup));
    assert!(sub.is_subset_opt(&sup));
}

#[test]
fn arrays_not() {
    let sup: RoaringBitmap<u32> = (0..2000u32).chain(1_000_000..1_002_000u32).collect();
    let sub: RoaringBitmap<u32> = (100_000..102_000u32).chain(1_100_000..1_102_000u32).collect();
    assert!(!sub.is_subset(&sup));
    assert!(!sub.is_subset_opt(&sup));
}

#[test]
fn arrays() {
    let sup: RoaringBitmap<u32> = (0..3000u32).chain(100000..103000u32).collect();
    let sub: RoaringBitmap<u32> = (0..2000u32).chain(100000..102000u32).collect();
    assert!(sub.is_subset(&sup));
    assert!(sub.is_subset_opt(&sup));
}

#[test]
fn bitmaps_not() {
    let sup: RoaringBitmap<u32> =
        (0..6000u32).chain(1000000..1006000u32).chain(2000000..2010000u32).collect();
    let sub: RoaringBitmap<u32> = (100000..106000u32).chain(1100000..1106000u32).collect();
    assert!(!sub.is_subset(&sup));
    assert!(!sub.is_subset_opt(&sup));
}

#[test]
fn bitmaps() {
    let sup: RoaringBitmap<u32> = (0..1_000_000u32).chain(2_000_000..2_010_000u32).collect();
    let sub: RoaringBitmap<u32> = (0..10_000u32).chain(500_000..510_000u32).collect();
    assert!(sub.is_subset(&sup));
    assert!(sub.is_subset_opt(&sup));
}
use std::ffi::CStr;
use std::fmt;
use std::os::raw::{c_char, c_int, c_void};

use crate::error::{Error, Result};
use crate::panic;

/// The result of a successful name-info lookup.
#[derive(Clone, Copy, Debug)]
pub struct NameInfoResult<'a> {
    // Borrowed first bytes of C strings supplied by the library in the
    // callback; `None` when the corresponding pointer was null.
    node: Option<&'a c_char>,
    service: Option<&'a c_char>,
}

impl<'a> NameInfoResult<'a> {
    // Internal constructor used by `get_name_info_callback` below.
    fn new(node: Option<&'a c_char>, service: Option<&'a c_char>) -> Self {
        NameInfoResult { node, service }
    }

    /// Returns the node from this `NameInfoResult`.
    ///
    /// In practice this is very likely to be a valid UTF-8 string, but the underlying `c-ares`
    /// library does not guarantee this - so we leave it to users to decide whether they prefer a
    /// fallible conversion, a lossy conversion, or something else altogether.
    pub fn node(&self) -> Option<&CStr> {
        // NOTE(review): assumes the pointer refers to a nul-terminated C
        // string that stays valid for the lifetime 'a — TODO confirm the
        // library guarantees this for the callback's duration.
        self.node.map(|string| unsafe { CStr::from_ptr(string) })
    }

    /// Returns the service from this `NameInfoResult`.
    ///
    /// In practice this is very likely to be a valid UTF-8 string, but the underlying `c-ares`
    /// library does not guarantee this - so we leave it to users to decide whether they prefer a
    /// fallible conversion, a lossy conversion, or something else altogether.
    pub fn service(&self) -> Option<&CStr> {
        // Same validity assumption as `node()` above.
        self.service.map(|string| unsafe { CStr::from_ptr(string) })
    }
}

impl<'a> fmt::Display for NameInfoResult<'a> {
    // Renders "Node: …, Service: …", substituting placeholders for missing
    // or non-UTF-8 values instead of failing.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let node = self
            .node()
            .map(|cstr| cstr.to_str().unwrap_or("<not utf8>"))
            .unwrap_or("<None>");
        write!(fmt, "Node: {node}, ")?;
        let service = self
            .service()
            .map(|cstr| cstr.to_str().unwrap_or("<not utf8>"))
            .unwrap_or("<None>");
        write!(fmt, "Service: {service}")
    }
}

// SAFETY: the struct only holds shared references to C string data and
// never mutates through them; this is sound provided the library does not
// mutate those strings concurrently — NOTE(review): confirm against the
// c-ares threading guarantees.
unsafe impl<'a> Send for NameInfoResult<'a> {}
unsafe impl<'a> Sync for NameInfoResult<'a> {}

// C-ABI completion callback. `arg` is presumed to be a `Box<F>` turned into
// a raw pointer when the lookup was started — TODO confirm the allocation
// site matches the `Box::from_raw` below (double-free/leak otherwise).
pub(crate) unsafe extern "C" fn get_name_info_callback<F>(
    arg: *mut c_void,
    status: c_int,
    _timeouts: c_int,
    node: *mut c_char,
    service: *mut c_char,
) where
    F: FnOnce(Result<NameInfoResult>) + Send + 'static,
{
    // `panic::catch` keeps panics from unwinding across the FFI boundary.
    panic::catch(|| {
        let result = if status == c_ares_sys::ARES_SUCCESS {
            // `as_ref` maps a null pointer to `None`, otherwise borrows the
            // pointed-to byte.
            let name_info_result = NameInfoResult::new(node.as_ref(), service.as_ref());
            Ok(name_info_result)
        } else {
            Err(Error::from(status))
        };
        // Reclaim ownership of the user's handler and invoke it exactly once.
        let handler = Box::from_raw(arg as *mut F);
        handler(result);
    });
}
use std::time::Duration; use hal::prelude::*; use rppal::hal::Timer; use rppal::pwm::{Channel, Error, Polarity, Pwm}; const EPSILON: Duration = Duration::from_micros(4); fn sleep(delay: Duration) { let mut timer = Timer::new(); timer.start(delay - EPSILON); block!(timer.wait()).unwrap(); } fn space(pwm: &Pwm, delay: Duration) -> Result<(), Error> { pwm.disable()?; if delay.as_micros() > 0 { sleep(delay); } Ok(()) } fn mark(pwm: &Pwm, delay: Duration) -> Result<(), Error> { pwm.enable()?; if delay.as_micros() > 0 { sleep(delay); } Ok(()) } // Adapted from: https://github.com/z3t0/Arduino-IRremote/blob/master/irSend.cpp pub fn send(buffer: &Vec<Duration>) -> Result<(), Error> { let pwm = Pwm::with_frequency(Channel::Pwm0, 38_000.0, 0.5, Polarity::Normal, false)?; for (i, &delay) in buffer.iter().enumerate() { if i & 1 == 1 { space(&pwm, delay)?; } else { mark(&pwm, delay)?; } } pwm.disable()?; Ok(()) }
pub mod classical; pub mod independent;
use crate::aoc_utils::read_input; pub fn run(input_filename: &str) { let input = read_input(input_filename); let mut line_number: i32 = 0; let mut instructions: Vec<Instruction> = vec![]; for line_str in input.lines() { instructions.push(read_operation(line_number, line_str)); line_number += 1; } part1(&instructions); part2(&instructions); } #[derive(Debug, PartialEq)] // Added so you can debug print this enum enum OperationType { Nop, Acc, Jmp, } #[allow(dead_code)] struct Instruction { line: i32, operation: OperationType, number: i32, } impl std::fmt::Debug for Instruction { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> { f.debug_struct("Bag") .field("token", &self.operation) .field("number", &self.number) .finish() } } fn string_to_operation_type(in_str: &str) -> OperationType { let operation; match in_str.to_lowercase().as_ref() { "acc" => operation = OperationType::Acc, "jmp" => operation = OperationType::Jmp, "nop" => operation = OperationType::Nop, _ => { println!( "Unkown operation {}, falling back to nop", in_str.to_lowercase() ); operation = OperationType::Nop; } } return operation; } fn read_operation(line_number: i32, line: &str) -> Instruction { let parts: Vec<&str> = line.trim().split(' ').collect(); assert_eq!(parts.len(), 2, "Invalid input received"); let operation = string_to_operation_type(parts[0]); return Instruction { line: line_number, operation: operation, number: parts[1].parse().unwrap(), }; } fn run_program(instructions: &Vec<Instruction>) -> (i32, i32) { let mut acc: i32 = 0; let mut line: i32 = 0; let mut executed: Vec<i32> = vec![]; loop { let instruction = &instructions[line as usize]; executed.push(line); match instruction.operation { OperationType::Nop => { line += 1; } OperationType::Acc => { acc += instruction.number; line += 1; } OperationType::Jmp => { line += instruction.number; } } if line < 0 || line >= instructions.len() as i32 { break; } if executed.contains(&line) { break; } } return 
(acc, line); } fn part1(instructions: &Vec<Instruction>) { let (acc, _last_line) = run_program(instructions); println!("Part 1: {}", acc); } fn part2(instructions: &Vec<Instruction>) { let mut skip_index = 0; loop { let mut amended_instructions: Vec<Instruction> = vec![]; for (index, instruction) in instructions.iter().enumerate() { if skip_index == index { amended_instructions.push(Instruction { operation: { match instruction.operation { OperationType::Nop => OperationType::Jmp, OperationType::Acc => OperationType::Acc, OperationType::Jmp => OperationType::Nop, } }, number: instruction.number, line: instruction.line, }); } else { amended_instructions.push(Instruction { operation: { match instruction.operation { OperationType::Nop => OperationType::Nop, OperationType::Acc => OperationType::Acc, OperationType::Jmp => OperationType::Jmp, } }, number: instruction.number, line: instruction.line, }); } } let (acc, last_line) = run_program(&amended_instructions); if last_line >= instructions.len() as i32 { println!("Part 2: {}", acc); break; } skip_index += 1; if skip_index >= instructions.len() as i32 as usize { println!("No solution found?"); break; } } }
// Register reader for FDCAN_TXBRP (Tx buffer request pending).
// NOTE(review): this follows the svd2rust generated-code shape — if it is
// generated, fix the SVD description rather than editing this file (confirm).
#[doc = "Reader of register FDCAN_TXBRP"]
pub type R = crate::R<u32, super::FDCAN_TXBRP>;
#[doc = "TRP\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u32)]
pub enum TRP_A {
    #[doc = "0: No transmission request\r\n pending"]
    B_0X0 = 0,
    #[doc = "1: Transmission request\r\n pending"]
    B_0X1 = 1,
}
impl From<TRP_A> for u32 {
    #[inline(always)]
    fn from(variant: TRP_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `TRP`"]
pub type TRP_R = crate::R<u32, TRP_A>;
impl TRP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u32, TRP_A> {
        use crate::Variant::*;
        // Only 0 and 1 have named variants; any other raw value is passed
        // through as the reserved `Res` case.
        match self.bits {
            0 => Val(TRP_A::B_0X0),
            1 => Val(TRP_A::B_0X1),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == TRP_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == TRP_A::B_0X1
    }
}
impl R {
    #[doc = "Bits 0:31 - TRP"]
    #[inline(always)]
    pub fn trp(&self) -> TRP_R {
        // The field spans the whole 32-bit register, so the 0xffff_ffff mask
        // is a no-op kept for uniformity with narrower fields.
        TRP_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
// Types for PCD metadata.

use std::{iter::FromIterator, ops::Index};

/// Metadata read from a PCD file header.
#[derive(Debug, Clone, PartialEq)]
pub struct PcdMeta {
    pub version: String,
    pub width: u64,
    pub height: u64,
    pub viewpoint: ViewPoint,
    pub num_points: u64,
    pub data: DataKind,
    pub field_defs: Schema,
}

/// The VIEWPOINT header entry: a translation plus an orientation quaternion.
#[derive(Debug, Clone, PartialEq)]
pub struct ViewPoint {
    pub tx: f64,
    pub ty: f64,
    pub tz: f64,
    pub qw: f64,
    pub qx: f64,
    pub qy: f64,
    pub qz: f64,
}

impl Default for ViewPoint {
    /// The identity viewpoint: no translation, unit quaternion (`qw = 1`).
    fn default() -> Self {
        Self {
            tx: 0.0,
            ty: 0.0,
            tz: 0.0,
            qw: 1.0,
            qx: 0.0,
            qy: 0.0,
            qz: 0.0,
        }
    }
}

/// Whether point data is stored as ASCII text or raw binary.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DataKind {
    Ascii,
    Binary,
}

/// Coarse field category: signed integer, unsigned integer, or float.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TypeKind {
    I,
    U,
    F,
}

/// The concrete value type of a PCD field.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ValueKind {
    U8,
    U16,
    U32,
    I8,
    I16,
    I32,
    F32,
    F64,
}

/// A single field declaration: name, value type, and element count.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FieldDef {
    pub name: String,
    pub kind: ValueKind,
    pub count: u64,
}

/// An ordered collection of [`FieldDef`]s describing a PCD layout.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Schema {
    pub fields: Vec<FieldDef>,
}

impl Schema {
    /// `true` when no fields are defined.
    pub fn is_empty(&self) -> bool {
        self.fields.is_empty()
    }

    /// Number of field definitions.
    pub fn len(&self) -> usize {
        self.fields.len()
    }

    /// Iterates over the field definitions by reference.
    pub fn iter(&self) -> std::slice::Iter<'_, FieldDef> {
        self.fields.iter()
    }
}

impl Index<usize> for Schema {
    type Output = FieldDef;

    fn index(&self, index: usize) -> &Self::Output {
        &self.fields[index]
    }
}

impl IntoIterator for Schema {
    type Item = FieldDef;
    type IntoIter = std::vec::IntoIter<FieldDef>;

    fn into_iter(self) -> Self::IntoIter {
        self.fields.into_iter()
    }
}

impl<'a> IntoIterator for &'a Schema {
    type Item = &'a FieldDef;
    type IntoIter = std::slice::Iter<'a, FieldDef>;

    fn into_iter(self) -> Self::IntoIter {
        self.fields.iter()
    }
}

impl FromIterator<(String, ValueKind, u64)> for Schema {
    fn from_iter<T: IntoIterator<Item = (String, ValueKind, u64)>>(iter: T) -> Self {
        Self {
            fields: iter
                .into_iter()
                .map(|(name, kind, count)| FieldDef { name, kind, count })
                .collect(),
        }
    }
}

impl<'a> FromIterator<(&'a str, ValueKind, u64)> for Schema {
    fn from_iter<T: IntoIterator<Item = (&'a str, ValueKind, u64)>>(iter: T) -> Self {
        iter.into_iter()
            .map(|(name, kind, count)| (name.to_string(), kind, count))
            .collect()
    }
}

impl FromIterator<FieldDef> for Schema {
    fn from_iter<T: IntoIterator<Item = FieldDef>>(iter: T) -> Self {
        Self {
            fields: iter.into_iter().collect(),
        }
    }
}

impl<'a> FromIterator<&'a FieldDef> for Schema {
    fn from_iter<T: IntoIterator<Item = &'a FieldDef>>(iter: T) -> Self {
        Self {
            fields: iter.into_iter().cloned().collect(),
        }
    }
}
// Integration tests for the `cargo-fuzz` binary: each test builds a throwaway
// project (via the `project` helper module), drives the real binary with
// `assert_cmd`, and checks its output with `predicates`.
pub mod project;
use self::project::*;
use assert_cmd::prelude::*;
use predicates::prelude::*;
use std::fs;
use std::path::Path;
use std::process::Command;

// Locates the `cargo-fuzz` binary built for this test run.
fn cargo_fuzz() -> Command {
    Command::cargo_bin("cargo-fuzz").unwrap()
}

// `cargo fuzz help` exits successfully.
#[test]
fn help() {
    cargo_fuzz().arg("help").assert().success();
}

// `cargo fuzz init` scaffolds the fuzz directory and a runnable default target.
#[test]
fn init() {
    let project = project("init").build();
    project.cargo_fuzz().arg("init").assert().success();
    assert!(project.fuzz_dir().is_dir());
    assert!(project.fuzz_cargo_toml().is_file());
    assert!(project.fuzz_targets_dir().is_dir());
    assert!(project.fuzz_target_path("fuzz_target_1").is_file());
    project
        .cargo_fuzz()
        .arg("run")
        .arg("fuzz_target_1")
        .arg("--")
        .arg("-runs=1")
        .assert()
        .success();
}

// `init -t <name>` uses the custom target name instead of `fuzz_target_1`.
#[test]
fn init_with_target() {
    let project = project("init_with_target").build();
    project
        .cargo_fuzz()
        .arg("init")
        .arg("-t")
        .arg("custom_target_name")
        .assert()
        .success();
    assert!(project.fuzz_dir().is_dir());
    assert!(project.fuzz_cargo_toml().is_file());
    assert!(project.fuzz_targets_dir().is_dir());
    assert!(project.fuzz_target_path("custom_target_name").is_file());
    project
        .cargo_fuzz()
        .arg("run")
        .arg("custom_target_name")
        .arg("--")
        .arg("-runs=1")
        .assert()
        .success();
}

// Re-running `init` must fail because the fuzz directory already exists.
#[test]
fn init_twice() {
    let project = project("init_twice").build();
    // First init should succeed and make all the things.
    project.cargo_fuzz().arg("init").assert().success();
    assert!(project.fuzz_dir().is_dir());
    assert!(project.fuzz_cargo_toml().is_file());
    assert!(project.fuzz_targets_dir().is_dir());
    assert!(project.fuzz_target_path("fuzz_target_1").is_file());
    // Second init should fail.
    project
        .cargo_fuzz()
        .arg("init")
        .assert()
        .stderr(predicates::str::contains("File exists (os error 17)").and(
            predicates::str::contains(format!(
                "failed to create directory {}",
                project.fuzz_dir().display()
            )),
        ))
        .failure();
}

// `init` run from a subdirectory still finds the enclosing cargo project.
#[test]
fn init_finds_parent_project() {
    let project = project("init_finds_parent_project").build();
    project
        .cargo_fuzz()
        .current_dir(project.root().join("src"))
        .arg("init")
        .assert()
        .success();
    assert!(project.fuzz_dir().is_dir());
    assert!(project.fuzz_cargo_toml().is_file());
    assert!(project.fuzz_targets_dir().is_dir());
    assert!(project.fuzz_target_path("fuzz_target_1").is_file());
}

// `cargo fuzz add` creates a new runnable target and registers its [[bin]]
// entry (with test/doc disabled) in the fuzz Cargo.toml.
#[test]
fn add() {
    let project = project("add").with_fuzz().build();
    project
        .cargo_fuzz()
        .arg("add")
        .arg("new_fuzz_target")
        .assert()
        .success();
    assert!(project.fuzz_target_path("new_fuzz_target").is_file());
    assert!(project.fuzz_cargo_toml().is_file());
    let cargo_toml = fs::read_to_string(project.fuzz_cargo_toml()).unwrap();
    let expected_bin_attrs = "test = false\ndoc = false";
    assert!(cargo_toml.contains(expected_bin_attrs));
    project
        .cargo_fuzz()
        .arg("run")
        .arg("new_fuzz_target")
        .arg("--")
        .arg("-runs=1")
        .assert()
        .success();
}

// Adding the same target twice must fail with a clear error.
#[test]
fn add_twice() {
    let project = project("add").with_fuzz().build();
    project
        .cargo_fuzz()
        .arg("add")
        .arg("new_fuzz_target")
        .assert()
        .success();
    assert!(project.fuzz_target_path("new_fuzz_target").is_file());
    project
        .cargo_fuzz()
        .arg("add")
        .arg("new_fuzz_target")
        .assert()
        .stderr(
            predicate::str::contains("could not add target")
                .and(predicate::str::contains("File exists (os error 17)")),
        )
        .failure();
}

// `cargo fuzz list` prints targets sorted regardless of creation order.
#[test]
fn list() {
    let project = project("add").with_fuzz().build();
    // Create some targets.
    project.cargo_fuzz().arg("add").arg("c").assert().success();
    project.cargo_fuzz().arg("add").arg("b").assert().success();
    project.cargo_fuzz().arg("add").arg("a").assert().success();
    // Make sure that we can list our targets, and that they're always sorted.
    project
        .cargo_fuzz()
        .arg("list")
        .assert()
        .stdout("a\nb\nc\n")
        .success();
}

// A non-crashing target completes all runs, and the repro-only diagnostic
// (gated on `fuzzing_repro`) is NOT printed during a normal run.
#[test]
fn run_no_crash() {
    let project = project("run_no_crash")
        .with_fuzz()
        .fuzz_target(
            "no_crash",
            r#"
                #![no_main]
                use libfuzzer_sys::fuzz_target;

                fuzz_target!(|data: &[u8]| {
                    #[cfg(fuzzing_repro)]
                    eprintln!("Reproducing a crash");
                    run_no_crash::pass_fuzzing(data);
                });
            "#,
        )
        .build();
    project
        .cargo_fuzz()
        .arg("run")
        .arg("no_crash")
        .arg("--")
        .arg("-runs=1000")
        .assert()
        .stderr(
            predicate::str::contains("Done 1000 runs")
                .and(predicate::str::contains("Reproducing a crash").not()),
        )
        .success();
}

// A crashing target fails the run and prints the full crash report:
// panic message, backtrace symbol, artifact path, and the repro/tmin hints.
#[test]
fn run_with_crash() {
    let project = project("run_with_crash")
        .with_fuzz()
        .fuzz_target(
            "yes_crash",
            r#"
                #![no_main]
                use libfuzzer_sys::fuzz_target;

                fuzz_target!(|data: &[u8]| {
                    run_with_crash::fail_fuzzing(data);
                });
            "#,
        )
        .build();
    project
        .cargo_fuzz()
        .arg("run")
        .arg("yes_crash")
        .arg("--")
        .arg("-runs=1000")
        .env("RUST_BACKTRACE", "1")
        .assert()
        .stderr(
            predicate::str::contains("panicked at 'I'm afraid of number 7'")
                .and(predicate::str::contains("ERROR: libFuzzer: deadly signal"))
                .and(predicate::str::contains("run_with_crash::fail_fuzzing"))
                .and(predicate::str::contains(
                    "──────────────────────────────────────────────────────────────────────────────\n\
                     \n\
                     Failing input:\n\
                     \n\
                     \tfuzz/artifacts/yes_crash/crash-"
                ))
                .and(predicate::str::contains("Output of `std::fmt::Debug`:"))
                .and(predicate::str::contains(
                    "Reproduce with:\n\
                     \n\
                     \tcargo fuzz run yes_crash fuzz/artifacts/yes_crash/crash-"
                ))
                .and(predicate::str::contains(
                    "Minimize test case with:\n\
                     \n\
                     \tcargo fuzz tmin yes_crash fuzz/artifacts/yes_crash/crash-"
                )),
        )
        .failure();
}

// `cargo fuzz coverage` merges profiling data and writes coverage.profdata.
#[test]
fn run_with_coverage() {
    let target = "with_coverage";
    let project = project("run_with_coverage")
        .with_fuzz()
        .fuzz_target(
            target,
            r#"
                #![no_main]
                use libfuzzer_sys::fuzz_target;

                fuzz_target!(|data: &[u8]| {
                    println!("{:?}", data);
                });
            "#,
        )
        .build();
    project
        .cargo_fuzz()
        .arg("run")
        .arg(target)
        .arg("--")
        .arg("-runs=100")
        .assert()
        .stderr(predicate::str::contains("Done 100 runs"))
        .success();
    project
        .cargo_fuzz()
        .arg("coverage")
        .arg(target)
        .assert()
        .stderr(predicate::str::contains("Coverage data merged and saved"))
        .success();
    let profdata_file = project.fuzz_coverage_dir(target).join("coverage.profdata");
    assert!(profdata_file.exists(), "Coverage data file not generated");
}

// Same crash-report expectations as `run_with_crash`, but with the
// sanitizer disabled via `-sanitizer=none`.
#[test]
fn run_without_sanitizer_with_crash() {
    let project = project("run_without_sanitizer_with_crash")
        .with_fuzz()
        .fuzz_target(
            "yes_crash",
            r#"
                #![no_main]
                use libfuzzer_sys::fuzz_target;

                fuzz_target!(|data: &[u8]| {
                    run_without_sanitizer_with_crash::fail_fuzzing(data);
                });
            "#,
        )
        .build();
    project
        .cargo_fuzz()
        .arg("run")
        .arg("yes_crash")
        .arg("--")
        .arg("-runs=1000")
        .arg("-sanitizer=none")
        .env("RUST_BACKTRACE", "1")
        .assert()
        .stderr(
            predicate::str::contains("panicked at 'I'm afraid of number 7'")
                .and(predicate::str::contains("ERROR: libFuzzer: deadly signal"))
                .and(predicate::str::contains("run_without_sanitizer_with_crash::fail_fuzzing"))
                .and(predicate::str::contains(
                    "──────────────────────────────────────────────────────────────────────────────\n\
                     \n\
                     Failing input:\n\
                     \n\
                     \tfuzz/artifacts/yes_crash/crash-"
                ))
                .and(predicate::str::contains("Output of `std::fmt::Debug`:"))
                .and(predicate::str::contains(
                    "Reproduce with:\n\
                     \n\
                     \tcargo fuzz run yes_crash fuzz/artifacts/yes_crash/crash-"
                ))
                .and(predicate::str::contains(
                    "Minimize test case with:\n\
                     \n\
                     \tcargo fuzz tmin yes_crash fuzz/artifacts/yes_crash/crash-"
                )),
        )
        .failure();
}

// TODO: these msan tests are crashing `rustc` in CI:
// https://github.com/rust-fuzz/cargo-fuzz/issues/323
//
// #[test]
// fn
run_with_msan_no_crash() { // let project = project("run_with_msan_no_crash") // .with_fuzz() // .fuzz_target( // "msan_no_crash", // r#" // #![no_main] // use libfuzzer_sys::fuzz_target; // // fuzz_target!(|data: &[u8]| { // // get data from fuzzer and print it // // to force a memory access that cannot be optimized out // if let Some(x) = data.get(0) { // dbg!(x); // } // }); // "#, // ) // .build(); // // project // .cargo_fuzz() // .arg("run") // .arg("--sanitizer=memory") // .arg("msan_no_crash") // .arg("--") // .arg("-runs=1000") // .assert() // .stderr(predicate::str::contains("Done 1000 runs")) // .success(); // } // // #[test] // fn run_with_msan_with_crash() { // let project = project("run_with_msan_with_crash") // .with_fuzz() // .fuzz_target( // "msan_with_crash", // r#" // #![no_main] // use libfuzzer_sys::fuzz_target; // // fuzz_target!(|data: &[u8]| { // let test_data: Vec<u8> = Vec::with_capacity(4); // let uninitialized_value = unsafe {test_data.get_unchecked(0)}; // // prevent uninit read from being optimized out // println!("{}", uninitialized_value); // }); // "#, // ) // .build(); // // project // .cargo_fuzz() // .arg("run") // .arg("--sanitizer=memory") // .arg("msan_with_crash") // .arg("--") // .arg("-runs=1000") // .assert() // .stderr( // predicate::str::contains("MemorySanitizer: use-of-uninitialized-value") // .and(predicate::str::contains( // "Reproduce with:\n\ // \n\ // \tcargo fuzz run --sanitizer=memory msan_with_crash fuzz/artifacts/msan_with_crash/crash-", // )) // .and(predicate::str::contains( // "Minimize test case with:\n\ // \n\ // \tcargo fuzz tmin --sanitizer=memory msan_with_crash fuzz/artifacts/msan_with_crash/crash-", // )), // ) // .failure(); // } #[test] fn run_one_input() { let corpus = Path::new("fuzz").join("corpus").join("run_one"); let project = project("run_one_input") .with_fuzz() .fuzz_target( "run_one", r#" #![no_main] use libfuzzer_sys::fuzz_target; fuzz_target!(|data: &[u8]| { #[cfg(fuzzing_repro)] 
eprintln!("Reproducing a crash"); assert!(data.is_empty()); }); "#, ) .file(corpus.join("pass"), "") .file(corpus.join("fail"), "not empty") .build(); project .cargo_fuzz() .arg("run") .arg("run_one") .arg(corpus.join("pass")) .assert() .stderr( predicate::str::contains("Running 1 inputs 1 time(s) each.") .and(predicate::str::contains( "Running: fuzz/corpus/run_one/pass", )) .and(predicate::str::contains("Reproducing a crash")), ) .success(); } #[test] fn run_a_few_inputs() { let corpus = Path::new("fuzz").join("corpus").join("run_few"); let project = project("run_a_few_inputs") .with_fuzz() .fuzz_target( "run_few", r#" #![no_main] use libfuzzer_sys::fuzz_target; fuzz_target!(|data: &[u8]| { assert!(data.len() != 4); }); "#, ) .file(corpus.join("pass-0"), "") .file(corpus.join("pass-1"), "1") .file(corpus.join("pass-2"), "12") .file(corpus.join("pass-3"), "123") .file(corpus.join("fail"), "fail") .build(); project .cargo_fuzz() .arg("run") .arg("run_few") .arg(corpus.join("pass-0")) .arg(corpus.join("pass-1")) .arg(corpus.join("pass-2")) .arg(corpus.join("pass-3")) .assert() .stderr( predicate::str::contains("Running 4 inputs 1 time(s) each.").and( predicate::str::contains("Running: fuzz/corpus/run_few/pass"), ), ) .success(); } #[test] fn run_alt_corpus() { let corpus = Path::new("fuzz").join("corpus").join("run_alt"); let alt_corpus = Path::new("fuzz").join("alt-corpus").join("run_alt"); let project = project("run_alt_corpus") .with_fuzz() .fuzz_target( "run_alt", r#" #![no_main] use libfuzzer_sys::fuzz_target; fuzz_target!(|data: &[u8]| { assert!(data.len() <= 1); }); "#, ) .file(corpus.join("fail"), "fail") .file(alt_corpus.join("pass-0"), "0") .file(alt_corpus.join("pass-1"), "1") .file(alt_corpus.join("pass-2"), "2") .build(); project .cargo_fuzz() .arg("run") .arg("run_alt") .arg(&alt_corpus) .arg("--") .arg("-runs=0") .assert() .stderr( predicate::str::contains("3 files found in fuzz/alt-corpus/run_alt") 
.and(predicate::str::contains("fuzz/corpus/run_alt").not()) // libFuzzer will always test the empty input, so the number of // runs performed is always one more than the number of files in // the corpus. .and(predicate::str::contains("Done 4 runs in")), ) .success(); } #[test] fn debug_fmt() { let corpus = Path::new("fuzz").join("corpus").join("debugfmt"); let project = project("debugfmt") .with_fuzz() .fuzz_target( "debugfmt", r#" #![no_main] use libfuzzer_sys::fuzz_target; use libfuzzer_sys::arbitrary::{Arbitrary, Unstructured, Result}; #[derive(Debug)] pub struct Rgb { r: u8, g: u8, b: u8, } impl<'a> Arbitrary<'a> for Rgb { fn arbitrary(raw: &mut Unstructured<'a>) -> Result<Self> { let mut buf = [0; 3]; raw.fill_buffer(&mut buf)?; let r = buf[0]; let g = buf[1]; let b = buf[2]; Ok(Rgb { r, g, b }) } } fuzz_target!(|data: Rgb| { let _ = data; }); "#, ) .file(corpus.join("0"), "111") .build(); project .cargo_fuzz() .arg("fmt") .arg("debugfmt") .arg("fuzz/corpus/debugfmt/0") .assert() .stderr(predicates::str::contains( " Rgb { r: 49, g: 49, b: 49, }", )) .success(); } #[test] fn cmin() { let corpus = Path::new("fuzz").join("corpus").join("foo"); let project = project("cmin") .with_fuzz() .fuzz_target( "foo", r#" #![no_main] use libfuzzer_sys::fuzz_target; fuzz_target!(|data: &[u8]| { let _ = data; }); "#, ) .file(corpus.join("0"), "") .file(corpus.join("1"), "a") .file(corpus.join("2"), "ab") .file(corpus.join("3"), "abc") .file(corpus.join("4"), "abcd") .build(); let corpus_count = || { fs::read_dir(project.root().join("fuzz").join("corpus").join("foo")) .unwrap() .count() }; assert_eq!(corpus_count(), 5); project .cargo_fuzz() .arg("cmin") .arg("foo") .assert() .success(); assert_eq!(corpus_count(), 1); } #[test] fn tmin() { let corpus = Path::new("fuzz").join("corpus").join("i_hate_zed"); let test_case = corpus.join("test-case"); let project = project("tmin") .with_fuzz() .fuzz_target( "i_hate_zed", r#" #![no_main] use libfuzzer_sys::fuzz_target; 
fuzz_target!(|data: &[u8]| { let s = String::from_utf8_lossy(data); if s.contains('z') { panic!("nooooooooo"); } }); "#, ) .file(&test_case, "pack my box with five dozen liquor jugs") .build(); let test_case = project.root().join(test_case); project .cargo_fuzz() .arg("tmin") .arg("i_hate_zed") .arg("--sanitizer=none") .arg(&test_case) .assert() .stderr( predicates::str::contains("CRASH_MIN: minimizing crash input: ") .and(predicate::str::contains("(1 bytes) caused a crash")) .and(predicate::str::contains( "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€\n\ \n\ Minimized artifact:\n\ \n\ \tfuzz/artifacts/i_hate_zed/minimized-from-")) .and(predicate::str::contains( "Reproduce with:\n\ \n\ \tcargo fuzz run --sanitizer=none i_hate_zed fuzz/artifacts/i_hate_zed/minimized-from-" )), ) .success(); } #[test] fn build_all() { let project = project("build_all").with_fuzz().build(); // Create some targets. project .cargo_fuzz() .arg("add") .arg("build_all_a") .assert() .success(); project .cargo_fuzz() .arg("add") .arg("build_all_b") .assert() .success(); // Build to ensure that the build directory is created and // `fuzz_build_dir()` won't panic. project.cargo_fuzz().arg("build").assert().success(); let build_dir = project.fuzz_build_dir().join("release"); let a_bin = build_dir.join("build_all_a"); let b_bin = build_dir.join("build_all_b"); // Remove the files we just built. fs::remove_file(&a_bin).unwrap(); fs::remove_file(&b_bin).unwrap(); assert!(!a_bin.is_file()); assert!(!b_bin.is_file()); // Test that building all fuzz targets does in fact recreate the files. project.cargo_fuzz().arg("build").assert().success(); assert!(a_bin.is_file()); assert!(b_bin.is_file()); } #[test] fn build_one() { let project = project("build_one").with_fuzz().build(); // Create some targets. 
project .cargo_fuzz() .arg("add") .arg("build_one_a") .assert() .success(); project .cargo_fuzz() .arg("add") .arg("build_one_b") .assert() .success(); // Build to ensure that the build directory is created and // `fuzz_build_dir()` won't panic. project.cargo_fuzz().arg("build").assert().success(); let build_dir = project.fuzz_build_dir().join("release"); let a_bin = build_dir.join("build_one_a"); let b_bin = build_dir.join("build_one_b"); // Remove the files we just built. fs::remove_file(&a_bin).unwrap(); fs::remove_file(&b_bin).unwrap(); assert!(!a_bin.is_file()); assert!(!b_bin.is_file()); // Test that we can build one and not the other. project .cargo_fuzz() .arg("build") .arg("build_one_a") .assert() .success(); assert!(a_bin.is_file()); assert!(!b_bin.is_file()); } #[test] fn build_dev() { let project = project("build_dev").with_fuzz().build(); // Create some targets. project .cargo_fuzz() .arg("add") .arg("build_dev_a") .assert() .success(); project .cargo_fuzz() .arg("add") .arg("build_dev_b") .assert() .success(); // Build to ensure that the build directory is created and // `fuzz_build_dir()` won't panic. project .cargo_fuzz() .arg("build") .arg("--dev") .assert() .success(); let build_dir = project.fuzz_build_dir().join("debug"); let a_bin = build_dir.join("build_dev_a"); let b_bin = build_dir.join("build_dev_b"); // Remove the files we just built. fs::remove_file(&a_bin).unwrap(); fs::remove_file(&b_bin).unwrap(); assert!(!a_bin.is_file()); assert!(!b_bin.is_file()); // Test that building all fuzz targets does in fact recreate the files. project .cargo_fuzz() .arg("build") .arg("--dev") .assert() .success(); assert!(a_bin.is_file()); assert!(b_bin.is_file()); } #[test] fn build_stripping_dead_code() { let project = project("build_strip").with_fuzz().build(); // Create some targets. 
project .cargo_fuzz() .arg("add") .arg("build_strip_a") .assert() .success(); project .cargo_fuzz() .arg("build") .arg("--strip-dead-code") .arg("--dev") .assert() .success(); let build_dir = project.fuzz_build_dir().join("debug"); let a_bin = build_dir.join("build_strip_a"); assert!(a_bin.is_file(), "Not a file: {}", a_bin.display()); } #[test] fn run_with_different_fuzz_dir() { let (fuzz_dir, mut project_builder) = project_with_fuzz_dir( "project_likes_to_move_it", Some("dir_likes_to_move_it_move_it"), ); let project = project_builder .with_fuzz() .fuzz_target( "you_like_to_move_it", r#" #![no_main] use libfuzzer_sys::fuzz_target; fuzz_target!(|_data: &[u8]| { }); "#, ) .build(); project .cargo_fuzz() .arg("run") .arg("--fuzz-dir") .arg(fuzz_dir) .arg("you_like_to_move_it") .arg("--") .arg("-runs=1") .assert() .stderr(predicate::str::contains("Done 2 runs")) .success(); } #[test] fn run_diagnostic_contains_fuzz_dir() { let (fuzz_dir, mut project_builder) = project_with_fuzz_dir("run_with_crash", None); let project = project_builder .with_fuzz() .fuzz_target( "yes_crash", r#" #![no_main] use libfuzzer_sys::fuzz_target; fuzz_target!(|data: &[u8]| { run_with_crash::fail_fuzzing(data); }); "#, ) .build(); let run = format!( "cargo fuzz run --fuzz-dir {} yes_crash custom_dir/artifacts/yes_crash", &fuzz_dir ); let tmin = format!( "cargo fuzz tmin --fuzz-dir {} yes_crash custom_dir/artifacts/yes_crash", &fuzz_dir ); project .cargo_fuzz() .arg("run") .arg("--fuzz-dir") .arg(fuzz_dir) .arg("yes_crash") .arg("--") .arg("-runs=1000") .assert() .stderr(predicates::str::contains(run).and(predicate::str::contains(tmin))) .failure(); } fn project_with_fuzz_dir( project_name: &str, fuzz_dir_opt: Option<&str>, ) -> (String, ProjectBuilder) { let fuzz_dir = fuzz_dir_opt.unwrap_or("custom_dir"); let next_root = next_root(); let fuzz_dir_pb = next_root.join(fuzz_dir); let fuzz_dir_sting = fuzz_dir_pb.display().to_string(); let pb = project_with_params(project_name, next_root, 
fuzz_dir_pb); (fuzz_dir_sting, pb) }
use serde::{Deserialize, Serialize}; use std::{collections::HashMap, error::Error, fmt::Display, fmt, fs::OpenOptions, io::{Read, Write}, path::Path}; #[derive(Debug)] pub enum SettingsError { ParseError(String), WriteParseError(String), WriteError, ReadError, } impl Error for SettingsError {} impl Display for SettingsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { SettingsError::ParseError(msg) => { write!(f, "Unable to parse settings file: {}", msg) }, SettingsError::WriteParseError(msg) => { write!(f, "Unable to write toml to file due to internal parsing error: {}", msg) }, SettingsError::WriteError => { write!(f, "Unable to write to settings file.") }, SettingsError::ReadError => { write!(f, "Unable to read from settings file.") }, } } } #[derive(Serialize, Deserialize, Debug)] pub struct Settings { pub basic: Basic, pub servers: Vec<Server>, pub databases: Vec<DatabaseConnection>, pub event_clauses: Vec<EventClause>, } #[derive(Serialize, Deserialize, Debug)] pub struct Basic { pub target_directory: String, pub directory_per_server: bool } const SETTINGS_FILE: &str = "settings.toml"; impl Settings { // On init we will get settings from the config file. // If it doesnt exist we create a new one and place in the default settings. // The settings file is stored in toml format, if there is an error parsing we will print the error and exit. pub fn init() -> Result<Settings, SettingsError> { // Lets check if the file exists: let settings_file = Path::new(SETTINGS_FILE); if !settings_file.exists() { let settings = Settings::default(); // Lets convert the settings to a toml string and write it to the file. let toml = match toml::to_string(&settings) { Ok(str) => str, Err(e) => { return Err(SettingsError::WriteParseError(e.to_string())); } }; // Lets open and write our file with OpenOptions. 
let mut f = match OpenOptions::new() .write(true) .create(true) .open(SETTINGS_FILE) { Ok(f) => f, Err(_e) => { return Err(SettingsError::WriteError); } }; // Lets write our toml string to the file. match write!(f, "{}", toml) { Ok(_) => { return Ok(settings); }, Err(_e) => { return Err(SettingsError::WriteError); } }; } else { // Lets read the settings file. let mut f = match OpenOptions::new() .read(true) .open(SETTINGS_FILE) { Ok(f) => f, Err(_e) => { return Err(SettingsError::ReadError); } }; // Lets parse the settings file. let mut toml = String::from(""); let _size = f.read_to_string(&mut toml); let settings:Settings = match toml::from_str(&mut toml) { Ok(settings) => settings, Err(e) => { return Err(SettingsError::ParseError(e.to_string())); } }; Ok(settings) } } } // If we want to store a event into a database we are going to need 2 things: // A database connection. // A clause indicating what events we want to store, and how. // Lets create a clause struct: // This will be used to store the event into the database. // It will contain: // - Event name // - HashMap containing a link between event data and the database columns. // - Database connection id, and the table name. #[derive(Serialize, Deserialize, Debug)] pub struct EventClause { pub event_name: String, pub db_connection_id: String, pub db_table: String, pub event_data_link: HashMap<String, String>, } // Now we want the ability to store multiple database connections, we will give them a unique string id to identify them. // Lets create a struct to hold the database connection information. #[derive(Serialize, Deserialize, Debug)] pub struct DatabaseConnection { pub id: String, pub host: String, pub port: i32, pub user: String, pub password: String, pub database: String } // Represents a AMI Asterisk Server instance to be monitored. 
#[derive(Serialize, Deserialize, Debug, Clone)] pub struct Server { pub name: String, pub host: String, pub port: u16, pub username: String, pub password: String, } impl Default for Server { fn default() -> Self { Server { name: String::from("Example"), host: String::from("127.0.0.1"), port: 5038, username: String::from("admin"), password: String::from("admin"), } } } impl Default for Settings { fn default() -> Self { Settings { basic: Basic::default(), servers: vec![ Server::default(), ], databases: vec![ DatabaseConnection::default(), DatabaseConnection::default(), ], event_clauses: vec![ EventClause::default(), EventClause::default(), ], } } } impl Default for Basic { fn default() -> Self { Basic { target_directory: String::from("events"), directory_per_server: false } } } impl Default for EventClause { fn default() -> Self { EventClause { event_name: String::from("example"), event_data_link: [ (String::from("example_event_property"), String::from("example_db_column")), (String::from("example_event_property_2"), String::from("example_db_column_2")), ].iter().cloned().collect(), db_connection_id: String::from("example"), db_table: String::from("example") } } } impl Default for DatabaseConnection { fn default() -> Self { DatabaseConnection { id: String::from("example"), host: String::from("example.com"), port: 3306, user: String::from("example"), password: String::from("example"), database: String::from("example") } } }
use std::path::Path; use crate::{NodeJsEngineBuildpack, NodeJsEngineBuildpackError}; use libcnb::additional_buildpack_binary_path; use libcnb::build::BuildContext; use libcnb::data::layer_content_metadata::LayerTypes; use libcnb::generic::GenericMetadata; use libcnb::layer::{Layer, LayerResult, LayerResultBuilder}; /// A layer that sets `WEB_MEMORY` and `WEB_CONCURRENCY` via exec.d pub struct WebEnvLayer; impl Layer for WebEnvLayer { type Buildpack = NodeJsEngineBuildpack; type Metadata = GenericMetadata; fn types(&self) -> LayerTypes { LayerTypes { build: false, launch: true, cache: false, } } fn create( &self, _context: &BuildContext<Self::Buildpack>, _layer_path: &Path, ) -> Result<LayerResult<Self::Metadata>, NodeJsEngineBuildpackError> { LayerResultBuilder::new(GenericMetadata::default()) .exec_d_program("web_env", additional_buildpack_binary_path!("web_env")) .build() } }
// NOTE(review): this register API matches the svd2rust output pattern
// (FieldReader/FieldWriter/RegisterSpec); if so, prefer regenerating from the
// SVD file over hand-editing — confirm before modifying.
#[doc = "Register `CWD` reader"]
pub type R = crate::R<CWD_SPEC>;
#[doc = "Register `CWD` writer"]
pub type W = crate::W<CWD_SPEC>;
#[doc = "Field `WDC` reader - WDC"]
pub type WDC_R = crate::FieldReader<u16>;
#[doc = "Field `WDC` writer - WDC"]
pub type WDC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
#[doc = "Field `WDV` reader - WDV"]
pub type WDV_R = crate::FieldReader<u16>;
#[doc = "Field `WDV` writer - WDV"]
pub type WDV_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
impl R {
    #[doc = "Bits 0:15 - WDC"]
    #[inline(always)]
    pub fn wdc(&self) -> WDC_R {
        // Low halfword of the 32-bit register.
        WDC_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bits 16:31 - WDV"]
    #[inline(always)]
    pub fn wdv(&self) -> WDV_R {
        // High halfword of the 32-bit register.
        WDV_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - WDC"]
    #[inline(always)]
    #[must_use]
    pub fn wdc(&mut self) -> WDC_W<CWD_SPEC, 0> {
        WDC_W::new(self)
    }
    #[doc = "Bits 16:31 - WDV"]
    #[inline(always)]
    #[must_use]
    pub fn wdv(&mut self) -> WDV_W<CWD_SPEC, 16> {
        WDV_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Calibration Watchdog Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cwd::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cwd::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register.
See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CWD_SPEC;
impl crate::RegisterSpec for CWD_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`cwd::R`](R) reader structure"]
impl crate::Readable for CWD_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cwd::W`](W) writer structure"]
impl crate::Writable for CWD_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CWD to value 0"]
impl crate::Resettable for CWD_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use std::borrow::Cow; use std::fs; use std::io; use std::path::{Path, PathBuf}; use std::result::Result as ResultOf; use structopt::StructOpt; type Error = Cow<'static, str>; type Result<T = (), E = Error> = ResultOf<T, E>; #[derive(Debug, StructOpt)] #[structopt(name = "bfc")] struct Args { /// Input .bf file #[structopt(parse(from_os_str))] file: PathBuf, /// Output .asm file, default <file> with file extension changed #[structopt(short, long)] out: Option<PathBuf>, /// The tape size to allocate in the output program #[structopt(long, default_value = "1048576")] tape_size: u64, } mod code; use code::Code; fn main() -> Result { let args = Args::from_args(); let code = read_code(&args.file)?; let out_file = args .out .as_ref() .map_or_else(|| Cow::Owned(change_ext(&args.file, "asm")), Cow::Borrowed); compile(code.iter().cloned(), out_file.as_ref(), args.tape_size) .map_err(|err| format!("Error compiling to {}: {}", out_file.display(), err))?; println!("Done! Output has been written to {}.", out_file.display()); println!("You can compile it by running the following commands:"); let obj_file = change_ext(&out_file, "o"); println!( " nasm -f elf64 -o {} {}", change_ext(&out_file, "o").display(), out_file.display() ); println!( " ld -o {} {}", change_ext(&out_file, "exe").display(), obj_file.display() ); Ok(()) } fn change_ext(path: &PathBuf, ext: &str) -> PathBuf { let mut clone = path.clone(); clone.set_extension(ext); clone } fn read_code(file: &PathBuf) -> Result<Vec<Code>> { use std::convert::TryFrom; use std::io::Read; let mut vec = vec![]; for byte in fs::File::open(file) .map_err(|err| format!("Cannot open {}: {}", file.display(), err))? 
.bytes() { let byte = byte.map_err(|err| format!("Cannot read from {}: {}", file.display(), err))?; if let Ok(code) = Code::try_from(char::from(byte)) { vec.push(code); } } Ok(vec) } fn compile<I, P>(codes: I, out_file: &P, tape_size: u64) -> io::Result<()> where I: IntoIterator<Item = Code>, P: AsRef<Path>, { use std::io::Write; let mut out = fs::File::create(out_file)?; writeln!(out, "section .bss")?; writeln!(out, " tape_ptr RESQ 1")?; writeln!(out, " tape RESB {}", tape_size)?; writeln!(out, "section .text")?; writeln!(out, " global _start")?; writeln!(out, "_start:")?; writeln!(out, " mov EAX, tape+{}", tape_size / 2)?; let mut loop_open = 0usize; let mut loop_close = 0usize; for code in codes { match code { Code::MemInc => writeln!(out, " inc BYTE [EAX]")?, Code::MemDec => writeln!(out, " dec BYTE [EAX]")?, Code::PtrInc => writeln!(out, " inc EAX")?, Code::PtrDec => writeln!(out, " dec EAX")?, Code::SysWrite => { writeln!(out, " mov tape_ptr, eax")?; writeln!(out, " mov eax, [tape_ptr]")?; writeln!(out, " mov ebx [tape_ptr+4]")?; writeln!(out, " mov ecx, [tape_ptr+8]")?; writeln!(out, " mov edx, [tape_ptr+12]")?; writeln!(out, " mov esi, [tape_ptr+16]")?; writeln!(out, " mov edi, [tape_ptr+20]")?; writeln!(out, " int 0x80")?; writeln!(out, " mov [tape_ptr], eax")?; writeln!(out, " mov eax, tape_ptr")?; } Code::SysRead => { writeln!(out, " mov [eax], [[eax]]")?; } Code::LoopStart => { loop_open += 1; writeln!(out, "label_{}:", loop_open)?; } Code::LoopEnd => { loop_close += 1; if loop_close > loop_open { return Err(io::Error::new( io::ErrorKind::Other, "Compile error: Found a `]` code without a matching `[`", ))?; } writeln!(out, " jne label_{}", loop_close)?; } } } if loop_open > loop_close { return Err(io::Error::new( io::ErrorKind::Other, "Compile error: Reached end of file with {} `[` code(s) unclosed", ))?; } Ok(()) }
use ffsvm::{Attribute, Header, ModelFile, SupportVector}; use rand::Rng; pub fn random_dense<'b>(svm_type: &'b str, kernel_type: &'b str, total_sv: u32, attr: u32) -> ModelFile<'b> { let mut rng = rand::thread_rng(); ModelFile { header: Header { svm_type, kernel_type, total_sv, gamma: Some(rng.gen::<f32>()), coef0: Some(rng.gen::<f32>()), degree: Some(rng.gen_range(1, 10)), nr_class: 2, rho: vec![rng.gen::<f64>()], label: vec![0, 1], prob_a: Some(vec![rng.gen::<f64>(), rng.gen::<f64>()]), prob_b: Some(vec![rng.gen::<f64>(), rng.gen::<f64>()]), nr_sv: vec![total_sv / 2, total_sv / 2], }, vectors: (0 .. total_sv) .map(|_| SupportVector { coefs: vec![rng.gen::<f32>()], features: (0 .. attr) .map(|i| Attribute { index: i, value: rng.gen::<f32>(), }) .collect(), }) .collect(), } }
fn main() { // dependencies println!("cargo:rustc-link-search=../kjsl_c_lib/cmake-build-debug/"); println!("cargo:rustc-link-lib=static=kjsl_c_lib"); }
#![no_std] #![no_main] #![feature(custom_test_frameworks)] #![test_runner(glade::test_runner)] #![reexport_test_harness_main = "test_main"] use core::panic::PanicInfo; use glade::{print, println, sprint, sprintln}; #[no_mangle] pub extern "C" fn _start() -> ! { test_main(); glade::hlt_loop(); } #[panic_handler] fn panic(info: &PanicInfo) -> ! { glade::test_panic_handler(info) } #[test_case] fn test_println() { sprint!("test_println... "); println!("test_println output"); sprintln!("[Ok!]"); }
extern crate foodep; fn main() { println!("Hello, world!"); foodep::foo(); }
fn insertion_sort<T: std::cmp::Ord>(arr: &mut [T]) { for i in 1..arr.len() { let mut j = i; while j > 0 && arr[j] < arr[j-1] { arr.swap(j, j-1); j = j-1; } } } fn main() {}
use crate::candidate::{CandidatePairState, CandidateType}; use crate::agent::agent_internal::AgentInternal; use crate::network_type::NetworkType; use std::sync::atomic::Ordering; use tokio::time::Instant; /// Contains ICE candidate pair statistics. pub struct CandidatePairStats { /// The timestamp associated with this struct. pub timestamp: Instant, /// The id of the local candidate. pub local_candidate_id: String, /// The id of the remote candidate. pub remote_candidate_id: String, /// The state of the checklist for the local and remote candidates in a pair. pub state: CandidatePairState, /// It is true when this valid pair that should be used for media, /// if it is the highest-priority one amongst those whose nominated flag is set. pub nominated: bool, /// The total number of packets sent on this candidate pair. pub packets_sent: u32, /// The total number of packets received on this candidate pair. pub packets_received: u32, /// The total number of payload bytes sent on this candidate pair not including headers or /// padding. pub bytes_sent: u64, /// The total number of payload bytes received on this candidate pair not including headers or /// padding. pub bytes_received: u64, /// The timestamp at which the last packet was sent on this particular candidate pair, excluding /// STUN packets. pub last_packet_sent_timestamp: Instant, /// The timestamp at which the last packet was received on this particular candidate pair, /// excluding STUN packets. pub last_packet_received_timestamp: Instant, /// The timestamp at which the first STUN request was sent on this particular candidate pair. pub first_request_timestamp: Instant, /// The timestamp at which the last STUN request was sent on this particular candidate pair. /// The average interval between two consecutive connectivity checks sent can be calculated with /// (last_request_timestamp - first_request_timestamp) / requests_sent. 
pub last_request_timestamp: Instant, /// Timestamp at which the last STUN response was received on this particular candidate pair. pub last_response_timestamp: Instant, /// The sum of all round trip time measurements in seconds since the beginning of the session, /// based on STUN connectivity check responses (responses_received), including those that reply /// to requests that are sent in order to verify consent. The average round trip time can be /// computed from total_round_trip_time by dividing it by responses_received. pub total_round_trip_time: f64, /// The latest round trip time measured in seconds, computed from both STUN connectivity checks, /// including those that are sent for consent verification. pub current_round_trip_time: f64, /// It is calculated by the underlying congestion control by combining the available bitrate for /// all the outgoing RTP streams using this candidate pair. The bitrate measurement does not /// count the size of the IP or other transport layers like TCP or UDP. It is similar to the /// TIAS defined in RFC 3890, i.e., it is measured in bits per second and the bitrate is /// calculated over a 1 second window. pub available_outgoing_bitrate: f64, /// It is calculated by the underlying congestion control by combining the available bitrate for /// all the incoming RTP streams using this candidate pair. The bitrate measurement does not /// count the size of the IP or other transport layers like TCP or UDP. It is similar to the /// TIAS defined in RFC 3890, i.e., it is measured in bits per second and the bitrate is /// calculated over a 1 second window. pub available_incoming_bitrate: f64, /// The number of times the circuit breaker is triggered for this particular 5-tuple, /// ceasing transmission. pub circuit_breaker_trigger_count: u32, /// The total number of connectivity check requests received (including retransmissions). 
/// It is impossible for the receiver to tell whether the request was sent in order to check /// connectivity or check consent, so all connectivity checks requests are counted here. pub requests_received: u64, /// The total number of connectivity check requests sent (not including retransmissions). pub requests_sent: u64, /// The total number of connectivity check responses received. pub responses_received: u64, /// The total number of connectivity check responses sent. Since we cannot distinguish /// connectivity check requests and consent requests, all responses are counted. pub responses_sent: u64, /// The total number of connectivity check request retransmissions received. pub retransmissions_received: u64, /// The total number of connectivity check request retransmissions sent. pub retransmissions_sent: u64, /// The total number of consent requests sent. pub consent_requests_sent: u64, /// The timestamp at which the latest valid STUN binding response expired. pub consent_expired_timestamp: Instant, } impl Default for CandidatePairStats { fn default() -> Self { Self { timestamp: Instant::now(), local_candidate_id: String::new(), remote_candidate_id: String::new(), state: CandidatePairState::default(), nominated: false, packets_sent: 0, packets_received: 0, bytes_sent: 0, bytes_received: 0, last_packet_sent_timestamp: Instant::now(), last_packet_received_timestamp: Instant::now(), first_request_timestamp: Instant::now(), last_request_timestamp: Instant::now(), last_response_timestamp: Instant::now(), total_round_trip_time: 0.0, current_round_trip_time: 0.0, available_outgoing_bitrate: 0.0, available_incoming_bitrate: 0.0, circuit_breaker_trigger_count: 0, requests_received: 0, requests_sent: 0, responses_received: 0, responses_sent: 0, retransmissions_received: 0, retransmissions_sent: 0, consent_requests_sent: 0, consent_expired_timestamp: Instant::now(), } } } /// Contains ICE candidate statistics related to the `ICETransport` objects. 
#[derive(Debug, Clone)] pub struct CandidateStats { // The timestamp associated with this struct. pub timestamp: Instant, /// The candidate id. pub id: String, /// The type of network interface used by the base of a local candidate (the address the ICE /// agent sends from). Only present for local candidates; it's not possible to know what type of /// network interface a remote candidate is using. /// /// Note: This stat only tells you about the network interface used by the first "hop"; it's /// possible that a connection will be bottlenecked by another type of network. For example, /// when using Wi-Fi tethering, the networkType of the relevant candidate would be "wifi", even /// when the next hop is over a cellular connection. pub network_type: NetworkType, /// The IP address of the candidate, allowing for IPv4 addresses and IPv6 addresses, but fully /// qualified domain names (FQDNs) are not allowed. pub ip: String, /// The port number of the candidate. pub port: u16, /// The `Type` field of the ICECandidate. pub candidate_type: CandidateType, /// The `priority` field of the ICECandidate. pub priority: u32, /// The url of the TURN or STUN server indicated in the that translated this IP address. /// It is the url address surfaced in an PeerConnectionICEEvent. pub url: String, /// The protocol used by the endpoint to communicate with the TURN server. This is only present /// for local candidates. Valid values for the TURN url protocol is one of udp, tcp, or tls. pub relay_protocol: String, /// It is true if the candidate has been deleted/freed. For host candidates, this means that any /// network resources (typically a socket) associated with the candidate have been released. For /// TURN candidates, this means the TURN allocation is no longer active. /// /// Only defined for local candidates. For remote candidates, this property is not applicable. 
pub deleted: bool, } impl Default for CandidateStats { fn default() -> Self { Self { timestamp: Instant::now(), id: String::new(), network_type: NetworkType::default(), ip: String::new(), port: 0, candidate_type: CandidateType::default(), priority: 0, url: String::new(), relay_protocol: String::new(), deleted: false, } } } impl AgentInternal { /// Returns a list of candidate pair stats. pub(crate) async fn get_candidate_pairs_stats(&self) -> Vec<CandidatePairStats> { let checklist = self.agent_conn.checklist.lock().await; let mut res = Vec::with_capacity(checklist.len()); for cp in &*checklist { let stat = CandidatePairStats { timestamp: Instant::now(), local_candidate_id: cp.local.id(), remote_candidate_id: cp.remote.id(), state: cp.state.load(Ordering::SeqCst).into(), nominated: cp.nominated.load(Ordering::SeqCst), ..CandidatePairStats::default() }; res.push(stat); } res } /// Returns a list of local candidates stats. pub(crate) fn get_local_candidates_stats(&self) -> Vec<CandidateStats> { let mut res = Vec::with_capacity(self.local_candidates.len()); for (network_type, local_candidates) in &self.local_candidates { for c in local_candidates { let stat = CandidateStats { timestamp: Instant::now(), id: c.id(), network_type: *network_type, ip: c.address(), port: c.port(), candidate_type: c.candidate_type(), priority: c.priority(), // URL string relay_protocol: "udp".to_owned(), // Deleted bool ..CandidateStats::default() }; res.push(stat); } } res } /// Returns a list of remote candidates stats. 
pub(crate) fn get_remote_candidates_stats(&self) -> Vec<CandidateStats> { let mut res = Vec::with_capacity(self.remote_candidates.len()); for (network_type, remote_candidates) in &self.remote_candidates { for c in remote_candidates { let stat = CandidateStats { timestamp: Instant::now(), id: c.id(), network_type: *network_type, ip: c.address(), port: c.port(), candidate_type: c.candidate_type(), priority: c.priority(), // URL string relay_protocol: "udp".to_owned(), // Deleted bool ..CandidateStats::default() }; res.push(stat); } } res } }
use crate::initial_load::initial_load; use crate::page_data::page_data; use crate::translations::translations; use actix_files::{Files, NamedFile}; use actix_web::{web, HttpRequest}; use perseus::{ get_render_cfg, html_shell::prep_html_shell, path_prefix::get_path_prefix_server, stores::{ImmutableStore, MutableStore}, ErrorPages, Locales, SsrNode, TemplateMap, TranslationsManager, }; use std::collections::HashMap; use std::fs; /// The options for setting up the Actix Web integration. This should be literally constructed, as nothing is optional. #[derive(Clone)] pub struct Options { /// The location on the filesystem of your JavaScript bundle. pub js_bundle: String, /// The location on the filesystem of your Wasm bundle. pub wasm_bundle: String, /// The location on the filesystem of your `index.html` file that includes the JS bundle. pub index: String, /// A `HashMap` of your app's templates by their paths. pub templates_map: TemplateMap<SsrNode>, /// The locales information for the app. pub locales: Locales, /// The HTML `id` of the element at which to render Perseus. On the server-side, interpolation will be done here in a highly /// efficient manner by not parsing the HTML, so this MUST be of the form `<div id="root_id">` in your markup (double or single /// quotes, `root_id` replaced by what this property is set to). pub root_id: String, /// The location of the JS interop snippets to be served as static files. pub snippets: String, /// The error pages for the app. These will be server-rendered if an initial load fails. pub error_pages: ErrorPages<SsrNode>, /// Directories to serve static content from, mapping URL to folder path. Note that the URL provided will be gated behind /// `.perseus/static/`, and must have a leading `/`. If you're using a CMS instead, you should set these up outside the Perseus /// server (but they might still be on the same machine, you can still add more routes after Perseus is configured). 
pub static_dirs: HashMap<String, String>, /// A map of URLs to act as aliases for certain static resources. These are particularly designed for things like a site manifest or /// favicons, which should be stored in a static directory, but need to be aliased at a path like `/favicon.ico`. pub static_aliases: HashMap<String, String>, } async fn js_bundle(opts: web::Data<Options>) -> std::io::Result<NamedFile> { NamedFile::open(&opts.js_bundle) } async fn wasm_bundle(opts: web::Data<Options>) -> std::io::Result<NamedFile> { NamedFile::open(&opts.wasm_bundle) } async fn static_alias(opts: web::Data<Options>, req: HttpRequest) -> std::io::Result<NamedFile> { let filename = opts.static_aliases.get(req.path()); let filename = match filename { Some(filename) => filename, // If the path doesn't exist, then the alias is not found None => return Err(std::io::Error::from(std::io::ErrorKind::NotFound)), }; NamedFile::open(filename) } /// Configures an existing Actix Web app for Perseus. This returns a function that does the configuring so it can take arguments. This /// includes a complete wildcard handler (`*`), and so it should be configured after any other routes on your server. 
pub async fn configurer<M: MutableStore + 'static, T: TranslationsManager + 'static>(
    opts: Options,
    immutable_store: ImmutableStore,
    mutable_store: M,
    translations_manager: T,
) -> impl Fn(&mut web::ServiceConfig) {
    // The render configuration is produced at build time and stored immutably;
    // failing to load it means the app cannot serve anything, so panic early.
    let render_cfg = get_render_cfg(&immutable_store)
        .await
        .expect("Couldn't get render configuration!");
    // Get the index file and inject the render configuration into it ahead of time
    // Anything done here will affect any status code and all loads
    let index_file = fs::read_to_string(&opts.index).expect("Couldn't get HTML index file!");
    let index_with_render_cfg = prep_html_shell(index_file, &render_cfg, get_path_prefix_server());

    // The returned closure is what Actix Web invokes with its `ServiceConfig`;
    // everything prepared above is moved in and cloned into the app data.
    move |cfg: &mut web::ServiceConfig| {
        cfg
            // We implant the render config in the app data for better performance, it's needed on every request
            .data(render_cfg.clone())
            .data(immutable_store.clone())
            .data(mutable_store.clone())
            .data(translations_manager.clone())
            .data(opts.clone())
            .data(index_with_render_cfg.clone())
            // TODO chunk JS and Wasm bundles
            // These allow getting the basic app code (not including the static data)
            // This contains everything in the spirit of a pseudo-SPA
            .route("/.perseus/bundle.js", web::get().to(js_bundle))
            .route("/.perseus/bundle.wasm", web::get().to(wasm_bundle))
            // This allows getting the static HTML/JSON of a page
            // We stream both together in a single JSON object so SSR works (otherwise we'd have request IDs and weird caching...)
            // A request to this should also provide the template name (routing should only be done once on the client) as a query parameter
            .route(
                "/.perseus/page/{locale}/{filename:.*}.json",
                web::get().to(page_data::<M, T>),
            )
            // This allows the app shell to fetch translations for a given page
            .route(
                "/.perseus/translations/{locale}",
                web::get().to(translations::<T>),
            )
            // This allows getting JS interop snippets (including ones that are supposedly 'inlined')
            // These won't change, so they can be set as a filesystem dependency safely
            .service(Files::new("/.perseus/snippets", &opts.snippets));
        // Now we add support for any static content the user wants to provide
        for (url, static_dir) in opts.static_dirs.iter() {
            cfg.service(Files::new(&format!("/.perseus/static{}", url), static_dir));
        }
        // And finally add in aliases for static content as necessary
        for (url, _static_path) in opts.static_aliases.iter() {
            // This handler indexes the path of the request in `opts.static_aliases` to figure out what to serve
            cfg.route(url, web::get().to(static_alias));
        }
        // For everything else, we'll serve the app shell directly
        // This has to be done AFTER everything else, because it will match anything that's left
        cfg.route("*", web::get().to(initial_load::<M, T>));
    }
}
use crate::net::TcpStream; use std::future::Future; use std::io; use std::pin::Pin; use std::task::{Context, Poll}; /// A future completing when a stream is ready to use (or failed). #[must_use = "futures do nothing unless you `.await` or poll them"] #[derive(Debug)] pub struct TcpConnectFuture { stream: Option<TcpStream>, } impl TcpConnectFuture { pub(super) fn new(stream: TcpStream) -> Self { Self { stream: Some(stream), } } } impl Future for TcpConnectFuture { type Output = io::Result<TcpStream>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { futures_core::ready!(self.stream.as_mut().expect("can't poll TcpConnectFuture twice").mio_stream.poll_write_ready(cx))?; let stream = self.stream.take().unwrap(); if let Some(e) = stream.mio_stream.io_ref().take_error()? { return Poll::Ready(Err(e)); } Poll::Ready(Ok(stream)) } }
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright ยฉ 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. pub const ibv_wc_flags_IBV_WC_GRH: ibv_wc_flags = ibv_wc_flags(1); pub const ibv_wc_flags_IBV_WC_IP_CSUM_OK: ibv_wc_flags = ibv_wc_flags(4); pub const ibv_wc_flags_IBV_WC_TM_DATA_VALID: ibv_wc_flags = ibv_wc_flags(64); pub const ibv_wc_flags_IBV_WC_TM_MATCH: ibv_wc_flags = ibv_wc_flags(32); pub const ibv_wc_flags_IBV_WC_TM_SYNC_REQ: ibv_wc_flags = ibv_wc_flags(16); pub const ibv_wc_flags_IBV_WC_WITH_IMM: ibv_wc_flags = ibv_wc_flags(2); pub const ibv_wc_flags_IBV_WC_WITH_INV: ibv_wc_flags = ibv_wc_flags(8);
//! Utilities for manipulating the data devices
//!
//! The data device is wayland's abstraction to represent both selection (copy/paste) and
//! drag'n'drop actions. This module provides logic to handle this part of the protocol.
//! Selection and drag'n'drop are per-seat notions.
//!
//! This module provides 2 main freestanding functions:
//!
//! - [`init_data_device`](::wayland::data_device::init_data_device): this function must be called
//!   during the compositor startup to initialize the data device logic
//! - [`set_data_device_focus`](::wayland::data_device::set_data_device_focus): this function sets
//!   the data device focus for a given seat; you'd typically call it whenever the keyboard focus
//!   changes, to follow it (for example in the focus hook of your keyboards)
//!
//! Using these two functions is enough for your clients to be able to interact with each other using
//! the data devices.
//!
//! The module also provides additional mechanisms allowing your compositor to see and interact with
//! the contents of the data device:
//!
//! - You can provide a callback closure to [`init_data_device`](::wayland::data_device::init_data_device)
//!   to peek into the actions of your clients
//! - the freestanding function [`set_data_device_selection`](::wayland::data_device::set_data_device_selection)
//!   allows you to set the contents of the selection for your clients
//! - the freestanding function [`start_dnd`](::wayland::data_device::start_dnd) allows you to initiate a drag'n'drop event from the compositor
//!   itself and receive interactions of clients with it via an other dedicated callback.
//!
//! The module also defines the `DnDIconRole` that you need to insert into your compositor roles enum, to
//! represent surfaces that are used as a DnD icon.
//!
//! ## Initialization
//!
//! ```
//! # extern crate wayland_server;
//! # #[macro_use] extern crate smithay;
//! use smithay::wayland::data_device::{init_data_device, default_action_chooser, DnDIconRole};
//! # use smithay::wayland::compositor::compositor_init;
//!
//! // You need to insert the `DndIconRole` into your roles, to handle requests from clients
//! // to set a surface as a dnd icon
//! define_roles!(Roles => [DnDIcon, DnDIconRole]);
//!
//! # fn main(){
//! # let mut event_loop = wayland_server::calloop::EventLoop::<()>::new().unwrap();
//! # let mut display = wayland_server::Display::new(event_loop.handle());
//! # let (compositor_token, _, _) = compositor_init::<Roles, _, _>(&mut display, |_, _, _| {}, None);
//! // init the data device:
//! init_data_device(
//!     &mut display,            // the display
//!     |dnd_event| { /* a callback to react to client DnD/selection actions */ },
//!     default_action_chooser,  // a closure to choose the DnD action depending on clients
//!                              // negotiation
//!     compositor_token.clone(), // a compositor token
//!     None                     // insert a logger here
//! );
//! # }
//! ```

use std::cell::RefCell;
use std::os::unix::io::RawFd;
use std::rc::Rc;

use wayland_server::{
    protocol::{
        wl_data_device,
        wl_data_device_manager::{self, DndAction},
        wl_data_offer, wl_data_source, wl_surface,
    },
    Client, Display, Global, NewResource,
};

use crate::wayland::{
    compositor::{roles::Role, CompositorToken},
    seat::Seat,
};

mod data_source;
mod dnd_grab;
mod server_dnd_grab;

pub use self::data_source::{with_source_metadata, SourceMetadata};
pub use self::server_dnd_grab::ServerDndEvent;

/// Events that are generated by interactions of the clients with the data device
pub enum DataDeviceEvent {
    /// A client has set the selection
    NewSelection(Option<wl_data_source::WlDataSource>),
    /// A client started a drag'n'drop as response to a user pointer action
    DnDStarted {
        /// The data source provided by the client
        ///
        /// If it is `None`, this means the DnD is restricted to surfaces of the
        /// same client and the client will manage data transfer by itself.
        source: Option<wl_data_source::WlDataSource>,
        /// The icon the client requested to be used to be associated with the cursor icon
        /// during the drag'n'drop.
        icon: Option<wl_surface::WlSurface>,
    },
    /// The drag'n'drop action was finished by the user releasing the buttons
    ///
    /// At this point, any pointer icon should be removed.
    ///
    /// Note that this event will only be generated for client-initiated drag'n'drop sessions.
    DnDDropped,
    /// A client requested to read the server-set selection
    SendSelection {
        /// the requested mime type
        mime_type: String,
        /// the fd to write into
        fd: RawFd,
    },
}

/// The role applied to surfaces used as DnD icons
#[derive(Default)]
pub struct DnDIconRole;

// The current owner of a seat's selection: nobody, a client-provided data
// source, or selection contents provided by the compositor itself.
enum Selection {
    Empty,
    Client(wl_data_source::WlDataSource),
    Compositor(SourceMetadata),
}

// Per-seat bookkeeping stored in the seat's user_data: the data devices bound
// by clients, the current selection, and the client currently holding focus.
struct SeatData {
    known_devices: Vec<wl_data_device::WlDataDevice>,
    selection: Selection,
    log: ::slog::Logger,
    current_focus: Option<Client>,
}

impl SeatData {
    // Replace the selection and immediately advertise it to the focused client.
    fn set_selection(&mut self, new_selection: Selection) {
        self.selection = new_selection;
        self.send_selection();
    }

    // Change which client has data device focus and re-send the selection to it.
    fn set_focus(&mut self, new_focus: Option<Client>) {
        self.current_focus = new_focus;
        self.send_selection();
    }

    // Advertise the current selection to every data device belonging to the
    // currently focused client (no-op if there is no focus).
    fn send_selection(&mut self) {
        let client = match self.current_focus.as_ref() {
            Some(c) => c,
            None => return,
        };
        // first sanitize the selection, resetting it to null if the client holding
        // it dropped it
        let cleanup = if let Selection::Client(ref data_source) = self.selection {
            !data_source.as_ref().is_alive()
        } else {
            false
        };
        if cleanup {
            self.selection = Selection::Empty;
        }
        // then send it if appropriate
        match self.selection {
            Selection::Empty => {
                // send an empty selection
                for dd in &self.known_devices {
                    // skip data devices not belonging to our client
                    if dd.as_ref().client().map(|c| !c.equals(client)).unwrap_or(true) {
                        continue;
                    }
                    dd.selection(None);
                }
            }
            Selection::Client(ref data_source) => {
                for dd in &self.known_devices {
                    // skip data devices not belonging to our client
                    if dd.as_ref().client().map(|c| !c.equals(client)).unwrap_or(true) {
                        continue;
                    }
                    let source = data_source.clone();
                    let log = self.log.clone();
                    // create a corresponding data offer
                    let offer = client
                        .create_resource::<wl_data_offer::WlDataOffer>(dd.as_ref().version())
                        .unwrap()
                        .implement_closure(
                            move |req, _offer| match req {
                                wl_data_offer::Request::Receive { fd, mime_type } => {
                                    // check if the source and associated mime type is still valid
                                    let valid = with_source_metadata(&source, |meta| {
                                        meta.mime_types.contains(&mime_type)
                                    })
                                    .unwrap_or(false)
                                        && source.as_ref().is_alive();
                                    if !valid {
                                        // deny the receive
                                        debug!(log, "Denying a wl_data_offer.receive with invalid source.");
                                    } else {
                                        source.send(mime_type, fd);
                                    }
                                    // always close our duplicate of the fd; the source
                                    // (if any) received its own copy
                                    let _ = ::nix::unistd::close(fd);
                                }
                                _ => { /* selection data offers only care about the `receive` event */ }
                            },
                            None::<fn(_)>,
                            (),
                        );
                    // advertise the offer to the client
                    dd.data_offer(&offer);
                    with_source_metadata(data_source, |meta| {
                        for mime_type in meta.mime_types.iter().cloned() {
                            offer.offer(mime_type);
                        }
                    })
                    .unwrap();
                    dd.selection(Some(&offer));
                }
            }
            Selection::Compositor(ref meta) => {
                for dd in &self.known_devices {
                    // skip data devices not belonging to our client
                    if dd.as_ref().client().map(|c| !c.equals(client)).unwrap_or(true) {
                        continue;
                    }
                    let log = self.log.clone();
                    let offer_meta = meta.clone();
                    let callback = dd
                        .as_ref()
                        .user_data::<DataDeviceData>()
                        .unwrap()
                        .callback
                        .clone();
                    // create a corresponding data offer
                    let offer = client
                        .create_resource::<wl_data_offer::WlDataOffer>(dd.as_ref().version())
                        .unwrap()
                        .implement_closure(
                            move |req, _offer| match req {
                                wl_data_offer::Request::Receive { fd, mime_type } => {
                                    // check if the associated mime type is valid
                                    if !offer_meta.mime_types.contains(&mime_type) {
                                        // deny the receive
                                        debug!(log, "Denying a wl_data_offer.receive with invalid source.");
                                        let _ = ::nix::unistd::close(fd);
                                    } else {
                                        // forward the request to the compositor's callback,
                                        // which takes ownership of the fd
                                        (&mut *callback.borrow_mut())(DataDeviceEvent::SendSelection {
                                            mime_type,
                                            fd,
                                        });
                                    }
                                }
                                _ => { /* selection data offers only care
                                       about the `receive` event */ }
                            },
                            None::<fn(_)>,
                            (),
                        );
                    // advertise the offer to the client
                    dd.data_offer(&offer);
                    for mime_type in meta.mime_types.iter().cloned() {
                        offer.offer(mime_type);
                    }
                    dd.selection(Some(&offer));
                }
            }
        }
    }
}

impl SeatData {
    // Fresh, empty per-seat state.
    fn new(log: ::slog::Logger) -> SeatData {
        SeatData {
            known_devices: Vec::new(),
            selection: Selection::Empty,
            log,
            current_focus: None,
        }
    }
}

/// Initialize the data device global
///
/// You can provide a callback to peek into the actions of your clients over the data devices
/// (allowing you to retrieve the current selection buffer, or intercept DnD data). See the
/// [`DataDeviceEvent`] type for details about what notifications you can receive. Note that this
/// closure will not receive notifications about dnd actions the compositor initiated, see
/// [`start_dnd`] for details about that.
///
/// You also need to provide a `(DndAction, DndAction) -> DndAction` closure that will arbitrate
/// the choice of action resulting from a drag'n'drop session. Its first argument is the set of
/// available actions (which is the intersection of the actions supported by the source and targets)
/// and the second argument is the preferred action reported by the target. If no action should be
/// chosen (and thus the drag'n'drop should abort on drop), return
/// [`DndAction::empty()`](wayland_server::protocol::wl_data_device_manager::DndAction::empty).
pub fn init_data_device<F, C, R, L>(
    display: &mut Display,
    callback: C,
    action_choice: F,
    token: CompositorToken<R>,
    logger: L,
) -> Global<wl_data_device_manager::WlDataDeviceManager>
where
    F: FnMut(DndAction, DndAction) -> DndAction + 'static,
    C: FnMut(DataDeviceEvent) + 'static,
    R: Role<DnDIconRole> + 'static,
    L: Into<Option<::slog::Logger>>,
{
    let log = crate::slog_or_stdlog(logger).new(o!("smithay_module" => "data_device_mgr"));
    // The user-supplied closures are shared between every manager instance a
    // client binds, hence the Rc<RefCell<_>> wrapping.
    let action_choice = Rc::new(RefCell::new(action_choice));
    let callback = Rc::new(RefCell::new(callback));
    // Advertise wl_data_device_manager at version 3; each bind is implemented
    // with handles to the shared closures.
    display.create_global(3, move |new_ddm, _version| {
        implement_ddm(
            new_ddm,
            callback.clone(),
            action_choice.clone(),
            token,
            log.clone(),
        );
    })
}

/// Set the data device focus to a certain client for a given seat
pub fn set_data_device_focus(seat: &Seat, client: Option<Client>) {
    // Lazily create the per-seat state in case focus changes before any client
    // has created a data device for this seat.
    // TODO: find a better way to retrieve a logger without requiring the user
    // to provide one ?
    // This should be a rare path anyway, it is unlikely that a client gets focus
    // before initializing its data device, which would already init the user_data.
    seat.user_data().insert_if_missing(|| {
        RefCell::new(SeatData::new(
            seat.arc.log.new(o!("smithay_module" => "data_device_mgr")),
        ))
    });
    let data = seat.user_data().get::<RefCell<SeatData>>().unwrap();
    data.borrow_mut().set_focus(client);
}

/// Set a compositor-provided selection for this seat
///
/// You need to provide the available mime types for this selection.
///
/// Whenever a client requests to read the selection, your callback will
/// receive a [`DataDeviceEvent::SendSelection`] event.
pub fn set_data_device_selection(seat: &Seat, mime_types: Vec<String>) {
    // TODO: same question as in set_data_device_focus
    seat.user_data().insert_if_missing(|| {
        RefCell::new(SeatData::new(
            seat.arc.log.new(o!("smithay_module" => "data_device_mgr")),
        ))
    });
    let seat_data = seat.user_data().get::<RefCell<SeatData>>().unwrap();
    // The compositor-provided selection carries no DnD action, only mime types.
    seat_data
        .borrow_mut()
        .set_selection(Selection::Compositor(SourceMetadata {
            mime_types,
            dnd_action: DndAction::empty(),
        }));
}

/// Start a drag'n'drop from a resource controlled by the compositor
///
/// You'll receive events generated by the interaction of clients with your
/// drag'n'drop in the provided callback. See [`ServerDndEvent`] for details about
/// which events can be generated and what response is expected from you to them.
pub fn start_dnd<C>(seat: &Seat, serial: u32, metadata: SourceMetadata, callback: C)
where
    C: FnMut(ServerDndEvent) + 'static,
{
    // TODO: same question as in set_data_device_focus
    seat.user_data().insert_if_missing(|| {
        RefCell::new(SeatData::new(
            seat.arc.log.new(o!("smithay_module" => "data_device_mgr")),
        ))
    });
    // A DnD needs a pointer to track; silently do nothing if the seat has none.
    if let Some(pointer) = seat.get_pointer() {
        pointer.set_grab(
            server_dnd_grab::ServerDnDGrab::new(metadata, seat.clone(), Rc::new(RefCell::new(callback))),
            serial,
        );
        return;
    }
}

// Implements a wl_data_device_manager global instance: dispatches the
// CreateDataSource and GetDataDevice requests of one client binding.
fn implement_ddm<F, C, R>(
    new_ddm: NewResource<wl_data_device_manager::WlDataDeviceManager>,
    callback: Rc<RefCell<C>>,
    action_choice: Rc<RefCell<F>>,
    token: CompositorToken<R>,
    log: ::slog::Logger,
) -> wl_data_device_manager::WlDataDeviceManager
where
    F: FnMut(DndAction, DndAction) -> DndAction + 'static,
    C: FnMut(DataDeviceEvent) + 'static,
    R: Role<DnDIconRole> + 'static,
{
    use self::wl_data_device_manager::Request;
    new_ddm.implement_closure(
        move |req, _ddm| match req {
            Request::CreateDataSource { id } => {
                self::data_source::implement_data_source(id);
            }
            Request::GetDataDevice { id, seat } => match Seat::from_resource(&seat) {
                Some(seat) => {
                    // ensure the seat user_data is ready
                    seat.user_data()
                        .insert_if_missing(|| RefCell::new(SeatData::new(log.clone())));
                    let seat_data = seat.user_data().get::<RefCell<SeatData>>().unwrap();
                    let data_device = implement_data_device(
                        id,
                        seat.clone(),
                        callback.clone(),
                        action_choice.clone(),
                        token.clone(),
                        log.clone(),
                    );
                    // track the device so selections can be broadcast to it later
                    seat_data.borrow_mut().known_devices.push(data_device);
                }
                None => {
                    error!(log, "Unmanaged seat given to a data device.");
                }
            },
            _ => unreachable!(),
        },
        None::<fn(_)>,
        (),
    )
}

// User data attached to each wl_data_device resource: shared handles to the
// compositor's event callback and DnD action arbiter.
struct DataDeviceData {
    callback: Rc<RefCell<dyn FnMut(DataDeviceEvent) + 'static>>,
    action_choice: Rc<RefCell<dyn FnMut(DndAction, DndAction) -> DndAction + 'static>>,
}

// Implements one client's wl_data_device: handles StartDrag (initiating a
// pointer grab), SetSelection (focus-gated) and Release.
fn implement_data_device<F, C, R>(
    new_dd: NewResource<wl_data_device::WlDataDevice>,
    seat: Seat,
    callback: Rc<RefCell<C>>,
    action_choice: Rc<RefCell<F>>,
    token: CompositorToken<R>,
    log: ::slog::Logger,
) -> wl_data_device::WlDataDevice
where
    F: FnMut(DndAction, DndAction) -> DndAction + 'static,
    C: FnMut(DataDeviceEvent) + 'static,
    R: Role<DnDIconRole> + 'static,
{
    use self::wl_data_device::Request;
    let dd_data = DataDeviceData {
        callback: callback.clone(),
        action_choice,
    };
    new_dd.implement_closure(
        move |req, dd| match req {
            Request::StartDrag {
                source,
                origin,
                icon,
                serial,
            } => {
                /* TODO: handle the icon */
                if let Some(pointer) = seat.get_pointer() {
                    // a drag may only start from a live implicit pointer grab
                    if pointer.has_grab(serial) {
                        if let Some(ref icon) = icon {
                            // the icon surface must not already carry another role
                            if token.give_role::<DnDIconRole>(icon).is_err() {
                                dd.as_ref().post_error(
                                    wl_data_device::Error::Role as u32,
                                    "Given surface already has an other role".into(),
                                );
                                return;
                            }
                        }
                        // The StartDrag is in response to a pointer implicit grab, all is good
                        (&mut *callback.borrow_mut())(DataDeviceEvent::DnDStarted {
                            source: source.clone(),
                            icon: icon.clone(),
                        });
                        pointer.set_grab(
                            dnd_grab::DnDGrab::new(
                                source,
                                origin,
                                seat.clone(),
                                icon.clone(),
                                token.clone(),
                                callback.clone(),
                            ),
                            serial,
                        );
                        return;
                    }
                }
                debug!(log, "denying drag from client without implicit grab");
            }
            Request::SetSelection { source, serial: _ } => {
                if let Some(keyboard) = seat.get_keyboard() {
                    if dd
                        .as_ref()
                        .client()
                        .as_ref()
                        .map(|c| keyboard.has_focus(c))
                        .unwrap_or(false)
                    {
                        let seat_data = seat.user_data().get::<RefCell<SeatData>>().unwrap();
                        (&mut *callback.borrow_mut())(DataDeviceEvent::NewSelection(source.clone()));
                        // The client has kbd focus, it can set the selection
                        seat_data
                            .borrow_mut()
                            .set_selection(source.map(Selection::Client).unwrap_or(Selection::Empty));
                        return;
                    }
                }
                debug!(log, "denying setting selection by a non-focused client");
            }
            Request::Release => {
                // Clean up the known devices
                seat.user_data()
                    .get::<RefCell<SeatData>>()
                    .unwrap()
                    .borrow_mut()
                    .known_devices
                    .retain(|ndd| ndd.as_ref().is_alive() && (!ndd.as_ref().equals(&dd.as_ref())))
            }
            _ => unreachable!(),
        },
        None::<fn(_)>,
        dd_data,
    )
}

/// A simple action chooser for DnD negotiation
///
/// If the preferred action is available, it'll pick it. Otherwise, it'll pick the first
/// available in the following order: Ask, Copy, Move.
pub fn default_action_chooser(available: DndAction, preferred: DndAction) -> DndAction {
    // if the preferred action is valid (a single action) and in the available actions, use it
    // otherwise, follow a fallback strategy
    if [DndAction::Move, DndAction::Copy, DndAction::Ask].contains(&preferred)
        && available.contains(preferred)
    {
        preferred
    } else if available.contains(DndAction::Ask) {
        DndAction::Ask
    } else if available.contains(DndAction::Copy) {
        DndAction::Copy
    } else if available.contains(DndAction::Move) {
        DndAction::Move
    } else {
        DndAction::empty()
    }
}
// FFI surface mirroring Apple's Metal `MTLRenderCommandEncoder` protocol.
// The trait pins down the Objective-C selector shapes; the `impl for id`
// below is entirely stubbed out with `unimplemented!()` — message sending
// has not been wired up yet.
use cocoa::base::id;
use cocoa::foundation::{NSInteger, NSRange, NSUInteger};
use libc::{uint32_t, c_void};
use types::{MTLScissorRect, MTLViewport};

pub trait MTLRenderCommandEncoder {
    // --- fixed-function render state -----------------------------------
    unsafe fn setBlendColorRed_green_blue_alpha(self, red: f32, green: f32, blue: f32, alpha: f32);
    unsafe fn setCullMode(self, cullMode: MTLCullMode);
    unsafe fn setDepthBias_slopeScale_clamp(self, depthBias: f32, slopeScale: f32, clamp: f32);
    unsafe fn setDepthClipMode(self, depthClipMode: MTLDepthClipMode);
    unsafe fn setDepthStencilState(self, depthStencilState: id);
    unsafe fn setFrontFacingWinding(self, frontFacingWinding: MTLWinding);
    unsafe fn setRenderPipelineState(self, renderPipelineState: id);
    unsafe fn setScissorRect(self, scissorRect: MTLScissorRect);
    unsafe fn setStencilFrontReferenceValue_backReferenceValue(self,
                                                               frontReferenceValue: uint32_t,
                                                               backReferenceValue: uint32_t);
    unsafe fn setStencilReferenceValue(self, referenceValue: uint32_t);
    unsafe fn setTriangleFillMode(self, fillMode: MTLTriangleFillMode);
    unsafe fn setViewport(self, viewport: MTLViewport);
    unsafe fn setVisibilityResultMode_offset(self,
                                             visibilityResultMode: MTLVisibilityResultMode,
                                             offset: NSUInteger);

    // --- vertex stage resources ----------------------------------------
    unsafe fn setVertexBuffer_offset_atIndex(self, buffer: id, offset: NSUInteger, index: NSUInteger);
    unsafe fn setVertexBuffers_offsets_withRange(self,
                                                 buffers: *const id,
                                                 offsets: *const NSUInteger,
                                                 range: NSRange);
    unsafe fn setVertexBufferOffset_atIndex(self, offset: NSUInteger, index: NSUInteger);
    unsafe fn setVertexBytes_length_atIndex(self,
                                            bytes: *const c_void,
                                            length: NSUInteger,
                                            index: NSUInteger);
    unsafe fn setVertexSamplerState_atIndex(self, sampler: id, index: NSUInteger);
    unsafe fn setVertexSamplerStates_withRange(self, samplers: *const id, range: NSRange);
    unsafe fn setVertexSamplerState_lodMinClamp_lodMaxClamp_atIndex(self,
                                                                    samplers: id,
                                                                    lodMinClamp: f32,
                                                                    lodMaxClamp: f32,
                                                                    index: NSUInteger);
    unsafe fn setVertexSamplerStates_lodMinClamps_lodMaxClamps_withRange(self,
                                                                         samplers: *const id,
                                                                         lodMinClamps: *const f32,
                                                                         lodMaxClamps: *const f32,
                                                                         range: NSRange);
    unsafe fn setVertexTexture_atIndex(self, texture: id, index: NSUInteger);
    unsafe fn setVertexTextures_withRange(self, textures: *const id, range: NSRange);

    // --- fragment stage resources --------------------------------------
    unsafe fn setFragmentBuffer_offset_atIndex(self, buffer: id, offset: NSUInteger, index: NSUInteger);
    unsafe fn setFragmentBuffers_offsets_withRange(self,
                                                   buffers: *const id,
                                                   offsets: *const NSUInteger,
                                                   range: NSRange);
    unsafe fn setFragmentBufferOffset_atIndex(self, offset: NSUInteger, index: NSUInteger);
    unsafe fn setFragmentBytes_length_atIndex(self,
                                              bytes: *const c_void,
                                              length: NSUInteger,
                                              index: NSUInteger);
    unsafe fn setFragmentSamplerState_atIndex(self, sampler: id, index: NSUInteger);
    unsafe fn setFragmentSamplerStates_withRange(self, samplers: *const id, range: NSRange);
    unsafe fn setFragmentSamplerState_lodMinClamp_lodMaxClamp_atIndex(self,
                                                                      samplers: id,
                                                                      lodMinClamp: f32,
                                                                      lodMaxClamp: f32,
                                                                      index: NSUInteger);
    // NOTE(review): `lodMinClamps`/`lodMaxClamps` are plain `f32` here, while
    // the vertex-stage counterpart above takes `*const f32` (and the Metal
    // selector expects float arrays). Likely a transcription slip — confirm
    // against the MTLRenderCommandEncoder API before wiring this up.
    unsafe fn setFragmentSamplerStates_lodMinClamps_lodMaxClamps_withRange(self,
                                                                           samplers: *const id,
                                                                           lodMinClamps: f32,
                                                                           lodMaxClamps: f32,
                                                                           range: NSRange);
    unsafe fn setFragmentTexture_atIndex(self, texture: id, index: NSUInteger);
    unsafe fn setFragmentTextures_withRange(self, textures: *const id, range: NSRange);

    // --- draw calls ----------------------------------------------------
    unsafe fn drawPrimitives_vertexStart_vertexCount_instanceCount_baseInstance(self,
                                                                                primitiveType: MTLPrimitiveType,
                                                                                vertexStart: NSUInteger,
                                                                                vertexCount: NSUInteger,
                                                                                instanceCount: NSUInteger,
                                                                                baseInstance: NSUInteger);
    unsafe fn drawPrimitives_vertexStart_vertexCount_instanceCount(self,
                                                                   primitiveType: MTLPrimitiveType,
                                                                   vertexStart: NSUInteger,
                                                                   vertexCount: NSUInteger,
                                                                   instanceCount: NSUInteger);
    // NOTE(review): unlike the other drawPrimitives variants, this one has no
    // `primitiveType` parameter even though its selector name implies one —
    // verify against the Metal API before implementing.
    unsafe fn drawPrimitives_vertexStart_vertexCount(self, vertexStart: NSUInteger,
                                                     vertexCount: NSUInteger);
    unsafe fn drawPrimitives_indirectBuffer_indirectBufferOffset(self,
                                                                 primitiveType: MTLPrimitiveType,
                                                                 indirectBuffer: id,
                                                                 indirectBufferOffset: NSUInteger);
    unsafe fn drawIndexedPrimitives_indexCount_indexType_indexBuffer_indexBufferOffset_instanceCount_baseVertex_baseInstance(
        self, primitiveType: MTLPrimitiveType, indexCount: NSUInteger, indexType: MTLIndexType,
        indexBuffer: id, indexBufferOffset: NSUInteger, instanceCount: NSUInteger,
        baseVertex: NSInteger, baseInstance: NSUInteger);
    unsafe fn drawIndexedPrimitives_indexCount_indexType_indexBuffer_indexBufferOffset_instanceCount(
        self, primitiveType: MTLPrimitiveType, indexCount: NSUInteger, indexType: MTLIndexType,
        indexBuffer: id, indexBufferOffset: NSUInteger, instanceCount: NSUInteger);
    unsafe fn drawIndexedPrimitives_indexCount_indexType_indexBuffer_indexBufferOffset(
        self, primitiveType: MTLPrimitiveType, indexCount: NSUInteger, indexType: MTLIndexType,
        indexBuffer: id, indexBufferOffset: NSUInteger);
    unsafe fn drawIndexedPrimitives_indexType_indexBuffer_indexBufferOffset_indirectBuffer_indirectBufferOffset(self,
                                                                                                                primitiveType: MTLPrimitiveType,
                                                                                                                indexType: MTLIndexType,
                                                                                                                indexBuffer: id,
                                                                                                                indexBufferOffset: NSUInteger,
                                                                                                                indirectBuffer: id,
                                                                                                                indirectBufferOffset: NSUInteger);
}

// Stub implementation on raw Objective-C `id`. Every method panics; the
// real msg_send! dispatch is still to be written.
impl MTLRenderCommandEncoder for id {
    unsafe fn setBlendColorRed_green_blue_alpha(self, red: f32, green: f32, blue: f32, alpha: f32) {
        unimplemented!();
    }

    unsafe fn setCullMode(self, cullMode: MTLCullMode) {
        unimplemented!();
    }

    unsafe fn setDepthBias_slopeScale_clamp(self, depthBias: f32, slopeScale: f32, clamp: f32) {
        unimplemented!();
    }

    unsafe fn setDepthClipMode(self, depthClipMode: MTLDepthClipMode) {
        unimplemented!();
    }

    unsafe fn setDepthStencilState(self, depthStencilState: id) {
        unimplemented!();
    }

    unsafe fn setFrontFacingWinding(self, frontFacingWinding: MTLWinding) {
        unimplemented!();
    }

    unsafe fn setRenderPipelineState(self, renderPipelineState: id) {
        unimplemented!();
    }

    unsafe fn setScissorRect(self, scissorRect: MTLScissorRect) {
        unimplemented!();
    }

    unsafe fn setStencilFrontReferenceValue_backReferenceValue(self,
                                                               frontReferenceValue: uint32_t,
                                                               backReferenceValue: uint32_t) {
        unimplemented!();
    }

    unsafe fn setStencilReferenceValue(self, referenceValue: uint32_t) {
        unimplemented!();
    }

    unsafe fn setTriangleFillMode(self, fillMode: MTLTriangleFillMode) {
        unimplemented!();
    }

    unsafe fn setViewport(self, viewport: MTLViewport) {
        unimplemented!();
    }

    unsafe fn setVisibilityResultMode_offset(self,
                                             visibilityResultMode: MTLVisibilityResultMode,
                                             offset: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setVertexBuffer_offset_atIndex(self, buffer: id, offset: NSUInteger, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setVertexBuffers_offsets_withRange(self,
                                                 buffers: *const id,
                                                 offsets: *const NSUInteger,
                                                 range: NSRange) {
        unimplemented!();
    }

    unsafe fn setVertexBufferOffset_atIndex(self, offset: NSUInteger, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setVertexBytes_length_atIndex(self,
                                            bytes: *const c_void,
                                            length: NSUInteger,
                                            index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setVertexSamplerState_atIndex(self, sampler: id, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setVertexSamplerStates_withRange(self, samplers: *const id, range: NSRange) {
        unimplemented!();
    }

    unsafe fn setVertexSamplerState_lodMinClamp_lodMaxClamp_atIndex(self,
                                                                    samplers: id,
                                                                    lodMinClamp: f32,
                                                                    lodMaxClamp: f32,
                                                                    index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setVertexSamplerStates_lodMinClamps_lodMaxClamps_withRange(self,
                                                                         samplers: *const id,
                                                                         lodMinClamps: *const f32,
                                                                         lodMaxClamps: *const f32,
                                                                         range: NSRange) {
        unimplemented!();
    }

    unsafe fn setVertexTexture_atIndex(self, texture: id, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setVertexTextures_withRange(self, textures: *const id, range: NSRange) {
        unimplemented!();
    }

    unsafe fn setFragmentBuffer_offset_atIndex(self, buffer: id, offset: NSUInteger, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setFragmentBuffers_offsets_withRange(self,
                                                   buffers: *const id,
                                                   offsets: *const NSUInteger,
                                                   range: NSRange) {
        unimplemented!();
    }

    unsafe fn setFragmentBufferOffset_atIndex(self, offset: NSUInteger, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setFragmentBytes_length_atIndex(self,
                                              bytes: *const c_void,
                                              length: NSUInteger,
                                              index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setFragmentSamplerState_atIndex(self, sampler: id, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setFragmentSamplerStates_withRange(self, samplers: *const id, range: NSRange) {
        unimplemented!();
    }

    unsafe fn setFragmentSamplerState_lodMinClamp_lodMaxClamp_atIndex(self,
                                                                      samplers: id,
                                                                      lodMinClamp: f32,
                                                                      lodMaxClamp: f32,
                                                                      index: NSUInteger) {
        unimplemented!();
    }

    // Matches the (suspect) `f32` trait signature above — see NOTE(review) there.
    unsafe fn setFragmentSamplerStates_lodMinClamps_lodMaxClamps_withRange(self,
                                                                           samplers: *const id,
                                                                           lodMinClamps: f32,
                                                                           lodMaxClamps: f32,
                                                                           range: NSRange) {
        unimplemented!();
    }

    unsafe fn setFragmentTexture_atIndex(self, texture: id, index: NSUInteger) {
        unimplemented!();
    }

    unsafe fn setFragmentTextures_withRange(self, textures: *const id, range: NSRange) {
        unimplemented!();
    }

    unsafe fn drawPrimitives_vertexStart_vertexCount_instanceCount_baseInstance(self,
                                                                                primitiveType: MTLPrimitiveType,
                                                                                vertexStart: NSUInteger,
                                                                                vertexCount: NSUInteger,
                                                                                instanceCount: NSUInteger,
                                                                                baseInstance: NSUInteger) {
        unimplemented!();
    }

    unsafe fn drawPrimitives_vertexStart_vertexCount_instanceCount(self,
                                                                   primitiveType: MTLPrimitiveType,
                                                                   vertexStart: NSUInteger,
                                                                   vertexCount: NSUInteger,
                                                                   instanceCount: NSUInteger) {
        unimplemented!();
    }

    unsafe fn drawPrimitives_vertexStart_vertexCount(self, vertexStart: NSUInteger,
                                                     vertexCount: NSUInteger) {
        unimplemented!();
    }

    unsafe fn drawPrimitives_indirectBuffer_indirectBufferOffset(self,
                                                                 primitiveType: MTLPrimitiveType,
                                                                 indirectBuffer: id,
                                                                 indirectBufferOffset: NSUInteger) {
        unimplemented!();
    }

    unsafe fn drawIndexedPrimitives_indexCount_indexType_indexBuffer_indexBufferOffset_instanceCount_baseVertex_baseInstance(
        self, primitiveType: MTLPrimitiveType, indexCount: NSUInteger, indexType: MTLIndexType,
        indexBuffer: id, indexBufferOffset: NSUInteger, instanceCount: NSUInteger,
        baseVertex: NSInteger, baseInstance: NSUInteger) {
        unimplemented!();
    }

    unsafe fn drawIndexedPrimitives_indexCount_indexType_indexBuffer_indexBufferOffset_instanceCount(
        self, primitiveType: MTLPrimitiveType, indexCount: NSUInteger, indexType: MTLIndexType,
        indexBuffer: id, indexBufferOffset: NSUInteger, instanceCount: NSUInteger) {
        unimplemented!();
    }

    unsafe fn drawIndexedPrimitives_indexCount_indexType_indexBuffer_indexBufferOffset(
        self, primitiveType: MTLPrimitiveType, indexCount: NSUInteger, indexType: MTLIndexType,
        indexBuffer: id, indexBufferOffset: NSUInteger) {
        unimplemented!();
    }

    unsafe fn drawIndexedPrimitives_indexType_indexBuffer_indexBufferOffset_indirectBuffer_indirectBufferOffset(self,
                                                                                                                primitiveType: MTLPrimitiveType,
                                                                                                                indexType: MTLIndexType,
                                                                                                                indexBuffer: id,
                                                                                                                indexBufferOffset: NSUInteger,
                                                                                                                indirectBuffer: id,
                                                                                                                indirectBufferOffset: NSUInteger) {
        unimplemented!();
    }
}

// C-style enums matching Metal's numeric constants; `#[repr(usize)]` keeps
// the discriminants ABI-stable for FFI use.
#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLPrimitiveType {
    MTLPrimitiveTypePoint = 0,
    MTLPrimitiveTypeLine = 1,
    MTLPrimitiveTypeLineStrip = 2,
    MTLPrimitiveTypeTriangle = 3,
    MTLPrimitiveTypeTriangleStrip = 4
}

#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLIndexType {
    MTLIndexTypeUInt16 = 0,
    MTLIndexTypeUInt32 = 1
}

#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLVisibilityResultMode {
    MTLVisibilityResultModeDisabled = 0,
    MTLVisibilityResultModeBoolean = 1,
    MTLVisibilityResultModeCounting = 2
}

#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLCullMode {
    MTLCullModeNone = 0,
    MTLCullModeFront = 1,
    MTLCullModeBack = 2
}

#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLDepthClipMode {
    MTLDepthClipModeClip = 0,
    MTLDepthClipModeClamp = 1
}

#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLWinding {
    MTLWindingClockwise = 0,
    MTLWindingCounterClockwise = 1
}

#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLTriangleFillMode {
    MTLTriangleFillModeFill = 0,
    MTLTriangleFillModeLines = 1
}
use proc_macro;
use proc_macro::TokenStream;
use quote::{format_ident, quote};

/// Function-like procedural macro for tests: `build_test_renderer!(name)`
/// expands to a sequence of `let` bindings that build a `SimpleRendererMock`
/// (backed by a 512x512 RGB24 SDL2 surface canvas and a texture manager) and
/// bind it to the identifier given as `name`.
///
/// NOTE(review): the expansion references `CanvasMock`, `build_config`,
/// `SimpleRendererMock`, `sdl2` and `crate::renderer::managers` unqualified
/// or crate-relative — every call site must have these in scope; verify
/// against the callers.
#[proc_macro]
pub fn build_test_renderer(input: TokenStream) -> TokenStream {
    // The whole input token stream is stringified and reused as the
    // identifier the generated renderer is bound to.
    let renderer_name = format_ident!("{}", input.to_string());
    let exp = quote! {
        let mut character_sizes = std::collections::HashMap::new();
        let mut canvas = CanvasMock::new();
        let config = build_config();
        let mut surface =
            sdl2::surface::Surface::new(512, 512, sdl2::pixels::PixelFormatEnum::RGB24).unwrap();
        let mut surface_canvas = sdl2::render::Canvas::from_surface(surface).unwrap();
        let mut texture_creator = surface_canvas.texture_creator();
        let mut texture_manager =
            crate::renderer::managers::TextureManager::new(&texture_creator);
        let mut ttf_context = sdl2::ttf::Sdl2TtfContext {};
        let mut #renderer_name = SimpleRendererMock {
            config: config.clone(),
            ttf: ttf_context,
            character_sizes,
            texture_manager,
        };
    };
    exp.into()
}
// svd2rust-generated, read-only register description: SYSCFG interrupt line 5
// status. Only bits 0 and 1 (EXTI lines 0 and 1 pending) are mapped; the
// register resets to 0 and has no writer type.
#[doc = "Register `SYSCFG_ITLINE5` reader"]
pub type R = crate::R<SYSCFG_ITLINE5_SPEC>;
#[doc = "Field `EXTI0` reader - EXTI line 0 interrupt request pending"]
pub type EXTI0_R = crate::BitReader;
#[doc = "Field `EXTI1` reader - EXTI line 1 interrupt request pending"]
pub type EXTI1_R = crate::BitReader;
impl R {
    #[doc = "Bit 0 - EXTI line 0 interrupt request pending"]
    #[inline(always)]
    pub fn exti0(&self) -> EXTI0_R {
        // Bit 0: no shift needed.
        EXTI0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - EXTI line 1 interrupt request pending"]
    #[inline(always)]
    pub fn exti1(&self) -> EXTI1_R {
        EXTI1_R::new(((self.bits >> 1) & 1) != 0)
    }
}
#[doc = "SYSCFG interrupt line 5 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`syscfg_itline5::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SYSCFG_ITLINE5_SPEC;
impl crate::RegisterSpec for SYSCFG_ITLINE5_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`syscfg_itline5::R`](R) reader structure"]
impl crate::Readable for SYSCFG_ITLINE5_SPEC {}
#[doc = "`reset()` method sets SYSCFG_ITLINE5 to value 0"]
impl crate::Resettable for SYSCFG_ITLINE5_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// svd2rust-generated (pre-0.15 style) register description: CMP0_SW_CLEAR.
// Seven single-bit fields at positions 0,1,2,4,5,6,7 — bit 3 is not mapped
// (reserved in the hardware description). Each field gets a bool reader type
// and a write-proxy struct that masks its bit into `W.bits`.
#[doc = "Reader of register CMP0_SW_CLEAR"]
pub type R = crate::R<u32, super::CMP0_SW_CLEAR>;
#[doc = "Writer for register CMP0_SW_CLEAR"]
pub type W = crate::W<u32, super::CMP0_SW_CLEAR>;
#[doc = "Register CMP0_SW_CLEAR `reset()`'s with value 0"]
impl crate::ResetValue for super::CMP0_SW_CLEAR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `CMP0_IP0`"]
pub type CMP0_IP0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP0_IP0`"]
pub struct CMP0_IP0_W<'a> {
    w: &'a mut W,
}
impl<'a> CMP0_IP0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: clear then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `CMP0_AP0`"]
pub type CMP0_AP0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP0_AP0`"]
pub struct CMP0_AP0_W<'a> {
    w: &'a mut W,
}
impl<'a> CMP0_AP0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `CMP0_BP0`"]
pub type CMP0_BP0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP0_BP0`"]
pub struct CMP0_BP0_W<'a> {
    w: &'a mut W,
}
impl<'a> CMP0_BP0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `CMP0_IN0`"]
pub type CMP0_IN0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP0_IN0`"]
pub struct CMP0_IN0_W<'a> {
    w: &'a mut W,
}
impl<'a> CMP0_IN0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 4 (bit 3 is skipped/reserved).
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `CMP0_AN0`"]
pub type CMP0_AN0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP0_AN0`"]
pub struct CMP0_AN0_W<'a> {
    w: &'a mut W,
}
impl<'a> CMP0_AN0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 5.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `CMP0_BN0`"]
pub type CMP0_BN0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP0_BN0`"]
pub struct CMP0_BN0_W<'a> {
    w: &'a mut W,
}
impl<'a> CMP0_BN0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 6.
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `CMP0_VN0`"]
pub type CMP0_VN0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP0_VN0`"]
pub struct CMP0_VN0_W<'a> {
    w: &'a mut W,
}
impl<'a> CMP0_VN0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 7.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_ip0(&self) -> CMP0_IP0_R {
        CMP0_IP0_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_ap0(&self) -> CMP0_AP0_R {
        CMP0_AP0_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_bp0(&self) -> CMP0_BP0_R {
        CMP0_BP0_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 4 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_in0(&self) -> CMP0_IN0_R {
        CMP0_IN0_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_an0(&self) -> CMP0_AN0_R {
        CMP0_AN0_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_bn0(&self) -> CMP0_BN0_R {
        CMP0_BN0_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_vn0(&self) -> CMP0_VN0_R {
        CMP0_VN0_R::new(((self.bits >> 7) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_ip0(&mut self) -> CMP0_IP0_W {
        CMP0_IP0_W { w: self }
    }
    #[doc = "Bit 1 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_ap0(&mut self) -> CMP0_AP0_W {
        CMP0_AP0_W { w: self }
    }
    #[doc = "Bit 2 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_bp0(&mut self) -> CMP0_BP0_W {
        CMP0_BP0_W { w: self }
    }
    #[doc = "Bit 4 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_in0(&mut self) -> CMP0_IN0_W {
        CMP0_IN0_W { w: self }
    }
    #[doc = "Bit 5 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_an0(&mut self) -> CMP0_AN0_W {
        CMP0_AN0_W { w: self }
    }
    #[doc = "Bit 6 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_bn0(&mut self) -> CMP0_BN0_W {
        CMP0_BN0_W { w: self }
    }
    #[doc = "Bit 7 - see corresponding bit in CMP0_SW"]
    #[inline(always)]
    pub fn cmp0_vn0(&mut self) -> CMP0_VN0_W {
        CMP0_VN0_W { w: self }
    }
}
use std::cmp;
use std::env;
use std::fs;
use std::path::Path;

/// Who won a simulated fight.
#[derive(Copy, Clone, Debug)]
enum GameObjectClass {
    Player = 1,
    Enemy = 2,
}

/// Shop section an item was listed under.
#[derive(PartialEq, Copy, Clone, Debug)]
enum ItemCategory {
    Weapon = 1,
    Armor = 2,
    Misc = 3,
}

/// Whether an item contributes damage or defense.
#[derive(PartialEq, Copy, Clone, Debug)]
enum ItemClass {
    Damage = 1,
    Defense = 2,
}

/// A single purchasable item. `val` is the item's dominant stat (the larger
/// of its damage/armor columns).
#[derive(Copy, Clone, Debug)]
struct Item<'a> {
    key: &'a str,
    val: i32,
    cost: i32,
    class: ItemClass,
    category: ItemCategory,
}

/// A combatant: the player or the boss.
#[derive(Copy, Clone, Debug)]
struct GameObject<'a> {
    key: &'a str,
    hp: i32,
    dmg: i32,
    armor: i32,
}

/// Reads the shop listing, resolved relative to the current working directory.
/// Panics with a readable message if the file is missing.
fn read_shop_contents(filename: &str) -> String {
    let abspath = env::current_dir().unwrap().join(Path::new(filename));
    fs::read_to_string(abspath).expect("Error reading shop details")
}

/// Parses the shop listing into items. Section headers ("Weapons", "Armor",
/// "Rings") switch the current category; every other non-empty line is an
/// item of the form `name cost col1 col2`, where `val` becomes the larger of
/// the two stat columns. The result is sorted by cost, descending.
///
/// NOTE(review): tokens are split on every non-alphanumeric character, so a
/// ring named e.g. "Damage +1" would parse as key "Damage" with shifted
/// columns — behavior preserved as-is; verify against the input file format.
fn make_items_list(items: &str) -> Vec<Item> {
    let mut shop_items: Vec<Item> = Vec::new();
    let mut current_category = ItemCategory::Weapon;

    for itm in items.lines() {
        let props = itm
            .split(|c: char| !c.is_alphanumeric())
            .filter(|b| !b.is_empty())
            .collect::<Vec<&str>>();
        if props.is_empty() {
            continue;
        }
        match props[0] {
            "Weapons" => current_category = ItemCategory::Weapon,
            "Armor" => current_category = ItemCategory::Armor,
            "Rings" => current_category = ItemCategory::Misc,
            _ => {
                let v1: i32 = props[2].parse().unwrap();
                let v2: i32 = props[3].parse().unwrap();
                // Weapons only ever add damage, armor only defense; rings
                // count as whichever of their two columns is larger.
                let class = match current_category {
                    ItemCategory::Weapon => ItemClass::Damage,
                    ItemCategory::Armor => ItemClass::Defense,
                    ItemCategory::Misc => {
                        if v1 > v2 {
                            ItemClass::Damage
                        } else {
                            ItemClass::Defense
                        }
                    }
                };
                shop_items.push(Item {
                    key: props[0],
                    val: cmp::max(v1, v2),
                    class,
                    cost: props[1].parse().unwrap(),
                    category: current_category,
                });
            }
        }
    }

    // Most expensive first, matching the original ordering.
    shop_items.sort_by(|a, b| b.cost.cmp(&a.cost));
    shop_items
}

/// Brute-forces every loadout of exactly one weapon, one armor piece, one
/// damage ring and one defense ring, printing the cheapest winning and the
/// most expensive losing loadout cost.
///
/// NOTE(review): the puzzle also allows fighting with no armor and 0-2 rings;
/// this implementation has always required one of each — preserved as-is.
fn find_optimal_gear(items: Vec<Item>) {
    let weapons: Vec<Item> = items
        .iter()
        .copied()
        .filter(|a| a.category == ItemCategory::Weapon)
        .collect();
    let armor: Vec<Item> = items
        .iter()
        .copied()
        .filter(|a| a.category == ItemCategory::Armor)
        .collect();
    let mis_dmg: Vec<Item> = items
        .iter()
        .copied()
        .filter(|a| a.category == ItemCategory::Misc && a.class == ItemClass::Damage)
        .collect();
    let mis_def: Vec<Item> = items
        .iter()
        .copied()
        .filter(|a| a.category == ItemCategory::Misc && a.class == ItemClass::Defense)
        .collect();

    let mut player = GameObject {
        key: "player",
        hp: 100,
        dmg: 0,
        armor: 0,
    };
    let mut enemy = GameObject {
        key: "enemy",
        hp: 104,
        dmg: 8,
        armor: 1,
    };

    let (mut min_cost, mut max_cost) = (std::i32::MAX, std::i32::MIN);
    for w in &weapons {
        for a in &armor {
            for rl in &mis_dmg {
                for rr in &mis_def {
                    reset(&mut player, &mut enemy);
                    let equipped: Vec<Item> = vec![*w, *a, *rl, *rr];
                    let cost = get_gear_cost(equipped.clone());
                    equip_player(&mut player, equipped);
                    match death_match(&mut player, &mut enemy).0 {
                        // Cheapest gear that still wins the fight.
                        GameObjectClass::Player => min_cost = cmp::min(min_cost, cost),
                        // Priciest gear that still loses the fight.
                        GameObjectClass::Enemy => max_cost = cmp::max(max_cost, cost),
                    }
                }
            }
        }
    }

    println!("\nOptimal Cost: {} \nMax Cost: {}\n", min_cost, max_cost);
}

/// Restores both combatants to their pre-fight stats (player unarmed; boss
/// as given by the puzzle input: 104 hp / 8 damage / 1 armor).
fn reset(player: &mut GameObject, enemy: &mut GameObject) {
    player.hp = 100;
    player.armor = 0;
    player.dmg = 0;
    enemy.hp = 104;
    enemy.dmg = 8;
    enemy.armor = 1;
}

/// Adds each equipped item's dominant stat to the player's damage or armor.
fn equip_player(player: &mut GameObject, items: Vec<Item>) {
    for itm in items.iter() {
        match itm.category {
            ItemCategory::Weapon => player.dmg += itm.val,
            ItemCategory::Armor => player.armor += itm.val,
            ItemCategory::Misc => match itm.class {
                ItemClass::Damage => player.dmg += itm.val,
                ItemClass::Defense => player.armor += itm.val,
            },
        }
    }
}

/// Total gold cost of a loadout.
fn get_gear_cost(items: Vec<Item>) -> i32 {
    items.iter().map(|itm| itm.cost).sum()
}

/// Fights to the death, alternating attacks with the player striking first.
/// Returns the winner and the number of full turns taken.
///
/// Fix: each hit now deals at least 1 damage (`max(1, dmg - armor)`), per the
/// puzzle rules. The previous version let a defender with armor >= attacker
/// damage absorb the hit entirely — or even *gain* hp — which could loop
/// forever and mis-scored heavily armored loadouts.
fn death_match(player: &mut GameObject, enemy: &mut GameObject) -> (GameObjectClass, i32) {
    let mut turns = 1;
    loop {
        enemy.hp -= cmp::max(1, player.dmg - enemy.armor);
        if enemy.hp <= 0 {
            return (GameObjectClass::Player, turns);
        }
        player.hp -= cmp::max(1, enemy.dmg - player.armor);
        if player.hp <= 0 {
            return (GameObjectClass::Enemy, turns);
        }
        turns += 1;
    }
}

/// Entry point: parse the shop file and print both answers.
pub fn run() {
    let items_list = read_shop_contents("inputs/day-21.txt");
    let shop_items = make_items_list(&items_list);
    find_optimal_gear(shop_items);
}
use std::collections::HashMap;

use hyper::http::Request;
use svc_authn::jose::ConfigMap;
use tower::ServiceExt;

use super::*;
use crate::app::http;
use crate::test_helpers::prelude::*;

/// `GET /healthz` must answer 200 with body "Ok" without any authentication.
#[tokio::test]
async fn test_healthz() {
    let state = TestState::new(TestAuthz::new()).await;
    let state = Arc::new(state) as Arc<dyn AppContext>;
    // No authn config needed for the health check route.
    let app = http::router(state, HashMap::new());
    let resp = app
        .oneshot(
            Request::builder()
                .uri("/healthz")
                .body(Body::empty())
                .unwrap(),
        )
        .await
        .unwrap();
    assert_eq!(resp.status(), 200);
    let body = hyper::body::to_bytes(resp.into_body()).await.unwrap();
    assert_eq!(&body[..], b"Ok");
}

/// Rollback happy path: an agent authorized for `scopes/rollback` posts to
/// `/api/scopes/{scope}/rollback` against a seeded frontend + scope and the
/// handler answers "Ok".
#[tokio::test]
async fn test_api_rollback() {
    let agent = TestAgent::new("web", "user123", USR_AUDIENCE);
    let token = agent.token();
    let mut authz = TestAuthz::new();
    authz.set_audience(SVC_AUDIENCE);
    authz.allow(agent.account_id(), vec!["scopes"], "rollback");
    let state = TestState::new(authz).await;
    let state = Arc::new(state) as Arc<dyn AppContext>;
    let app = crate::app::http::router(state.clone(), make_authn());
    let scope = shared_helpers::random_string();
    // Seed a frontend and a scope pointing at it so the rollback target exists.
    {
        let mut conn = state.get_conn().await.expect("Failed to get conn");
        let frontend = factory::Frontend::new("http://v2.testing00.foxford.ru".into())
            .execute(&mut conn)
            .await
            .expect("Failed to seed frontend");
        factory::Scope::new(scope.clone(), frontend.id, "webinar".into())
            .execute(&mut conn)
            .await
            .expect("Failed to seed scope");
    }
    let path = format!("/api/scopes/{}/rollback", scope);
    let req = Request::post(path)
        .header("Authorization", format!("Bearer {}", token))
        .body(Body::empty())
        .unwrap();
    let resp = app.oneshot(req).await.unwrap();
    // NOTE(review): the status assertion is commented out, so only the body is
    // checked — confirm whether a non-200 status with body "Ok" should pass.
    //assert_eq!(resp.status(), 200);
    let body = hyper::body::to_bytes(resp.into_body()).await.unwrap();
    assert_eq!(&body[..], b"Ok");
}

/// Builds the authn config map used by the router: a single issuer with an
/// ES256 key and the user audience.
fn make_authn() -> ConfigMap {
    use crate::test_helpers::*;
    serde_json::from_str(&format!(
        r###" {{ "{}": {{ "algorithm": "ES256", "audience": ["{}"], "key": "{}" }} }} "###,
        TOKEN_ISSUER, USR_AUDIENCE, PUBKEY_PATH
    ))
    .unwrap()
}
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

/// A single todo entry. Timestamps are (de)serialized through the custom
/// `my_date_format` module below using "%Y-%m-%d %H:%M:%S" (assumed UTC —
/// the format string carries no timezone).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TodoItem {
    pub id: Uuid,
    pub name: String,
    pub completed: bool,
    #[serde(with = "my_date_format")]
    pub created_date: DateTime<Utc>,
    #[serde(with = "my_date_format")]
    pub updated_date: DateTime<Utc>,
}

impl TodoItem {
    /// Creates a new, uncompleted item with a random v4 id and both
    /// timestamps set to the current time.
    pub fn new(name: &str) -> Self {
        TodoItem {
            id: Uuid::new_v4(),
            name: String::from(name),
            completed: false,
            created_date: Utc::now(),
            updated_date: Utc::now(),
        }
    }

    /// Sets the completion flag and bumps `updated_date`.
    pub fn set_completion(&mut self, is_complete: bool) -> &Self {
        self.completed = is_complete;
        self.updated_date = Utc::now();
        self
    }

    /// Renames the item and bumps `updated_date`.
    pub fn set_name(&mut self, name: &str) -> &Self {
        self.name = name.to_string();
        self.updated_date = Utc::now();
        self
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn it_creates_new_item() {
        let name = "test task";
        let item = TodoItem::new(name);
        assert_eq!(item.name, name);
        assert_eq!(item.completed, false);
    }

    #[test]
    fn it_sets_completion_status() {
        let name = "test task";
        let mut item = TodoItem::new(name);
        assert_eq!(item.completed, false);
        item.set_completion(true);
        assert_eq!(item.completed, true);
    }

    #[test]
    fn it_sets_name() {
        let name = "test task";
        let new_name = "new test task";
        let mut item = TodoItem::new(name);
        item.set_name(new_name);
        assert_eq!(item.name, new_name);
    }
}

/// Custom serde adapter: stores `DateTime<Utc>` as a plain
/// "%Y-%m-%d %H:%M:%S" string instead of chrono's default RFC 3339.
mod my_date_format {
    use chrono::{DateTime, TimeZone, Utc};
    use serde::{self, Deserialize, Deserializer, Serializer};

    const FORMAT: &str = "%Y-%m-%d %H:%M:%S";

    // The signature of a serialize_with function must follow the pattern:
    //
    //     fn serialize<S>(&T, S) -> Result<S::Ok, S::Error>
    //     where
    //         S: Serializer
    //
    // although it may also be generic over the input types T.
    pub fn serialize<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let s = format!("{}", date.format(FORMAT));
        serializer.serialize_str(&s)
    }

    // The signature of a deserialize_with function must follow the pattern:
    //
    //     fn deserialize<'de, D>(D) -> Result<T, D::Error>
    //     where
    //         D: Deserializer<'de>
    //
    // although it may also be generic over the output types T.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        // Sub-second precision is dropped on round-trip by design of FORMAT.
        Utc.datetime_from_str(&s, FORMAT)
            .map_err(serde::de::Error::custom)
    }
}
use std::sync::Arc; use crate::config::VolumeCtrl; pub mod mappings; use self::mappings::MappedCtrl; pub struct NoOpVolume; pub trait Mixer: Send + Sync { fn open(config: MixerConfig) -> Self where Self: Sized; fn set_volume(&self, volume: u16); fn volume(&self) -> u16; fn get_soft_volume(&self) -> Box<dyn VolumeGetter + Send> { Box::new(NoOpVolume) } } pub trait VolumeGetter { fn attenuation_factor(&self) -> f64; } impl VolumeGetter for NoOpVolume { fn attenuation_factor(&self) -> f64 { 1.0 } } pub mod softmixer; use self::softmixer::SoftMixer; #[cfg(feature = "alsa-backend")] pub mod alsamixer; #[cfg(feature = "alsa-backend")] use self::alsamixer::AlsaMixer; #[derive(Debug, Clone)] pub struct MixerConfig { pub device: String, pub control: String, pub index: u32, pub volume_ctrl: VolumeCtrl, } impl Default for MixerConfig { fn default() -> MixerConfig { MixerConfig { device: String::from("default"), control: String::from("PCM"), index: 0, volume_ctrl: VolumeCtrl::default(), } } } pub type MixerFn = fn(MixerConfig) -> Arc<dyn Mixer>; fn mk_sink<M: Mixer + 'static>(config: MixerConfig) -> Arc<dyn Mixer> { Arc::new(M::open(config)) } pub const MIXERS: &[(&str, MixerFn)] = &[ (SoftMixer::NAME, mk_sink::<SoftMixer>), // default goes first #[cfg(feature = "alsa-backend")] (AlsaMixer::NAME, mk_sink::<AlsaMixer>), ]; pub fn find(name: Option<&str>) -> Option<MixerFn> { if let Some(name) = name { MIXERS .iter() .find(|mixer| name == mixer.0) .map(|mixer| mixer.1) } else { MIXERS.first().map(|mixer| mixer.1) } }
use crate::testing::*;

/// Attributes are recognized by the parser but rejected: this pins both the
/// error kind and the span, which must cover exactly the `#[foo] #[bar]`
/// attribute list (bytes 12..25 of the snippet).
#[test]
fn test_bad_attributes() {
    assert_parse_error! {
        r#"fn main() { #[foo] #[bar] hello }"#,
        span,
        AttributesNotSupported => {
            assert_eq!(span, Span::new(12, 25));
        }
    };
}
extern crate num_traits; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; pub trait Sqrt { type Output; fn sqrt(self) -> Self::Output; } impl Sqrt for f32 { type Output = f32; fn sqrt(self) -> Self::Output { self.sqrt() } } impl Sqrt for f64 { type Output = f64; fn sqrt(self) -> Self::Output { self.sqrt() } } impl Sqrt for i32 { type Output = i32; fn sqrt(self) -> Self::Output { (self as f32).sqrt() as i32 } } #[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] pub struct Vec3<T> { pub x: T, pub y: T, pub z: T, } impl<T: Sqrt<Output = T> + num_traits::Num + Copy> Vec3<T> { pub fn zero() -> Vec3<T> { Vec3 { x: T::zero(), y: T::zero(), z: T::zero(), } } pub fn one() -> Vec3<T> { Vec3 { x: T::one(), y: T::one(), z: T::one(), } } pub fn new(x: T, y: T, z: T) -> Vec3<T> { Vec3 { x: x, y: y, z: z } } // Returns a new copy of self with the x-value replaced // with the specified value. pub fn with_x(self, x: T) -> Vec3<T> { return Vec3 { x: x, y: self.y, z: self.z, }; } // Returns a new copy of self with the y-value replaced // with the specified value. pub fn with_y(self, y: T) -> Vec3<T> { return Vec3 { x: self.x, y: y, z: self.z, }; } // Returns a new copy of self with the z-value replaced // with the specified value. pub fn with_z(self, z: T) -> Vec3<T> { return Vec3 { x: self.x, y: self.y, z: z, }; } pub fn normalize(self) -> Vec3<T> { self / self.length() } pub fn length(&self) -> T { return self.length_squared().sqrt(); } pub fn dot(a: &Vec3<T>, b: &Vec3<T>) -> T { return a.x * b.x + a.y * b.y + a.z * b.z; } pub fn length_squared(&self) -> T { Vec3::dot(&self, &self) } pub fn cross(a: &Vec3<T>, b: &Vec3<T>) -> Vec3<T> { return Vec3 { x: a.y * b.z - a.z * b.y, y: a.z * b.x - a.x * b.z, z: a.x * b.y - a.y * b.x, }; } } // This macro helps us implement math operators on Vector3 // in such a way that it handles binary operators on any // combination of Vec3, &Vec3 and f32. macro_rules! 
impl_binary_operations {
    // $VectorType is something like `Vec3`
    // $Op is something like `Add`
    // $op_fn is something like `add`
    // $op_symbol is something like `+`
    ($VectorType:ident $Op:ident $op_fn:ident $op_symbol:tt) => {
        // Implement a + b where a and b are both of type &VectorType.
        // Lower down we'll implement cases where either a or b - or both
        // - are values by forwarding through to this implementation.
        impl<'a, 'b, T: $Op<Output = T> + Copy> $Op<&'a $VectorType<T>> for &'b $VectorType<T> {
            type Output = $VectorType<T>;
            fn $op_fn(self, other: &'a $VectorType<T>) -> $VectorType<T> {
                $VectorType {
                    x: self.x $op_symbol other.x,
                    y: self.y $op_symbol other.y,
                    z: self.z $op_symbol other.z,
                }
            }
        }

        // Implement a + b for the cases...
        //
        //   a: $VectorType,  b: &$VectorType
        //   a: &$VectorType, b: $VectorType
        //   a: $VectorType,  b: $VectorType
        //
        // In each case we forward through to the implementation above.
        impl<T: $Op<Output = T> + Copy> $Op<$VectorType<T>> for $VectorType<T> {
            type Output = $VectorType<T>;
            #[inline]
            fn $op_fn(self, other: $VectorType<T>) -> $VectorType<T> {
                &self $op_symbol &other
            }
        }

        impl<'a, T: $Op<Output = T> + Copy> $Op<&'a $VectorType<T>> for $VectorType<T> {
            type Output = $VectorType<T>;
            #[inline]
            fn $op_fn(self, other: &'a $VectorType<T>) -> $VectorType<T> {
                &self $op_symbol other
            }
        }

        impl<'a, T: $Op<Output = T> + Copy> $Op<$VectorType<T>> for &'a $VectorType<T> {
            type Output = $VectorType<T>;
            #[inline]
            fn $op_fn(self, other: $VectorType<T>) -> $VectorType<T> {
                self $op_symbol &other
            }
        }

        // Implement a + b where a is type &$VectorType and b is a scalar T
        // (broadcast: the scalar is applied component-wise).
        impl<'a, T: $Op<T, Output = T> + Copy> $Op<T> for &'a $VectorType<T> {
            type Output = $VectorType<T>;
            fn $op_fn(self, other: T) -> $VectorType<T> {
                $VectorType {
                    x: self.x $op_symbol other,
                    y: self.y $op_symbol other,
                    z: self.z $op_symbol other
                }
            }
        }

        // Implement a + b where...
        //
        // a is $VectorType and b is a scalar T; forwarded to the
        // implementation above. (The scalar-on-the-left forms below are
        // intentionally left commented out: a generic `impl Op<Vec3<T>> for T`
        // would be a blanket impl on a foreign type parameter and is kept for
        // reference only.)
        impl<T: $Op<T, Output = T> + Copy> $Op<T> for $VectorType<T> {
            type Output = $VectorType<T>;
            #[inline]
            fn $op_fn(self, other: T) -> $VectorType<T> {
                &self $op_symbol other
            }
        }

        // impl<T: $Op<Output = T>> $Op<$VectorType<T>> for T {
        //   type Output = $VectorType<T>;
        //
        //   #[inline]
        //   fn $op_fn(self, other: $VectorType<T>) -> $VectorType<T> {
        //     &other $op_symbol self
        //   }
        // }

        // impl<'a, T: $Op<Output = T>> $Op<&'a $VectorType<T>> for T {
        //   type Output = $VectorType<T>;
        //
        //   #[inline]
        //   fn $op_fn(self, other: &'a $VectorType<T>) -> $VectorType<T> {
        //     other $op_symbol self
        //   }
        // }
    };
}

// It also implements unary operators like - a where a is of
// type Vec3 or &Vec3.
macro_rules! impl_unary_operations {
    // $VectorType is something like `Vec3`
    // $Op is something like `Neg`
    // $op_fn is something like `neg`
    // $op_symbol is something like `-`
    ($VectorType:ident $Op:ident $op_fn:ident $op_symbol:tt) => {
        // Implement the unary operator for references
        impl<'a, T: $Op<Output = T> + Copy> $Op for &'a $VectorType<T> {
            type Output = $VectorType<T>;
            fn $op_fn(self) -> $VectorType<T> {
                $VectorType {
                    x: $op_symbol self.x,
                    y: $op_symbol self.y,
                    z: $op_symbol self.z,
                }
            }
        }

        // Have the operator on values forward through to the implementation
        // above
        impl<T: $Op<Output = T> + Copy> $Op for $VectorType<T> {
            type Output = $VectorType<T>;
            #[inline]
            fn $op_fn(self) -> $VectorType<T> {
                $op_symbol &self
            }
        }
    };
}

// Implement add-assignment operators like a += b where a and
// b is either &Vec3 or Vec3 (in this case a is always of type
// &mut Vec3).
macro_rules!
impl_op_assign { // $VectorType is something like `Vec3` // $OpAssign is something like `AddAssign` // $op_fn is something like `add_assign` // $op_symbol is something like `+=` ($VectorType:ident $Op:ident $OpAssign:ident $op_fn:ident $op_symbol:tt) => { // Implement $OpAssign for RHS &Vec3 impl<'a, T: $Op<T, Output = T> + Copy> $OpAssign<&'a $VectorType<T>> for $VectorType<T> { fn $op_fn(&mut self, other: &'a $VectorType<T>) { *self = $VectorType { x: self.x $op_symbol other.x, y: self.y $op_symbol other.y, z: self.z $op_symbol other.z, }; } } // Implement $OpAssign for RHS Vec3 by forwarding through to the // implementation above impl<T: $Op<T, Output = T> + Copy> $OpAssign<$VectorType<T>> for $VectorType<T> { #[inline] fn $op_fn(&mut self, other: $VectorType<T>) { *self = *self $op_symbol &other } } // Implement $OpAssign for RHS Vec3 by forwarding through to the // implementation above impl<T: $Op<T, Output = T> + Copy> $OpAssign<T> for $VectorType<T> { #[inline] fn $op_fn(&mut self, other: T) { *self = $VectorType { x: self.x $op_symbol other, y: self.y $op_symbol other, z: self.z $op_symbol other, }; } } }; } impl_binary_operations!(Vec3 Add add +); impl_op_assign!(Vec3 Add AddAssign add_assign +); impl_binary_operations!(Vec3 Sub sub -); impl_op_assign!(Vec3 Sub SubAssign sub_assign -); impl_unary_operations!(Vec3 Neg neg -); impl_binary_operations!(Vec3 Mul mul *); impl_op_assign!(Vec3 Mul MulAssign mul_assign *); impl_binary_operations!(Vec3 Div div /); impl_op_assign!(Vec3 Div DivAssign div_assign /); // // An example impl for vector add // impl<T> Add<&Vec3<T>> for Vec3<T> // where // T : Add<Output = T> + Copy, { // type Output = Vec3<T>; // fn add(self, other: &Vec3<T>) -> Vec3<T> { // Vec3 { // x: self.x + other.x, // y: self.y + other.y, // z: self.z + other.z, // } // } // } #[cfg(test)] mod tests { use super::*; #[test] fn add() { let a = Vec3::new(0.0, 1.0, 2.0); let b = Vec3::new(3.0, 4.0, 5.0); assert_eq!(&a + &b, Vec3::new(3.0, 5.0, 7.0)); 
assert_eq!(a + &b, Vec3::new(3.0, 5.0, 7.0)); assert_eq!(&a + b, Vec3::new(3.0, 5.0, 7.0)); assert_eq!(a + b, Vec3::new(3.0, 5.0, 7.0)); // Test for RHS value type { let mut c = Vec3::one(); c += a; assert_eq!(c, Vec3::new(1.0, 2.0, 3.0)); } // Test for RHS borrowed reference { let mut c = Vec3::one(); c += &a; assert_eq!(c, Vec3::new(1.0, 2.0, 3.0)); } } }
use super::*;
use graph::{EdgeT, Graph, NodeT};

// Python bindings (pyo3) for constructing an EnsmallenGraph from CSV files.
// Both constructors accept the same large kwargs surface, forwarded to
// `build_csv_file_reader`; they differ only in whether the edge list is
// expected to be pre-sorted.
#[pymethods]
impl EnsmallenGraph {
    #[staticmethod]
    #[args(py_kwargs = "**")]
    #[text_signature = "(edge_path, directed, *, directed_edge_list, sources_column_number, sources_column, destinations_column_number, destinations_column, edge_types_column_number, edge_types_column, default_edge_type, weights_column_number, weights_column, default_weight, skip_self_loops, ignore_duplicated_edges, edge_header, edge_rows_to_skip, edge_separator, node_path, nodes_column_number, nodes_column, node_types_column_number, node_types_column, default_node_type, ignore_duplicated_nodes, node_header, node_rows_to_skip, node_separator, numeric_node_ids, numeric_edge_node_ids, numeric_node_type_ids, numeric_edge_type_ids, edge_file_comment_symbol, node_file_comment_symbol, skip_weights_if_unavailable, skip_edge_types_if_unavailable, skip_node_types_if_unavailable, name, verbose)"]
    /// Return graph loaded from given edge file and optionally node file.
    ///
    /// Parameters
    /// -------------------------------
    /// edge_path: String,
    ///     The path from where to load the edge file.
    /// directed: bool,
    ///     Whether to load the graph as directed or undirected.
    /// directed_edge_list: bool = False,
    ///     Whether to load the edge list as directed or undirected.
    ///     The default behaviour is to load the list as undirected and handle the
    ///     undirected edges automatically if the parameter `directed=False`.
    /// sources_column_number: int = 0,
    ///     The column number of the sources of the edges.
    ///     This value is overwritten by the source column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// sources_column: str = None,
    ///     Name of the column to be loaded as source of the edges.
    /// destinations_column_number: int = 1,
    ///     The column number of the destinations of the edges.
    ///     This value is overwritten by the destination column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// destinations_column: str = None,
    ///     Name of the column to be loaded as destination of the edges.
    /// edge_types_column_number: int = None,
    ///     The column number of the edge type of the edges.
    ///     This value is overwritten by the edge types column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// edge_types_column: str = None,
    ///     Name of the column to be loaded as edge type of the edges.
    /// default_edge_type: str = None,
    ///     String representing the default edge type to use when the edge type
    ///     in the provided column is empty.
    /// weights_column_number: int = None,
    ///     The column number of the weight of the edges.
    ///     This value is overwritten by the weights column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// weights_column: str = None,
    ///     Name of the column to be loaded as weight of the edges.
    /// default_weight: float = None,
    ///     Float representing the default weight to use when the weight
    ///     in the provided column is empty.
    /// skip_self_loops: bool = False,
    ///     Whether to skip self loops while loading the edge file.
    /// ignore_duplicated_edges: bool = True,
    ///     Whether to skip duplicated edges while loading the edge file.
    ///     When NOT ignoring the duplicated edges, an exception with information
    ///     on the duplicated edge will be raised.
    ///     When ignoring the edge type while reading the file duplicated edges
    ///     in a multi-graph will be marked as duplicates.
    /// edge_header: bool = True,
    ///     Whether to expect the first line of the edge file to be a header.
    /// edge_rows_to_skip: int = 0,
    ///     If the edge file has some descriptive text in the first few lines,
    ///     this is the parameter that allows you to skip it.
    /// edge_separator: str = "\t",
    ///     The expected separator for the edge file.
    /// node_path: str = None,
    ///     The path from where to load the node file.
    ///     If one is not provided, no node types will be loaded and the graph
    ///     might end-up with node IDs that are not aligned with other subgraphs
    ///     from the same edge file.
    /// nodes_column_number: int = None,
    ///     The column number of the node Ids.
    ///     This value is overwritten by the nodes column value if one is provided.
    ///     If the node file you are loading does not have a header, remember
    ///     to set the node_header parameter to false.
    /// nodes_column: str = None,
    ///     Name of the column to be loaded as node Ids.
    /// node_types_column_number: int = None,
    ///     The column number of the node type of the nodes.
    ///     This value is overwritten by the node types column value if one is provided.
    ///     If the node file you are loading does not have a header, remember
    ///     to set the node_header parameter to false.
    /// node_types_column: str = None,
    ///     Name of the column to be loaded as node types.
    /// default_node_type: str = None,
    ///     String representing the default node type to use when the node type
    ///     in the provided column is empty.
    /// ignore_duplicated_nodes: bool = True,
    ///     Whether to skip duplicated nodes while loading the node file.
    ///     When NOT ignoring the duplicated nodes, an exception with information
    ///     on the duplicated node will be raised.
    /// node_header: bool = True,
    ///     Whether to expect the first line of the node file to be a header.
    /// node_rows_to_skip: int = 0,
    ///     If the node file has some descriptive text in the first few lines,
    ///     this is the parameter that allows you to skip it.
    /// node_separator: str = "\t",
    ///     The expected separator for the node file.
    /// numeric_node_ids: bool = False,
    ///     Whether to load the Node Ids as numeric.
    /// numeric_edge_node_ids: bool = False,
    ///     Whether to load the edge file Node Ids as numeric.
    /// numeric_node_type_ids: bool = False,
    ///     Whether to load the Node Type Ids as numeric.
    /// numeric_edge_type_ids: bool = False,
    ///     Whether to load the Edge Type Ids as numeric.
    /// edge_file_comment_symbol: str = None,
    ///     The symbol to use for the lines to be ignored in the edge file.
    /// node_file_comment_symbol: str = None,
    ///     The symbol to use for the lines to be ignored in the node file.
    /// skip_weights_if_unavailable: bool = False,
    ///     Whether to skip the loading of the weights even if requested but
    ///     in the file the column is actually unavailable.
    /// skip_edge_types_if_unavailable: bool = False,
    ///     Whether to skip the loading of the edge types even if requested but
    ///     in the file the column is actually unavailable.
    /// skip_node_types_if_unavailable: bool = False,
    ///     Whether to skip the loading of the node types even if requested but
    ///     in the file the column is actually unavailable.
    /// name: str = "Graph",
    ///     The name of the graph to use.
    /// verbose: bool = True,
    ///     Whether to load the files verbosely, showing a loading bar.
    ///
    /// Raises
    /// ------------------------
    /// ValueError,
    ///     TODO: Update the list of raised exceptions.
    ///
    /// Returns
    /// ------------------------
    /// The loaded graph.
    fn from_unsorted_csv(
        edge_path: String,
        directed: bool,
        py_kwargs: Option<&PyDict>,
    ) -> PyResult<EnsmallenGraph> {
        // Allow Ctrl-C to abort the (potentially long) load; the previous
        // handler, if any, is intentionally discarded.
        let _ = ctrlc::set_handler(|| std::process::exit(2));
        // Translate the kwargs dict into edge/node file readers.
        let (edges, nodes, name, directed_edge_list) =
            pyex!(build_csv_file_reader(edge_path, py_kwargs))?;
        Ok(EnsmallenGraph {
            graph: pyex!(Graph::from_unsorted_csv(
                edges,
                nodes,
                directed,
                directed_edge_list,
                name,
            ))?,
        })
    }

    #[staticmethod]
    #[args(py_kwargs = "**")]
    #[text_signature = "(edge_path, directed, *, directed_edge_list, sources_column_number, sources_column, destinations_column_number, destinations_column, edge_types_column_number, edge_types_column, default_edge_type, weights_column_number, weights_column, default_weight, skip_self_loops, ignore_duplicated_edges, edge_header, edge_rows_to_skip, edge_separator, node_path, nodes_column_number, nodes_column, node_types_column_number, node_types_column, default_node_type, ignore_duplicated_nodes, node_header, node_rows_to_skip, node_separator, numeric_node_ids, numeric_edge_node_ids, numeric_node_type_ids, numeric_edge_type_ids, edge_file_comment_symbol, node_file_comment_symbol, skip_weights_if_unavailable, skip_edge_types_if_unavailable, skip_node_types_if_unavailable, name, verbose, )"]
    /// Return graph loaded from given edge file and optionally node file.
    ///
    /// Parameters
    /// -------------------------------
    /// edge_path: String,
    ///     The path from where to load the edge file.
    /// directed: bool,
    ///     Whether to load the graph as directed or undirected.
    /// directed_edge_list: bool = False,
    ///     Whether to load the edge list as directed or undirected.
    ///     The default behaviour is to load the list as undirected and handle the
    ///     undirected edges automatically if the parameter `directed=False`.
    /// sources_column_number: int = 0,
    ///     The column number of the sources of the edges.
    ///     This value is overwritten by the source column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// sources_column: str = None,
    ///     Name of the column to be loaded as source of the edges.
    /// destinations_column_number: int = 1,
    ///     The column number of the destinations of the edges.
    ///     This value is overwritten by the destination column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// destinations_column: str = None,
    ///     Name of the column to be loaded as destination of the edges.
    /// edge_types_column_number: int = None,
    ///     The column number of the edge type of the edges.
    ///     This value is overwritten by the edge types column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// edge_types_column: str = None,
    ///     Name of the column to be loaded as edge type of the edges.
    /// default_edge_type: str = None,
    ///     String representing the default edge type to use when the edge type
    ///     in the provided column is empty.
    /// weights_column_number: int = None,
    ///     The column number of the weight of the edges.
    ///     This value is overwritten by the weights column value if one is provided.
    ///     If the edge file you are loading does not have a header, remember
    ///     to set the edge_header parameter to false.
    /// weights_column: str = None,
    ///     Name of the column to be loaded as weight of the edges.
    /// default_weight: float = None,
    ///     Float representing the default weight to use when the weight
    ///     in the provided column is empty.
    /// skip_self_loops: bool = False,
    ///     Whether to skip self loops while loading the edge file.
    /// ignore_duplicated_edges: bool = True,
    ///     Whether to skip duplicated edges while loading the edge file.
    ///     When NOT ignoring the duplicated edges, an exception with information
    ///     on the duplicated edge will be raised.
    ///     When ignoring the edge type while reading the file duplicated edges
    ///     in a multi-graph will be marked as duplicates.
    /// edge_header: bool = True,
    ///     Whether to expect the first line of the edge file to be a header.
    /// edge_rows_to_skip: int = 0,
    ///     If the edge file has some descriptive text in the first few lines,
    ///     this is the parameter that allows you to skip it.
    /// edge_separator: str = "\t",
    ///     The expected separator for the edge file.
    /// node_path: str = None,
    ///     The path from where to load the node file.
    ///     If one is not provided, no node types will be loaded and the graph
    ///     might end-up with node IDs that are not aligned with other subgraphs
    ///     from the same edge file.
    /// nodes_column_number: int = None,
    ///     The column number of the node Ids.
    ///     This value is overwritten by the nodes column value if one is provided.
    ///     If the node file you are loading does not have a header, remember
    ///     to set the node_header parameter to false.
    /// nodes_column: str = None,
    ///     Name of the column to be loaded as node Ids.
    /// node_types_column_number: int = None,
    ///     The column number of the node type of the nodes.
    ///     This value is overwritten by the node types column value if one is provided.
    ///     If the node file you are loading does not have a header, remember
    ///     to set the node_header parameter to false.
    /// node_types_column: str = None,
    ///     Name of the column to be loaded as node types.
    /// default_node_type: str = None,
    ///     String representing the default node type to use when the node type
    ///     in the provided column is empty.
    /// ignore_duplicated_nodes: bool = True,
    ///     Whether to skip duplicated nodes while loading the node file.
    ///     When NOT ignoring the duplicated nodes, an exception with information
    ///     on the duplicated node will be raised.
    /// node_header: bool = True,
    ///     Whether to expect the first line of the node file to be a header.
    /// node_rows_to_skip: int = 0,
    ///     If the node file has some descriptive text in the first few lines,
    ///     this is the parameter that allows you to skip it.
    /// node_separator: str = "\t",
    ///     The expected separator for the node file.
    /// numeric_node_ids: bool = False,
    ///     Whether to load the Node Ids as numeric.
    /// numeric_edge_node_ids: bool = False,
    ///     Whether to load the edge file Node Ids as numeric.
    /// numeric_node_type_ids: bool = False,
    ///     Whether to load the Node Type Ids as numeric.
    /// numeric_edge_type_ids: bool = False,
    ///     Whether to load the Edge Type Ids as numeric.
    /// edge_file_comment_symbol: str = None,
    ///     The symbol to use for the lines to be ignored in the edge file.
    /// node_file_comment_symbol: str = None,
    ///     The symbol to use for the lines to be ignored in the node file.
    /// skip_weights_if_unavailable: bool = False,
    ///     Whether to skip the loading of the weights even if requested but
    ///     in the file the column is actually unavailable.
    /// skip_edge_types_if_unavailable: bool = False,
    ///     Whether to skip the loading of the edge types even if requested but
    ///     in the file the column is actually unavailable.
    /// skip_node_types_if_unavailable: bool = False,
    ///     Whether to skip the loading of the node types even if requested but
    ///     in the file the column is actually unavailable.
    /// name: str = "Graph",
    ///     The name of the graph to use.
    /// verbose: bool = True,
    ///     Whether to load the files verbosely, showing a loading bar.
    ///
    /// Raises
    /// ------------------------
    /// ValueError,
    ///     TODO: Update the list of raised exceptions.
    ///
    /// Returns
    /// ------------------------
    /// The loaded graph.
    fn from_sorted_csv(
        edge_path: String,
        directed: bool,
        nodes_number: NodeT,
        edges_number: EdgeT,
        py_kwargs: Option<&PyDict>,
    ) -> PyResult<EnsmallenGraph> {
        // Allow Ctrl-C to abort the (potentially long) load; the previous
        // handler, if any, is intentionally discarded.
        let _ = ctrlc::set_handler(|| std::process::exit(2));
        // Translate the kwargs dict into edge/node file readers.
        let (edges, nodes, name, directed_edge_list) =
            pyex!(build_csv_file_reader(edge_path, py_kwargs))?;
        Ok(EnsmallenGraph {
            graph: pyex!(Graph::from_sorted_csv(
                edges,
                nodes,
                directed,
                directed_edge_list,
                edges_number,
                nodes_number,
                name
            ))?,
        })
    }
}
use sdl2::rect::{Point, Rect};
use sdl2::render::Texture;
use std::rc::Rc;

use crate::app::application::WindowCanvas;
use crate::app::{UpdateResult as UR, UpdateResult};
use crate::renderer::managers::*;
use rider_config::*;

pub mod buttons;
pub mod caret;
pub mod file;
pub mod file_editor;
pub mod filesystem;
pub mod icon;
pub mod label;
pub mod menu_bar;
pub mod modal;
pub mod project_tree;
pub mod scroll_bar;
pub mod text_character;

pub use self::buttons::*;
pub use self::caret::*;
pub use self::file::*;
pub use self::file_editor::*;
pub use self::filesystem::*;
pub use self::label::*;
pub use self::menu_bar::*;
pub use self::modal::*;
pub use self::project_tree::*;
pub use self::scroll_bar::*;
pub use self::text_character::*;

use crate::renderer::Renderer;

/// Extra information passed into `update`/click handlers by the parent widget.
#[derive(Debug)]
pub enum UpdateContext<'l> {
    /// No extra context.
    Nothing,
    /// Absolute position of the parent; children render relative to it.
    ParentPosition(Point),
    /// The file currently open in the editor.
    CurrentFile(&'l mut EditorFile),
    /// Scroll offset applied by an enclosing scrollable container.
    ScrolledBy(Point),
}

/// Extra information passed into `render` by the parent widget.
#[derive(Clone, PartialEq, Debug)]
pub enum RenderContext {
    Nothing,
    ParentPosition(Point),
}

/// Minimal drawing surface abstraction so widgets can be rendered to a real
/// SDL canvas in production and to a mock in tests.
#[cfg_attr(tarpaulin, skip)]
pub trait CanvasAccess {
    /// Fills `rect` with `color`.
    fn render_rect(&mut self, rect: Rect, color: sdl2::pixels::Color) -> Result<(), String>;
    /// Draws only the outline of `rect` in `color`.
    fn render_border(&mut self, rect: Rect, color: sdl2::pixels::Color) -> Result<(), String>;
    /// Copies the `src` region of `tex` into the `dest` region of the canvas.
    fn render_image(&mut self, tex: Rc<Texture>, src: Rect, dest: Rect) -> Result<(), String>;
    /// Draws a line from `start` to `end` in `color`.
    fn render_line(
        &mut self,
        start: Point,
        end: Point,
        color: sdl2::pixels::Color,
    ) -> Result<(), String>;
    /// Enables clipping to `rect` (convenience wrapper over `set_clip_rect`).
    fn set_clipping(&mut self, rect: Rect);
    /// Sets or clears (with `None`) the clip rectangle.
    fn set_clip_rect(&mut self, rect: Option<Rect>);
    /// Returns the current clip rectangle, if any.
    fn clip_rect(&self) -> Option<Rect>;
}

#[cfg_attr(tarpaulin, skip)]
impl CanvasAccess for WindowCanvas {
    fn render_rect(&mut self, rect: Rect, color: sdl2::pixels::Color) -> Result<(), String> {
        self.set_draw_color(color);
        self.fill_rect(rect)
    }

    fn render_border(&mut self, rect: Rect, color: sdl2::pixels::Color) -> Result<(), String> {
        self.set_draw_color(color);
        self.draw_rect(rect)
    }

    fn render_image(&mut self, tex: Rc<Texture>, src: Rect, dest: Rect) -> Result<(), String> {
        // No rotation, no flipping — a plain textured blit.
        self.copy_ex(&tex, Some(src), Some(dest), 0.0, None, false, false)
    }

    fn render_line(
        &mut self,
        start: Point,
        end: Point,
        color: sdl2::pixels::Color,
    ) -> Result<(), String> {
        self.set_draw_color(color);
        self.draw_line(start, end)
    }

    fn set_clipping(&mut self, rect: Rect) {
        // NOTE(review): this resolves to sdl2's inherent Canvas method
        // (inherent methods shadow the trait method of the same name here),
        // so there is no infinite recursion — confirm against the sdl2 crate.
        self.set_clip_rect(rect);
    }

    fn set_clip_rect(&mut self, rect: Option<Rect>) {
        // Same shadowing note as `set_clipping` above.
        self.set_clip_rect(rect);
    }

    fn clip_rect(&self) -> Option<Rect> {
        self.clip_rect()
    }
}

/// Builds a `FontDetails` (path + character size) from the editor config
/// carried by any `ConfigHolder`.
#[inline]
pub fn build_font_details<T>(config_holder: &T) -> FontDetails
where
    T: ConfigHolder,
{
    let c = config_holder.config().read().unwrap();
    (
        c.editor_config().font_path(),
        c.editor_config().character_size(),
    )
        .into()
}

/// Measures character `c` with the editor font and returns its bounding
/// rectangle anchored at the origin, or `None` if the glyph can't be sized.
#[cfg_attr(tarpaulin, skip)]
pub fn get_text_character_rect<'l, T>(c: char, renderer: &mut T) -> Option<Rect>
where
    T: Renderer + ConfigHolder,
{
    let font_details = renderer.config().read().unwrap().editor_config().into();
    renderer
        .load_font(font_details)
        .size_of_char(c)
        .ok()
        .and_then(|(width, height)| Some(Rect::new(0, 0, width, height)))
}

/// Translates rectangle `d` by offset `p` without changing its size.
#[inline]
pub fn move_render_point(p: Point, d: &Rect) -> Rect {
    Rect::new(d.x() + p.x(), d.y() + p.y(), d.width(), d.height())
}

/// Per-frame update hook.
pub trait Update {
    fn update(&mut self, ticks: i32, context: &UpdateContext) -> UR;
}

/// Mouse click handling: hit-testing plus the click reaction itself.
pub trait ClickHandler {
    fn on_left_click(&mut self, point: &Point, context: &UpdateContext) -> UR;

    fn is_left_click_target(&self, point: &Point, context: &UpdateContext) -> bool;
}

/// Anything that occupies a rectangle on screen.
pub trait RenderBox {
    fn render_start_point(&self) -> Point;

    fn dest(&self) -> Rect;
}

/// Shared state for concrete widgets: texture source rect, on-screen dest
/// rect, and a handle to the application config.
pub struct WidgetInner {
    source: Rect,
    dest: Rect,
    config: ConfigAccess,
}

impl WidgetInner {
    pub fn new(config: ConfigAccess, source: Rect, dest: Rect) -> Self {
        Self {
            dest,
            source,
            config,
        }
    }
}

/// Default behaviour shared by all widgets. Only the four rect accessors and
/// `texture_path` are mandatory; everything else has a sensible default.
pub trait Widget {
    /// Path of the texture to draw, or `None` for textureless widgets.
    fn texture_path(&self) -> Option<String>;

    /// Target rectangle on screen.
    fn dest(&self) -> &Rect;

    fn set_dest(&mut self, rect: &Rect);

    /// Source rectangle inside the texture.
    fn source(&self) -> &Rect;

    fn set_source(&mut self, rect: &Rect);

    /// Per-frame update; default does nothing.
    fn update(&mut self, _ticks: i32, _context: &UpdateContext) -> UpdateResult {
        UpdateResult::NoOp
    }

    /// Left-click reaction; default does nothing.
    fn on_left_click(&mut self, _point: &Point, _context: &UpdateContext) -> UpdateResult {
        UpdateResult::NoOp
    }

    /// Hit test: the widget's dest rect, shifted by the parent position or
    /// scroll offset when the context carries one, must contain `point`.
    fn is_left_click_target(&self, point: &Point, context: &UpdateContext) -> bool {
        match *context {
            UpdateContext::ParentPosition(p) | UpdateContext::ScrolledBy(p) => {
                move_render_point(p.clone(), &self.dest())
            }
            _ => self.dest().clone(),
        }
        .contains_point(point.clone())
    }

    fn render_start_point(&self) -> Point {
        self.dest().top_left()
    }

    /// Clip rectangle used while rendering: the widget's relative dest,
    /// enlarged by the padding hooks below.
    fn clipping(&self, relative_dest: &Rect) -> Rect {
        Rect::new(
            relative_dest.x(),
            relative_dest.y(),
            relative_dest.width() + self.padding_width(),
            relative_dest.height() + self.padding_height(),
        )
    }

    /// Extra horizontal clip padding; default none.
    fn padding_width(&self) -> u32 {
        0
    }

    /// Extra vertical clip padding; default none.
    fn padding_height(&self) -> u32 {
        0
    }

    /// Whether `render` should install a clip rect first; default no.
    fn use_clipping(&self) -> bool {
        false
    }

    /// Default render: resolve the absolute dest rect, optionally clip, then
    /// blit the widget texture (if it has one and it loads successfully).
    fn render<C, R>(&self, canvas: &mut C, renderer: &mut R, context: &RenderContext)
    where
        C: CanvasAccess,
        R: Renderer + CharacterSizeManager + ConfigHolder,
    {
        let mut dest = match context {
            &RenderContext::ParentPosition(p) => move_render_point(p.clone(), &self.dest()),
            _ => self.dest().clone(),
        };
        if self.use_clipping() {
            canvas.set_clipping(self.clipping(&dest));
        }
        self.texture_path()
            // A texture that fails to load is silently skipped (best-effort
            // rendering); only a failed draw of a *loaded* texture panics.
            .and_then(|path| renderer.load_image(path).ok())
            .and_then(|texture| {
                dest.set_width(self.dest().width());
                dest.set_height(self.dest().height());
                canvas
                    .render_image(texture.clone(), self.source().clone(), dest.clone())
                    .unwrap_or_else(|e| panic!("Failed to draw widget texture. {}", e));
                Some(())
            });
    }

    /// One-time setup hook (e.g. measuring text); default does nothing.
    fn prepare_ui<'l, T>(&mut self, _renderer: &mut T)
    where
        T: Renderer + CharacterSizeManager + ConfigHolder,
    {
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::*;
    use rider_derive::*;
    use std::ops::{Deref, DerefMut};

    // Wraps a ConfigAccess so build_font_details can be exercised.
    struct ConfigWrapper {
        pub inner: ConfigAccess,
    }

    impl ConfigHolder for ConfigWrapper {
        fn config(&self) -> &ConfigAccess {
            &self.inner
        }
    }

    // Minimal Widget implementation exercising only the trait defaults.
    struct Dummy {
        pub inner: WidgetInner,
    }

    impl Dummy {
        pub fn new(config: ConfigAccess) -> Self {
            Self {
                inner: WidgetInner::new(config, Rect::new(0, 1, 2, 3), Rect::new(4, 5, 6, 7)),
            }
        }
    }

    impl Deref for Dummy {
        type Target = WidgetInner;

        fn deref(&self) -> &Self::Target {
            &self.inner
        }
    }

    impl DerefMut for Dummy {
        fn deref_mut(&mut self) -> &mut Self::Target {
            &mut self.inner
        }
    }

    impl Widget for Dummy {
        fn texture_path(&self) -> Option<String> {
            None
        }

        fn dest(&self) -> &Rect {
            &self.dest
        }

        fn set_dest(&mut self, rect: &Rect) {
            self.dest = rect.clone();
        }

        fn source(&self) -> &Rect {
            &self.source
        }

        fn set_source(&mut self, rect: &Rect) {
            self.source = rect.clone();
        }
    }

    #[test]
    fn must_return_moved_rect() {
        let rect = Rect::new(10, 20, 30, 40);
        let point = Point::new(11, 11);
        assert_eq!(move_render_point(point, &rect), Rect::new(21, 31, 30, 40));
    }

    #[test]
    fn must_build_font_details() {
        let config = build_config();
        let wrapper = ConfigWrapper {
            inner: config.clone(),
        };
        let details = build_font_details(&wrapper);
        let c = config.read().unwrap();
        assert_eq!(details.path, c.editor_config().font_path().to_string());
        assert_eq!(details.size, c.editor_config().character_size());
    }

    //    #[test]
    //    fn mut_return_character_rectangle() {
    //        let config = build_config();
    //        let mut renderer = SimpleRendererMock::new(config);
    //        let result = get_text_character_rect('a', &mut renderer);
    //        assert_eq!(result, Some(Rect::new(0, 0, 10, 10)));
    //    }

    #[test]
    fn check_texture_path() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(widget.texture_path(), None);
    }

    #[test]
    fn check_dest() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(widget.dest(), &Rect::new(4, 5, 6, 7));
    }

    #[test]
    fn check_set_dest() {
        let config = build_config();
        let mut widget = Dummy::new(config);
        assert_eq!(widget.set_dest(&Rect::new(9, 8, 7, 6)), ());
        assert_eq!(widget.dest(), &Rect::new(9, 8, 7, 6));
    }

    #[test]
    fn check_source() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(widget.source(), &Rect::new(0, 1, 2, 3));
    }

    #[test]
    fn check_set_source() {
        let config = build_config();
        let mut widget = Dummy::new(config);
        assert_eq!(widget.set_source(&Rect::new(9, 8, 7, 6)), ());
        assert_eq!(widget.source(), &Rect::new(9, 8, 7, 6));
    }

    #[test]
    fn check_update() {
        let config = build_config();
        let mut widget = Dummy::new(config);
        assert_eq!(
            widget.update(0, &UpdateContext::Nothing),
            UpdateResult::NoOp
        );
    }

    #[test]
    fn check_on_left_click() {
        let config = build_config();
        let mut widget = Dummy::new(config);
        assert_eq!(
            widget.on_left_click(&Point::new(0, 1), &UpdateContext::Nothing),
            UpdateResult::NoOp
        );
    }

    #[test]
    fn check_is_left_click_target() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(
            widget.is_left_click_target(&Point::new(0, 1), &UpdateContext::Nothing),
            false
        );
    }

    #[test]
    fn check_render_start_point() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(
            widget.render_start_point(),
            Rect::new(4, 5, 6, 7).top_left()
        );
    }

    #[test]
    fn check_clipping() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(
            widget.clipping(&Rect::new(0, 0, 1, 1)),
            Rect::new(0, 0, 1, 1)
        );
    }

    #[test]
    fn check_padding_width() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(widget.padding_width(), 0);
    }

    #[test]
    fn check_padding_height() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(widget.padding_height(), 0);
    }

    #[test]
    fn check_use_clipping() {
        let config = build_config();
        let widget = Dummy::new(config);
        assert_eq!(widget.use_clipping(), false);
    }

    #[test]
    fn check_render() {
        build_test_renderer!(renderer);
        let widget = Dummy::new(config);
        assert_eq!(
            widget.render(&mut canvas, &mut renderer, &RenderContext::Nothing),
            ()
        );
    }

    #[test]
    fn check_prepare_ui() {
        build_test_renderer!(renderer);
        let mut widget = Dummy::new(config);
        assert_eq!(widget.prepare_ui(&mut renderer), ());
    }
}
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
use std::collections::HashSet;

// Doing it a low level way that's closer to how I'll do it
// in stee.

/// Advent of Code 2018 day 1, part 2.
///
/// Repeatedly cycles through the signed frequency changes in `input`
/// (one per line, e.g. "+3" or "-2"; blank lines are skipped), starting
/// from frequency 0, and returns the first frequency value reached twice.
///
/// Panics if a non-empty line is not a valid signed integer.
fn first_repeat(input: &str) -> i32 {
    let mut seen_frequencies = HashSet::new();
    let mut frequency: i32 = 0;
    // Bug fix: the starting frequency counts as already seen. Without this,
    // input like "+1, -1" would report 1 instead of the correct 0.
    seen_frequencies.insert(frequency);
    loop {
        for line in input.lines() {
            // trim() also strips a stray '\r' from CRLF files.
            let line = line.trim();
            if line.is_empty() {
                continue;
            }
            // i32's FromStr already accepts an optional leading '+' or '-',
            // so no manual sign/digit arithmetic is needed.
            let delta: i32 = line
                .parse()
                .unwrap_or_else(|e| panic!("invalid frequency change {:?}: {}", line, e));
            frequency += delta;
            // HashSet::insert returns false when the value was present.
            if !seen_frequencies.insert(frequency) {
                return frequency;
            }
        }
    }
}

fn main() -> std::io::Result<()> {
    let f = File::open("src/bin/day01.txt")?;
    let mut reader = BufReader::new(f);
    let mut input = String::new();
    reader.read_to_string(&mut input)?;
    println!("First Repeat: {}", first_repeat(&input));
    Ok(())
}
// S-expression reader built on a small hand-rolled parser-combinator
// library (`basic_parsers` for leaf parsers, `combinators` for
// higher-order ones). Every parser takes a `Span` and returns the
// parsed value plus the remaining input, or a `ParseError`.
use crate::parsing::basic_parsers::char_that;
use crate::parsing::combinators::repeat_1_or_more;
use basic_parsers::{any_char, char, eof, tag, whitespace};
use combinators::{all, any, followed, map, not, opt, peek, repeat_0_or_more};

// Shorthands: a parser yields (value, rest-of-input) or an error.
type Result<'a, T> = std::result::Result<T, ParseError<'a>>;
type ParseResult<'a, T> = Result<'a, (T, Span<'a>)>;

#[derive(Debug)]
pub struct ParseError<'a> {
    pub kind: ParseErrorKind,
    pub location: Span<'a>,
    // A fatal error aborts alternation (`any`) instead of backtracking.
    fatal: bool,
}

#[derive(Debug, PartialEq)]
pub enum ParseErrorKind {
    Context(&'static str),
    Char(Option<char>),
    Tag(&'static str),
    Whitespace,
    Eof,
    Repeat(Box<ParseErrorKind>),
    Not,
    UnclosedSequence,
    InvalidToken,
}

// Anything carrying a source span; lets combinators widen spans
// uniformly for both raw `Span`s and parsed `SpannedSexpr`s.
pub trait Spanned<'a> {
    fn span(&self) -> &Span<'a>;
    fn span_mut(&mut self) -> &mut Span<'a>;
}

impl<'a> Spanned<'a> for Span<'a> {
    fn span(&self) -> &Span<'a> {
        self
    }
    fn span_mut(&mut self) -> &mut Span<'a> {
        self
    }
}

impl<'a> Spanned<'a> for SpannedSexpr<'a> {
    fn span(&self) -> &Span<'a> {
        &self.span
    }
    fn span_mut(&mut self) -> &mut Span<'a> {
        &mut self.span
    }
}

// A byte range `start..end` into the complete source string `text`.
#[derive(Debug, Copy, Clone)]
pub struct Span<'a> {
    pub text: &'a str,
    pub start: usize,
    pub end: usize,
}

impl<'a> std::ops::Deref for Span<'a> {
    type Target = str;
    fn deref(&self) -> &str {
        &self.text[self.start..self.end]
    }
}

impl<'a> Default for Span<'a> {
    fn default() -> Span<'a> {
        Span {
            text: "",
            start: 0,
            end: 0,
        }
    }
}

impl<'a> Span<'a> {
    pub fn new(text: &'a str) -> Self {
        Span {
            text,
            start: 0,
            end: text.len(),
        }
    }

    // Span from the start of `from` to the end of `to`; both must point
    // into the same source string.
    pub fn range(from: Span<'a>, to: Span<'a>) -> Self {
        assert_eq!(from.text, to.text);
        Span {
            text: from.text,
            start: from.start,
            end: to.end,
        }
    }

    pub fn empty(&self) -> bool {
        self.end == self.start
    }

    // Split after the first `n` bytes: (consumed, rest).
    pub fn split(&self, n: usize) -> (Self, Self) {
        let first = Span {
            text: self.text,
            start: self.start,
            end: self.start + n,
        };
        let second = Span {
            text: self.text,
            start: self.start + n,
            end: self.end,
        };
        (first, second)
    }

    pub fn as_str(&self) -> &str {
        &self.text[self.start..self.end]
    }
}

// A parsed expression together with the source span it came from.
#[derive(Debug)]
pub struct SpannedSexpr<'a> {
    pub span: Span<'a>,
    pub expr: Sexpr<'a>,
}

impl<'a> PartialEq for SpannedSexpr<'a> {
    // Equality deliberately ignores the span: equal expressions from
    // different source positions compare equal.
    fn eq(&self, other: &Self) -> bool {
        self.expr.eq(&other.expr)
    }
}

#[derive(Debug, PartialEq)]
pub enum Sexpr<'a> {
    Nil,
    True,
    False,
    Symbol(&'a str),
    String(&'a str),
    Integer(i64),
    Float(f64),
    List(Vec<SpannedSexpr<'a>>),
    Vector(Vec<SpannedSexpr<'a>>),
    Dot,
}

// Parse a whole source string into its top-level expressions.
pub fn parse(src: &str) -> Result<Vec<SpannedSexpr>> {
    let mut exprs = vec![];
    let mut src = Span::new(src);
    while !src.is_empty() {
        let (expr, rest) = parse_sexpr(src)?;
        src = rest;
        exprs.push(expr);
    }
    Ok(exprs)
}

// Parse one expression, skipping surrounding whitespace and comments.
// Alternatives are tried in order; `parse_invalid` at the end turns
// anything unrecognised into a fatal error.
pub fn parse_sexpr(src: Span) -> ParseResult<SpannedSexpr> {
    let (_, src) = opt(parse_intertoken_space)(src)?;
    let (expr, rest) = any((
        any((parse_abbreviation, parse_dot, parse_boolean, parse_symbol)),
        any((parse_list, parse_vector, parse_string, parse_number)),
        parse_invalid,
    ))(src)?;
    let (_, rest) = opt(parse_intertoken_space)(rest)?;
    Ok((expr, rest))
}

fn parse_intertoken_space(src: Span) -> ParseResult<Span> {
    repeat_1_or_more(any((whitespace, parse_comment)))(src)
}

fn parse_invalid<T>(input: Span) -> ParseResult<T> {
    Err(ParseError {
        kind: ParseErrorKind::InvalidToken,
        location: input,
        fatal: true,
    })
}

// `;` up to (but not consuming) the next newline.
fn parse_comment(src: Span) -> ParseResult<Span> {
    all((
        char(';'),
        repeat_0_or_more(all((not(parse_newline), any_char))),
    ))(src)
}

fn parse_newline(src: Span) -> ParseResult<Span> {
    (char('\n'))(src)
}

// A lone `.` (dotted-pair marker); the delimiter lookahead keeps
// symbols such as `.x` from being misread as a dot.
fn parse_dot(input: Span) -> ParseResult<SpannedSexpr> {
    followed(
        map(char('.'), |span| span.into_spanned(Sexpr::Dot)),
        peek(parse_delimiter),
    )(input)
}

fn parse_list(input: Span) -> ParseResult<SpannedSexpr> {
    let (open, rest) = char('(')(input)?;
    let (list, rest) = parse_sequence(rest)?;
    // A missing `)` is fatal and reported at the opener's position.
    let (close, rest) = char(')')(rest).map_err(|_| ParseError {
        kind: ParseErrorKind::UnclosedSequence,
        location: input,
        fatal: true,
    })?;
    let final_span = Span::range(open, close);
    Ok((final_span.into_spanned(Sexpr::List(list)), rest))
}

fn parse_vector(input: Span) -> ParseResult<SpannedSexpr> {
    let (open, rest) = tag("#(")(input)?;
    let (list, rest) = parse_sequence(rest)?;
    let (close, rest) = char(')')(rest)?;
    let final_span = Span::range(open, close);
    Ok((final_span.into_spanned(Sexpr::Vector(list)), rest))
}

// Zero or more expressions, stopping before `)` or end of input; the
// caller consumes the closing delimiter itself.
fn parse_sequence(input: Span) -> ParseResult<Vec<SpannedSexpr>> {
    let (_, mut rest) = opt(whitespace)(input).unwrap();
    let mut seq = vec![];
    loop {
        if let Ok(_) = char(')')(rest) {
            return Ok((seq, rest));
        }
        if let Ok(_) = eof(rest) {
            return Ok((seq, rest));
        }
        let (item, r) = parse_sexpr(rest)?;
        seq.push(item);
        let (_, r) = opt(whitespace)(r).unwrap();
        rest = r;
    }
}

fn parse_boolean(input: Span) -> ParseResult<SpannedSexpr> {
    followed(any((parse_true, parse_false)), peek(parse_delimiter))(input).map_err(|_| ParseError {
        kind: ParseErrorKind::Context("Expected boolean: #t, #f, #true, or #false"),
        location: input,
        fatal: false,
    })
}

fn parse_true(input: Span) -> ParseResult<SpannedSexpr> {
    // Longer form first so "#true" is not split into "#t" + "rue".
    map(any((tag("#true"), tag("#t"))), |span| {
        span.into_spanned(Sexpr::True)
    })(input)
}

fn parse_false(input: Span) -> ParseResult<SpannedSexpr> {
    map(any((tag("#false"), tag("#f"))), |span| {
        span.into_spanned(Sexpr::False)
    })(input)
}

// Take everything up to the next delimiter, then try i64 before f64.
fn parse_number(input: Span) -> ParseResult<SpannedSexpr> {
    let (num, rest) = repeat_1_or_more(all((not(parse_delimiter), any_char)))(input)?;
    if let Ok(i) = num.parse() {
        return Ok((num.into_spanned(Sexpr::Integer(i)), rest));
    }
    if let Ok(f) = num.parse() {
        return Ok((num.into_spanned(Sexpr::Float(f)), rest));
    }
    Err(ParseError {
        kind: ParseErrorKind::Context("Expected number"),
        location: input,
        fatal: false,
    })
}

fn parse_symbol(input: Span) -> ParseResult<SpannedSexpr> {
    map(
        any((
            all((
                parse_symbol_initial,
                repeat_0_or_more(parse_symbol_subsequent),
            )),
            //all((char('|'), repeat_0_or_more(parse_symbol_element), char('|'))), //not yet implemented
            parse_peculiar_identifier,
        )),
        |span| span.into_spanned(Sexpr::Symbol(&span.text[span.start..span.end])),
    )(input)
}

// Letters plus the Scheme "extended identifier" punctuation.
fn parse_symbol_initial(input: Span) -> ParseResult<Span> {
    any((
        char_that(char::is_alphabetic),
        any((char('!'), char('$'), char('%'), char('&'), char('*'))),
        any((char('/'), char(':'), char('<'), char('='), char('>'))),
        any((char('?'), char('^'), char('_'), char('~'))),
    ))(input)
}

fn parse_symbol_subsequent(input: Span) -> ParseResult<Span> {
    any((
        parse_symbol_initial,
        char_that(|ch| ch.is_ascii_digit()),
        any((parse_explicit_sign, char('.'), char('@'))),
    ))(input)
}

// Peculiar identifiers in the style of R7RS: `+`, `-`, `...`, `+x`,
// `-.x`, `.x` and similar.
fn parse_peculiar_identifier(input: Span) -> ParseResult<Span> {
    any((
        all((
            parse_explicit_sign,
            parse_sign_subsequent,
            repeat_0_or_more(parse_symbol_subsequent),
        )),
        all((
            parse_explicit_sign,
            char('.'),
            parse_dot_subsequent,
            repeat_0_or_more(parse_symbol_subsequent),
        )),
        all((
            char('.'),
            parse_dot_subsequent,
            repeat_0_or_more(parse_symbol_subsequent),
        )),
        parse_explicit_sign,
    ))(input)
}

fn parse_dot_subsequent(input: Span) -> ParseResult<Span> {
    any((parse_sign_subsequent, char('.')))(input)
}

fn parse_sign_subsequent(input: Span) -> ParseResult<Span> {
    any((parse_symbol_initial, parse_explicit_sign, char('@')))(input)
}

fn parse_explicit_sign(input: Span) -> ParseResult<Span> {
    any((char('+'), char('-')))(input)
}

// NOTE(review): no escape handling — a string literal cannot contain
// a `"` character at all.
fn parse_string(input: Span) -> ParseResult<SpannedSexpr> {
    let (open, rest) = char('"')(input)?;
    let (span, rest) = repeat_0_or_more(all((not(char('"')), any_char)))(rest)?;
    let (close, rest) = char('"')(rest)?;
    let final_span = Span::range(open, close);
    Ok((
        final_span.into_spanned(Sexpr::String(&span.text[span.start..span.end])),
        rest,
    ))
}

// Reader abbreviations: 'x, `x, ,x and ,@x expand to (quote x) etc.
// ",@" must be tried before "," so splicing is not misread.
fn parse_abbreviation(input: Span) -> ParseResult<SpannedSexpr> {
    any((
        map(tag("'()"), |span| span.into_spanned(Sexpr::Nil)),
        expand("'", "quote"),
        expand("`", "quasiquote"),
        expand(",@", "unquote-splicing"),
        expand(",", "unquote"),
    ))(input)
}

// Build a parser that rewrites `<prefix>expr` into `(<symbol> expr)`.
fn expand<'a>(
    prefix: &'static str,
    symbol: &'static str,
) -> impl Fn(Span<'a>) -> ParseResult<'a, SpannedSexpr<'a>> {
    move |input| {
        let (prefix, rest) = tag(prefix)(input)?;
        let (expr, rest) = parse_sexpr(rest)?;
        let final_span = Span::range(prefix, expr.span);
        Ok((
            final_span.into_spanned(Sexpr::List(vec![
                prefix.into_spanned(Sexpr::Symbol(symbol)),
                expr,
            ])),
            rest,
        ))
    }
}

fn parse_delimiter(input: Span) -> ParseResult<Span> {
    any((eof, tag("("), tag(")"), whitespace))(input).map_err(|pe| ParseError {
        kind: ParseErrorKind::Context("Expected delimiter"),
        ..pe
    })
}

impl<'a> PartialEq<Sexpr<'a>> for SpannedSexpr<'a> {
    fn eq(&self, rhs: &Sexpr<'a>) -> bool {
        self.expr.eq(rhs)
    }
}

// Convenience comparisons used by the tests: a List/Vector compares
// equal to a plain Vec of expressions.
impl<'a> PartialEq<Vec<Sexpr<'a>>> for Sexpr<'a> {
    fn eq(&self, rhs: &Vec<Sexpr<'a>>) -> bool {
        match self {
            Sexpr::List(x) => x == rhs,
            Sexpr::Vector(x) => x == rhs,
            _ => false,
        }
    }
}

impl<'a> PartialEq<Sexpr<'a>> for Vec<Sexpr<'a>> {
    fn eq(&self, rhs: &Sexpr<'a>) -> bool {
        rhs.eq(self)
    }
}

trait IntoSpannedSexpr<'a> {
    fn into_spanned(self, expr: Sexpr<'a>) -> SpannedSexpr<'a>;
}

impl<'a> IntoSpannedSexpr<'a> for Span<'a> {
    fn into_spanned(self, expr: Sexpr<'a>) -> SpannedSexpr<'a> {
        SpannedSexpr { span: self, expr }
    }
}

// Leaf parsers: consume literal text from the front of a span.
mod basic_parsers {
    use super::{ParseError, ParseErrorKind, ParseResult, Span};

    pub fn tag<'a>(tag: &'static str) -> impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>> {
        move |input: Span| -> ParseResult<Span> {
            if input.starts_with(tag) {
                Ok(input.split(tag.len()))
            } else {
                Err(ParseError {
                    kind: ParseErrorKind::Tag(tag),
                    location: input,
                    fatal: false,
                })
            }
        }
    }

    pub fn char<'a>(tag: char) -> impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>> {
        move |input: Span| -> ParseResult<Span> {
            if input.starts_with(tag) {
                Ok(input.split(tag.len_utf8()))
            } else {
                Err(ParseError {
                    kind: ParseErrorKind::Char(Some(tag)),
                    location: input,
                    fatal: false,
                })
            }
        }
    }

    // Consume a single char satisfying `predicate`.
    pub fn char_that<'a>(
        predicate: impl Fn(char) -> bool,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>> {
        move |input: Span| -> ParseResult<Span> {
            match input.chars().next() {
                Some(ch) if predicate(ch) => Ok(input.split(ch.len_utf8())),
                _ => Err(ParseError {
                    kind: ParseErrorKind::Char(None),
                    location: input,
                    fatal: false,
                }),
            }
        }
    }

    pub fn any_char(input: Span) -> ParseResult<Span> {
        char_that(|_| true)(input)
    }

    pub fn eof<'a>(input: Span) -> ParseResult<Span> {
        if input.is_empty() {
            Ok((input, input))
        } else {
            Err(ParseError {
                kind: ParseErrorKind::Eof,
                location: input,
                fatal: false,
            })
        }
    }

    // Consume the longest non-empty run of leading whitespace.
    pub fn whitespace(input: Span) -> ParseResult<Span> {
        match input.char_indices().find(|(_, ch)| !ch.is_whitespace()) {
            None if input.is_empty() => Err(ParseError {
                kind: ParseErrorKind::Whitespace,
                location: input,
                fatal: false,
            }),
            // Everything was whitespace.
            None => Ok(input.split(input.len())),
            // First char is not whitespace: no match.
            Some((0, _)) => Err(ParseError {
                kind: ParseErrorKind::Whitespace,
                location: input,
                fatal: false,
            }),
            Some((i, _)) => Ok(input.split(i)),
        }
    }
}

// Higher-order parsers built from other parsers.
mod combinators {
    use super::{ParseError, ParseErrorKind, ParseResult, Span, Spanned};

    // Run `parser` without consuming input on success.
    pub fn peek<'a, T>(
        parser: impl Fn(Span<'a>) -> ParseResult<'a, T>,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, T> {
        move |input: Span<'a>| -> ParseResult<'a, T> { parser(input).map(|(x, _)| (x, input)) }
    }

    // Turn a non-fatal failure into Ok(None); fatal errors still abort.
    pub fn opt<'a, T>(
        parser: impl Fn(Span<'a>) -> ParseResult<'a, T>,
    ) -> impl Fn(Span<'a>) -> ParseResult<Option<T>> {
        move |input: Span<'a>| match parser(input) {
            Ok((out, rest)) => Ok((Some(out), rest)),
            Err(e) if !e.fatal => Ok((None, input)),
            Err(e) => Err(e),
        }
    }

    // Negative lookahead: succeed (consuming nothing) iff `parser` fails.
    pub fn not<'a, T>(
        parser: impl Fn(Span<'a>) -> ParseResult<'a, T>,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>> {
        move |input: Span<'a>| match parser(input) {
            Ok(_) => Err(ParseError {
                kind: ParseErrorKind::Not,
                location: input,
                fatal: false,
            }),
            Err(e) if !e.fatal => Ok(input.split(0)),
            Err(e) => Err(e),
        }
    }

    // Parse `first`, require `by` after it, keep only `first`'s output.
    pub fn followed<'a, T, Z>(
        first: impl Fn(Span<'a>) -> ParseResult<'a, T>,
        by: impl Fn(Span<'a>) -> ParseResult<'a, Z>,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, T> {
        move |input: Span<'a>| -> ParseResult<'a, T> {
            let (a, rest) = first(input)?;
            let (_, rest) = by(rest)?;
            Ok((a, rest))
        }
    }

    pub fn map<'a, T, U>(
        parser: impl Fn(Span<'a>) -> ParseResult<'a, T>,
        func: impl Fn(T) -> U,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, U> {
        move |input: Span<'a>| -> ParseResult<'a, U> {
            parser(input).map(|(x, rest)| (func(x), rest))
        }
    }

    // Kleene star over span parsers; the result span covers all matches
    // (starts empty at `input.start`).
    pub fn repeat_0_or_more<'a>(
        parser: impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>>,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>> {
        move |input: Span<'a>| -> ParseResult<'a, Span<'a>> {
            let mut rest = input;
            let mut matched = Span {
                end: input.start,
                ..input
            };
            while let Ok(x) = parser(rest) {
                matched.end = x.0.end;
                rest = x.1;
            }
            Ok((matched, rest))
        }
    }

    // Like `repeat_0_or_more` but fails (wrapping the inner error in
    // `Repeat`) when nothing at all matched.
    pub fn repeat_1_or_more<'a>(
        parser: impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>>,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, Span<'a>> {
        move |input: Span<'a>| -> ParseResult<'a, Span<'a>> {
            let mut rest = input;
            let mut matched = Span {
                end: input.start,
                ..input
            };
            loop {
                match parser(rest) {
                    Ok(x) => {
                        matched.end = x.0.end;
                        rest = x.1;
                    }
                    Err(mut e) => {
                        if matched.is_empty() {
                            e.kind = ParseErrorKind::Repeat(Box::new(e.kind));
                            return Err(e);
                        } else {
                            return Ok((matched, rest));
                        }
                    }
                }
            }
        }
    }

    // Ordered choice over a tuple of parsers (first success wins).
    pub fn any<'a, T: Spanned<'a>>(
        parsers: impl ParserSequence<'a, T>,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, T> {
        move |input: Span<'a>| -> ParseResult<'a, T> { parsers.parse_or(input) }
    }

    // Sequence over a tuple of parsers; keeps the LAST parser's output
    // with its span widened back to the sequence's start.
    pub fn all<'a, T: Spanned<'a>>(
        parsers: impl ParserSequence<'a, T>,
    ) -> impl Fn(Span<'a>) -> ParseResult<'a, T> {
        move |input: Span<'a>| -> ParseResult<'a, T> { parsers.parse_and(input) }
    }

    // Implemented for 2..=5-tuples; larger arities nest `any` calls.
    pub trait ParserSequence<'a, T: Spanned<'a>> {
        fn parse_or(&self, input: Span<'a>) -> ParseResult<'a, T>;
        fn parse_and(&self, input: Span<'a>) -> ParseResult<'a, T>;
    }

    impl<'a, A, B, T: Spanned<'a>> ParserSequence<'a, T> for (A, B)
    where
        A: Fn(Span<'a>) -> ParseResult<'a, T>,
        B: Fn(Span<'a>) -> ParseResult<'a, T>,
    {
        fn parse_or(&self, input: Span<'a>) -> ParseResult<'a, T> {
            match self.0(input) {
                Ok(x) => Ok(x),
                Err(e) if e.fatal => Err(e),
                Err(_) => self.1(input),
            }
        }
        fn parse_and(&self, input: Span<'a>) -> ParseResult<'a, T> {
            self.0(input)
                .and_then(|(_, rest)| self.1(rest))
                .map(|(mut out, rest)| {
                    out.span_mut().start = input.start;
                    (out, rest)
                })
        }
    }

    impl<'a, A, B, C, T: Spanned<'a>> ParserSequence<'a, T> for (A, B, C)
    where
        A: Fn(Span<'a>) -> ParseResult<'a, T>,
        B: Fn(Span<'a>) -> ParseResult<'a, T>,
        C: Fn(Span<'a>) -> ParseResult<'a, T>,
    {
        fn parse_or(&self, input: Span<'a>) -> ParseResult<'a, T> {
            any((&self.0, any((&self.1, &self.2))))(input)
        }
        fn parse_and(&self, input: Span<'a>) -> ParseResult<'a, T> {
            self.0(input)
                .and_then(|(_, rest)| self.1(rest))
                .and_then(|(_, rest)| self.2(rest))
                .map(|(mut out, rest)| {
                    out.span_mut().start = input.start;
                    (out, rest)
                })
        }
    }

    impl<'a, A, B, C, D, T: Spanned<'a>> ParserSequence<'a, T> for (A, B, C, D)
    where
        A: Fn(Span<'a>) -> ParseResult<'a, T>,
        B: Fn(Span<'a>) -> ParseResult<'a, T>,
        C: Fn(Span<'a>) -> ParseResult<'a, T>,
        D: Fn(Span<'a>) -> ParseResult<'a, T>,
    {
        fn parse_or(&self, input: Span<'a>) -> ParseResult<'a, T> {
            any((any((&self.0, &self.1)), any((&self.2, &self.3))))(input)
        }
        fn parse_and(&self, input: Span<'a>) -> ParseResult<'a, T> {
            self.0(input)
                .and_then(|(_, rest)| self.1(rest))
                .and_then(|(_, rest)| self.2(rest))
                .and_then(|(_, rest)| self.3(rest))
                .map(|(mut out, rest)| {
                    out.span_mut().start = input.start;
                    (out, rest)
                })
        }
    }

    impl<'a, A, B, C, D, E, T: Spanned<'a>> ParserSequence<'a, T> for (A, B, C, D, E)
    where
        A: Fn(Span<'a>) -> ParseResult<'a, T>,
        B: Fn(Span<'a>) -> ParseResult<'a, T>,
        C: Fn(Span<'a>) -> ParseResult<'a, T>,
        D: Fn(Span<'a>) -> ParseResult<'a, T>,
        E: Fn(Span<'a>) -> ParseResult<'a, T>,
    {
        fn parse_or(&self, input: Span<'a>) -> ParseResult<'a, T> {
            any((&self.0, &self.1, &self.2, any((&self.3, &self.4))))(input)
        }
        fn parse_and(&self, input: Span<'a>) -> ParseResult<'a, T> {
            self.0(input)
                .and_then(|(_, rest)| self.1(rest))
                .and_then(|(_, rest)| self.2(rest))
                .and_then(|(_, rest)| self.3(rest))
                .and_then(|(_, rest)| self.4(rest))
                .map(|(mut out, rest)| {
                    out.span_mut().start = input.start;
                    (out, rest)
                })
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // compare!(expected, actual): full parse, all input consumed.
    // compare!(expected, _, actual): leftover input is ignored.
    macro_rules! compare {
        ($expected:expr, $actual:expr) => {
            match $actual {
                Ok((ex, rest)) => {
                    assert_eq!(ex.expr, $expected);
                    assert!(rest.empty())
                }
                Err(e) => panic!("{:#?}", e),
            }
        };
        ($expected:expr, _, $actual:expr) => {
            match $actual {
                Ok((ex, _)) => {
                    assert_eq!(ex.expr, $expected);
                }
                Err(e) => panic!("{:#?}", e),
            }
        };
    }

    macro_rules! fail {
        ($actual:expr) => {
            assert!($actual.is_err())
        };
    }

    #[test]
    fn bool_parsing() {
        compare!(Sexpr::True, parse_boolean(Span::new("#true")));
        compare!(Sexpr::False, parse_boolean(Span::new("#false")));
        compare!(Sexpr::True, parse_boolean(Span::new("#t")));
        compare!(Sexpr::False, parse_boolean(Span::new("#f")));
        fail!(parse_boolean(Span::new("#turnip")));
    }

    #[test]
    fn symbol_parsing() {
        compare!(
            Sexpr::Symbol("abc-def!"),
            parse_symbol(Span::new("abc-def!"))
        );
        compare!(Sexpr::Symbol("x"), _, parse_symbol(Span::new("x(y)")));
        compare!(Sexpr::Symbol("x"), _, parse_symbol(Span::new("x y")));
        compare!(Sexpr::Symbol("..."), _, parse_symbol(Span::new("...")));
        compare!(Sexpr::Symbol("-x"), _, parse_symbol(Span::new("-x")));
    }

    #[test]
    fn list_parsing() {
        compare!(
            vec![Sexpr::Symbol("x"), Sexpr::Symbol("y"), Sexpr::Symbol("z")],
            parse_list(Span::new("(x y z)"))
        );
        compare!(
            vec![Sexpr::Symbol("x"), Sexpr::Symbol("y"), Sexpr::Symbol("z")],
            parse_list(Span::new("( x y z )"))
        );
        compare!(
            vec![
                Sexpr::Symbol("x"),
                Sexpr::Symbol("y"),
                Sexpr::Dot,
                Sexpr::Symbol("z")
            ],
            parse_list(Span::new("(x y . z)"))
        );
    }

    #[test]
    fn nested_list_parsing() {
        let result = parse_list(Span::new("(x ((y) z))")).unwrap().0.expr;
        if let Sexpr::List(items) = result {
            assert_eq!(items.len(), 2);
            assert_eq!(items[0].expr, Sexpr::Symbol("x"));
            if let Sexpr::List(items) = &items[1].expr {
                assert_eq!(items.len(), 2);
                if let Sexpr::List(items) = &items[0].expr {
                    assert_eq!(items.len(), 1);
                    assert_eq!(items[0].expr, Sexpr::Symbol("y"));
                } else {
                    panic!("inner-most list not parsed correctly")
                }
                assert_eq!(items[1].expr, Sexpr::Symbol("z"));
            } else {
                panic!("inner list not parsed correctly")
            }
        } else {
            panic!("outer list not parsed correctly")
        }
    }

    #[test]
    fn vector_parsing() {
        compare!(
            vec![Sexpr::Symbol("x"), Sexpr::Symbol("y"), Sexpr::Symbol("z")],
            parse_vector(Span::new("#( x y z )"))
        );
    }

    #[test]
    fn number_parsing() {
        compare!(Sexpr::Integer(42), parse_number(Span::new("42")));
        compare!(Sexpr::Integer(-24), parse_number(Span::new("-24")));
        compare!(Sexpr::Float(3.1415), parse_number(Span::new("3.1415")));
        fail!(parse_number(Span::new("1x2y3")))
    }

    #[test]
    fn string_parsing() {
        compare!(
            Sexpr::String("42 )(foo-bar)"),
            parse_string(Span::new("\"42 )(foo-bar)\""))
        );
    }

    #[test]
    fn quotation_parsing() {
        compare!(
            vec![Sexpr::Symbol("quote"), Sexpr::Symbol("abc")],
            parse_abbreviation(Span::new("'abc"))
        );
    }

    #[test]
    fn quasiquotation_parsing() {
        compare!(
            vec![Sexpr::Symbol("quasiquote"), Sexpr::Symbol("abc")],
            parse_abbreviation(Span::new("`abc"))
        );
    }

    #[test]
    fn unquotation_parsing() {
        compare!(
            vec![Sexpr::Symbol("unquote"), Sexpr::Symbol("abc")],
            parse_abbreviation(Span::new(",abc"))
        );
    }

    #[test]
    fn splicing_unquotation_parsing() {
        compare!(
            vec![Sexpr::Symbol("unquote-splicing"), Sexpr::Symbol("abc")],
            parse_abbreviation(Span::new(",@abc"))
        );
    }

    #[test]
    fn comment_parsing() {
        compare!(
            vec![Sexpr::Symbol("x"), Sexpr::Symbol("y"), Sexpr::Symbol("z")],
            parse_sexpr(Span::new(
                "; preceding comment\n (x ; inner comment\n y z) ; trailing comment\n"
            ))
        );
    }

    #[test]
    fn sexpr_parsing() {
        let x = Span::new("(abc 123 (4.5 \"y\" . z))");
        parse_sexpr(x).unwrap();
    }
}
#![deny(warnings)] use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Client, Request, Response, Server}; use std::{convert::Infallible, net::SocketAddr}; async fn hello(mut req: Request<Body>) -> Result<Response<Body>, hyper::Error> { println!("--- {:?}", req.headers()); // let body_buf = hyper::body::to_bytes(req.into_body()).await.unwrap(); let client = Client::builder() // .pool_idle_timeout(Duration::from_secs(30)) .http2_only(true) .build_http::<Body>(); let out_addr: SocketAddr = ([127, 0, 0, 1], 4567).into(); // let out_addr_clone = out_addr.clone(); // Ok(Response::new(Body::from("Hello World!"))) let uri_string = format!( "http://{}{}", out_addr, req.uri() .path_and_query() .map(|x| x.as_str()) .unwrap_or("/") ); println!("{}", uri_string); let uri = uri_string.parse().unwrap(); *req.uri_mut() = uri; // let header = req.headers(); // println!("{:?}", header); client.request(req).await } #[tokio::main] async fn main() { // pretty_env_logger::init(); let in_addr = ([127, 0, 0, 1], 3001).into(); let out_addr: SocketAddr = ([127, 0, 0, 1], 50051).into(); let out_addr_clone = out_addr.clone(); let client_main = Client::builder() // .pool_idle_timeout(Duration::from_secs(30)) .http2_only(true) .build_http::<Body>(); // The closure inside `make_service_fn` is run for each connection, // creating a 'service' to handle requests for that specific connection. let make_service = make_service_fn(|_| async move { let client = client_main.clone(); // let remote_addr = socket.remote_addr(); async move { Ok::<_, Infallible>(service_fn(move |_: Request<Body>| async move { Ok::<_, Infallible>(Response::new(Body::from(format!( "Hello, {}!", "remote_addr" )))) })) } }); let make_service = make_service_fn(|_conn| { // This is the `Service` that will handle the connection. // `service_fn` is a helper to convert a function that // returns a Response into a `Service`. 
async { Ok::<_, Infallible>(service_fn(hello)) } }); let server = Server::bind(&in_addr).serve(make_service); println!("Listening on http://{}", in_addr); println!("Proxying on http://{}", out_addr); if let Err(e) = server.await { eprintln!("server error: {}", e); } }
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::collections::{HashSet, HashMap};
use itertools::Itertools;
use std::convert::TryInto;

/// Buffered line iterator over `filename`.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    let file = File::open(filename)?;
    Ok(io::BufReader::new(file).lines())
}

// Advent of Code 2020 day 17: Conway cubes in N dimensions (N = 4 here).
// Active cells are stored sparsely as coordinate arrays in a HashSet.
fn main() {
    let filename = "/home/remy/AOC/2020/17/input";
    // Dimensionality; the 2-D input grid fills the last two axes.
    const N: usize = 4;
    let mut space: HashSet<[i8; N]> = HashSet::new();
    if let Ok(lines) = read_lines(filename) {
        // Seed the active set from the input slice ('#' = active).
        for (i, row) in lines.enumerate() {
            if let Ok(row) = row {
                for (j, item) in row.chars().enumerate() {
                    if item == '#' {
                        let mut position: [i8; N] = [0; N];
                        position[N-2] = i as i8;
                        position[N-1] = j as i8;
                        space.insert(position);
                    }
                }
            }
        }
        // All 3^N offset vectors in {-1, 0, 1}^N, including the zero
        // vector (skipped explicitly inside the loop below).
        let mut shifts: HashSet<[i8; N]> = HashSet::new();
        let multi_prod = (0..N).map(|_| -1..=1).multi_cartesian_product();
        for combination in multi_prod {
            shifts.insert(combination.try_into().unwrap_or_else(|v: Vec<i8>| panic!("Expected length {} and got {}", N, v.len())));
        }
        let zero: [i8; N] = [0; N];
        // Six simulation cycles.
        for _ in 0..6 {
            // Snapshot of the previous generation; `space` is mutated
            // in place while iterating over the snapshot.
            let occupied = space.clone();
            // birth[c] counts the active neighbours of cell c (only
            // neighbours of active cells can ever appear here, which is
            // exactly the set of cells eligible for birth).
            let mut birth: HashMap<[i8; N], usize> = HashMap::new();
            for item in &occupied {
                let mut active = 0;
                for shift in &shifts {
                    if *shift == zero {
                        continue;
                    }
                    let mut res: [i8; N] = *item;
                    for i in 0..N {
                        res[i] += shift[i];
                    }
                    // Credit this active cell to its neighbour's count.
                    let entry = birth.entry(res).or_insert(0);
                    *entry += 1;
                    match occupied.get(&res) {
                        Some(_) => active += 1,
                        None => (),
                    }
                }
                // Survival rule: stay active only with 2 or 3 active
                // neighbours.
                if active < 2 || active > 3 {
                    space.remove(item);
                }
            }
            // Birth rule: an inactive cell with exactly 3 active
            // neighbours becomes active.
            for (coordinates, neighbors) in birth.iter() {
                if *neighbors == 3 {
                    match occupied.get(coordinates) {
                        Some(_) => (),
                        None => {
                            space.insert(*coordinates);
                        }
                    }
                }
            }
        }
        println!("Active Cells: {}", space.len());
    } else {
        println!("Error");
    }
}
use std::thread;
use std::time::Duration;
use std::sync::{Mutex, Arc};

// Deliberate deadlock demonstration: two threads acquire the same two
// mutexes in opposite order. The sleeps make the fatal interleaving
// (t1 holds a, t2 holds b, each waiting for the other) essentially
// certain, so this program is expected to hang after printing the
// first four lines. Do not "fix" the lock order — that is the point.
fn main() {
    let a = 1;
    let b = 2;
    let m1 = Arc::new(Mutex::new(a));
    let m2 = Arc::new(Mutex::new(b));
    // am1/am2 are both handles to `a`'s mutex (clones of m1);
    // bm1/bm2 are both handles to `b`'s mutex (clones of m2).
    let am1 = Arc::clone(&m1);
    let am2 = Arc::clone(&m1);
    let bm1 = Arc::clone(&m2);
    let bm2 = Arc::clone(&m2);
    // Thread 1: locks a, then b.
    let h1 = thread::spawn(move || {
        println!("thread 1 locks a");
        let v1 = am1.lock().unwrap();
        thread::sleep(Duration::from_millis(100));
        println!("thread 1 locks b");
        let v2 = bm1.lock().unwrap();
        println!("thread 1 locked b");
    });
    // Thread 2: locks b, then a — the opposite order, closing the cycle.
    let h2 = thread::spawn(move || {
        println!("thread 2 locks b");
        let v2 = bm2.lock().unwrap();
        thread::sleep(Duration::from_millis(100));
        println!("thread 2 locks a");
        let v1 = am2.lock().unwrap();
        println!("thread 2 locked a");
    });
    // Once the deadlock is established, neither join ever returns.
    h1.join().unwrap();
    h2.join().unwrap();
}
use std::fs::File;
use std::io::{self, prelude::*, BufReader, SeekFrom};
use std::path::Path;

use bzip2::read::BzDecoder;

// Buffered reader over a bzip2 decompression stream that itself reads
// from a buffered file handle.
type BZipReader = BufReader<BzDecoder<BufReader<File>>>;

/// Create a bzip2 BufReader from a File handle.
// 32 KiB of buffering on the compressed side, 128 KiB on the
// decompressed side.
pub fn to_decode_buffer(file: File) -> BZipReader {
    let buf = BufReader::with_capacity(8192 * 4, file);
    let dec = BzDecoder::new(buf);
    BufReader::with_capacity(8192 * 16, dec)
}

/// Open a bzip2 file.
pub fn open_bzip<P: AsRef<Path>>(path: P) -> io::Result<BZipReader> {
    let file = File::open(path)?;
    Ok(to_decode_buffer(file))
}

/// Open a bzip2 multistream and seek to a zip file at a given index.
// NOTE(review): `index` appears to be a byte offset of a stream start
// within the multistream (decoding begins at that offset) — confirm
// the unit against callers.
pub fn open_seek_bzip<P: AsRef<Path>>(path: P, index: usize) -> io::Result<BZipReader> {
    let mut file = File::open(path)?;
    file.seek(SeekFrom::Start(index as u64))?;
    Ok(to_decode_buffer(file))
}
/// Apply the "look-and-say" expansion to `seq`, `steps` times.
/// Each pass replaces every run of equal digits with (run length, digit),
/// e.g. [1] -> [1,1] ("one 1") -> [2,1] ("two 1s") -> [1,2,1,1] ...
/// `steps <= 0` returns the sequence unchanged.
///
/// Rewritten iteratively (the original recursed once per step and
/// cloned the whole sequence at the base case for no reason).
fn gen_next_seq(seq: Vec<usize>, steps: i32) -> Vec<usize> {
    let mut current = seq;
    for _ in 0..steps.max(0) {
        // Each pass at most doubles the length.
        let mut next: Vec<usize> = Vec::with_capacity(current.len() * 2);
        let mut i = 0;
        while i < current.len() {
            let digit = current[i];
            // Measure the run of equal digits starting at `i`.
            let mut run = 1;
            while i + run < current.len() && current[i + run] == digit {
                run += 1;
            }
            next.push(run);
            next.push(digit);
            i += run;
        }
        current = next;
    }
    current
}

/// AoC 2015 day 10: sequence lengths after 40 and 50 expansions.
pub fn run() {
    let start = "1113122113";
    let seq: Vec<usize> = start
        .chars()
        .map(|c| c.to_digit(10).expect("puzzle input is all digits") as usize)
        .collect();
    let run40 = gen_next_seq(seq, 40);
    let run40_len = run40.len();
    // Reuse the 40-step result: 10 more steps gives the 50-step answer.
    let run50 = gen_next_seq(run40, 10);
    println!("\n-- AoC 2015: -- Day 10: Elves Look, Elves Say --");
    println!("\n After 4๏ธโƒฃ 0๏ธโƒฃ iterations: {0: >10} \n After 5๏ธโƒฃ 0๏ธโƒฃ iterations: {1: >10}", run40_len, run50.len());
    println!("\n-- DONE --\n");
}
extern crate mio; use mio::*; use mio::tcp::*; use mio::util::Slab; use std::net::SocketAddr; use std::str::FromStr; use std::mem; struct Server { socket: TcpListener, connections: Slab<Connection>, } impl Server { fn new(socket: TcpListener) -> Server { Server { socket: socket, connections: Slab::new_starting_at(mio::Token(1), 1024), } } fn conn_readable(&mut self, event_loop: &mut EventLoop<Server>, token: Token) { // self.connections[token].readable(event_loop); self.connections[token].read(event_loop); } } impl Handler for Server { type Timeout = (); type Message = (); fn ready(&mut self, event_loop: &mut EventLoop<Server>, token: Token, events: EventSet) { match token { mio::Token(0) => { match self.socket.accept() { Ok(Some(socket_token)) => { match socket_token { (socket, _addr) => { println!("accepted a new client socket"); let token = self.connections .insert_with(|token| { Connection::new(socket, token) }) .unwrap(); event_loop.register(&self.connections[token].socket, token, EventSet::readable(), PollOpt::edge() | PollOpt::oneshot()) .unwrap(); } } } Ok(None) => { println!("the server socket wasn't actually ready"); } Err(e) => { println!("encountered error while acceptiong connection; err={:?}", e); event_loop.shutdown(); } } } i => { if events.is_readable() { self.conn_readable(event_loop, i); } } } } } struct Connection { socket: TcpStream, token: Token, state: State, } impl Connection { fn new(socket: TcpStream, token: Token) -> Connection { Connection { socket: socket, token: token, state: State::Reading(vec![]), } } fn read(&mut self, event_loop: &mut EventLoop<Server>) { match self.socket.try_read_buf(self.state.mut_read_buf()) { Ok(Some(n)) => { println!("read {} bytes", n); self.state.try_transition_to_writing(); self.reregister(event_loop); } Ok(None) => {} Err(e) => {} } } fn reregister(&self, event_loop: &mut EventLoop<Server>) { let event_set = match self.state { State::Reading(..) 
=> EventSet::readable(), _ => {} }; event_loop.reregister(&self.socket, self.token, event_set, mio::PollOpt::oneshot()) .unwrap(); } } enum State { Reading(Vec<u8>), Closed, } impl State { fn try_transition_to_writing(&self) { if let Some(pos) = self.read_buf().iter().position(|b| *b == b'\n') { self.transition_to_writingg(pos + 1); } } fn transition_to_writingg(&mut self, pos: usize) { let buf = mem::replace(self, State::Closed).unwrap_read_buf(); } fn unwrap_read_buf(self) -> Vec<u8> {} fn read_buf(&self) -> &[u8] { match *self { State::Reading(ref buf) => buf, _ => panic!("connection not in reading state"), } } fn mut_read_buf(&mut self) -> &mut Vec<u8> { match *self { State::Reading(ref mut buf) => buf, // _ => panic!("connection not in reading state"), } } } fn start(socket_addr: SocketAddr) { let socket = TcpListener::bind(&socket_addr).unwrap(); let mut event_loop: EventLoop<Server> = mio::EventLoop::new().unwrap(); event_loop.register(&socket, mio::Token(0), EventSet::readable(), PollOpt::edge()) .unwrap(); let mut server = Server::new(socket); event_loop.run(&mut server).unwrap(); } fn main() { start(SocketAddr::from_str("127.0.0.1:5555").unwrap()); }
// svd2rust-generated accessors for the TZSC PRIVCFGR1 register
// (peripheral privilege configuration). Each 1-bit field marks the
// corresponding peripheral as privileged-access-only. Machine-generated
// code: do not edit by hand; regenerate from the device SVD instead.
#[doc = "Register `PRIVCFGR1` reader"]
pub type R = crate::R<PRIVCFGR1_SPEC>;
#[doc = "Register `PRIVCFGR1` writer"]
pub type W = crate::W<PRIVCFGR1_SPEC>;
#[doc = "Field `AESPRIV` reader - AESPRIV"]
pub type AESPRIV_R = crate::BitReader;
#[doc = "Field `AESPRIV` writer - AESPRIV"]
pub type AESPRIV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RNGPRIV` reader - RNGPRIV"]
pub type RNGPRIV_R = crate::BitReader;
#[doc = "Field `RNGPRIV` writer - RNGPRIV"]
pub type RNGPRIV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SUBGHZSPIPRIV` reader - SUBGHZSPIPRIV"]
pub type SUBGHZSPIPRIV_R = crate::BitReader;
#[doc = "Field `SUBGHZSPIPRIV` writer - SUBGHZSPIPRIV"]
pub type SUBGHZSPIPRIV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PKAPRIV` reader - PKAPRIV"]
pub type PKAPRIV_R = crate::BitReader;
#[doc = "Field `PKAPRIV` writer - PKAPRIV"]
pub type PKAPRIV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 2 - AESPRIV"]
    #[inline(always)]
    pub fn aespriv(&self) -> AESPRIV_R {
        AESPRIV_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - RNGPRIV"]
    #[inline(always)]
    pub fn rngpriv(&self) -> RNGPRIV_R {
        RNGPRIV_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - SUBGHZSPIPRIV"]
    #[inline(always)]
    pub fn subghzspipriv(&self) -> SUBGHZSPIPRIV_R {
        SUBGHZSPIPRIV_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 13 - PKAPRIV"]
    #[inline(always)]
    pub fn pkapriv(&self) -> PKAPRIV_R {
        PKAPRIV_R::new(((self.bits >> 13) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 2 - AESPRIV"]
    #[inline(always)]
    #[must_use]
    pub fn aespriv(&mut self) -> AESPRIV_W<PRIVCFGR1_SPEC, 2> {
        AESPRIV_W::new(self)
    }
    #[doc = "Bit 3 - RNGPRIV"]
    #[inline(always)]
    #[must_use]
    pub fn rngpriv(&mut self) -> RNGPRIV_W<PRIVCFGR1_SPEC, 3> {
        RNGPRIV_W::new(self)
    }
    #[doc = "Bit 4 - SUBGHZSPIPRIV"]
    #[inline(always)]
    #[must_use]
    pub fn subghzspipriv(&mut self) -> SUBGHZSPIPRIV_W<PRIVCFGR1_SPEC, 4> {
        SUBGHZSPIPRIV_W::new(self)
    }
    #[doc = "Bit 13 - PKAPRIV"]
    #[inline(always)]
    #[must_use]
    pub fn pkapriv(&mut self) -> PKAPRIV_W<PRIVCFGR1_SPEC, 13> {
        PKAPRIV_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "TZSC privilege configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`privcfgr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`privcfgr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PRIVCFGR1_SPEC;
impl crate::RegisterSpec for PRIVCFGR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`privcfgr1::R`](R) reader structure"]
impl crate::Readable for PRIVCFGR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`privcfgr1::W`](W) writer structure"]
impl crate::Writable for PRIVCFGR1_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PRIVCFGR1 to value 0"]
impl crate::Resettable for PRIVCFGR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// NOTE(review): svd2rust-generated accessor (older generator style) for the
// EP_TYPE register: eight identical one-bit fields EP1_TYP..EP8_TYP at bit
// offsets 0..7. Do not hand-edit; regenerate from the SVD. Only comments added.
#[doc = "Reader of register EP_TYPE"]
pub type R = crate::R<u32, super::EP_TYPE>;
#[doc = "Writer for register EP_TYPE"]
pub type W = crate::W<u32, super::EP_TYPE>;
#[doc = "Register EP_TYPE `reset()`'s with value 0"]
impl crate::ResetValue for super::EP_TYPE {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// ---- EP1_TYP (bit 0) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP1_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP1_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP1_TYP_A) -> Self {
        match variant {
            EP1_TYP_A::EP_IN => false,
            EP1_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP1_TYP`"]
pub type EP1_TYP_R = crate::R<bool, EP1_TYP_A>;
impl EP1_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP1_TYP_A {
        match self.bits {
            false => EP1_TYP_A::EP_IN,
            true => EP1_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP1_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP1_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP1_TYP`"]
pub struct EP1_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP1_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP1_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP1_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP1_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// ---- EP2_TYP (bit 1) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP2_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP2_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP2_TYP_A) -> Self {
        match variant {
            EP2_TYP_A::EP_IN => false,
            EP2_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP2_TYP`"]
pub type EP2_TYP_R = crate::R<bool, EP2_TYP_A>;
impl EP2_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP2_TYP_A {
        match self.bits {
            false => EP2_TYP_A::EP_IN,
            true => EP2_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP2_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP2_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP2_TYP`"]
pub struct EP2_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP2_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP2_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP2_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP2_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
// ---- EP3_TYP (bit 2) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP3_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP3_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP3_TYP_A) -> Self {
        match variant {
            EP3_TYP_A::EP_IN => false,
            EP3_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP3_TYP`"]
pub type EP3_TYP_R = crate::R<bool, EP3_TYP_A>;
impl EP3_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP3_TYP_A {
        match self.bits {
            false => EP3_TYP_A::EP_IN,
            true => EP3_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP3_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP3_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP3_TYP`"]
pub struct EP3_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP3_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP3_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP3_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP3_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// ---- EP4_TYP (bit 3) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP4_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP4_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP4_TYP_A) -> Self {
        match variant {
            EP4_TYP_A::EP_IN => false,
            EP4_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP4_TYP`"]
pub type EP4_TYP_R = crate::R<bool, EP4_TYP_A>;
impl EP4_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP4_TYP_A {
        match self.bits {
            false => EP4_TYP_A::EP_IN,
            true => EP4_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP4_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP4_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP4_TYP`"]
pub struct EP4_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP4_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP4_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP4_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP4_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// ---- EP5_TYP (bit 4) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP5_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP5_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP5_TYP_A) -> Self {
        match variant {
            EP5_TYP_A::EP_IN => false,
            EP5_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP5_TYP`"]
pub type EP5_TYP_R = crate::R<bool, EP5_TYP_A>;
impl EP5_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP5_TYP_A {
        match self.bits {
            false => EP5_TYP_A::EP_IN,
            true => EP5_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP5_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP5_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP5_TYP`"]
pub struct EP5_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP5_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP5_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP5_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP5_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// ---- EP6_TYP (bit 5) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP6_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP6_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP6_TYP_A) -> Self {
        match variant {
            EP6_TYP_A::EP_IN => false,
            EP6_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP6_TYP`"]
pub type EP6_TYP_R = crate::R<bool, EP6_TYP_A>;
impl EP6_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP6_TYP_A {
        match self.bits {
            false => EP6_TYP_A::EP_IN,
            true => EP6_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP6_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP6_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP6_TYP`"]
pub struct EP6_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP6_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP6_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP6_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP6_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
// ---- EP7_TYP (bit 6) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP7_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP7_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP7_TYP_A) -> Self {
        match variant {
            EP7_TYP_A::EP_IN => false,
            EP7_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP7_TYP`"]
pub type EP7_TYP_R = crate::R<bool, EP7_TYP_A>;
impl EP7_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP7_TYP_A {
        match self.bits {
            false => EP7_TYP_A::EP_IN,
            true => EP7_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP7_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP7_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP7_TYP`"]
pub struct EP7_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP7_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP7_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP7_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP7_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
// ---- EP8_TYP (bit 7) ----
#[doc = "Endpoint Type Indication.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EP8_TYP_A {
    #[doc = "0: IN outpoint"]
    EP_IN,
    #[doc = "1: OUT outpoint"]
    EP_OUT,
}
impl From<EP8_TYP_A> for bool {
    #[inline(always)]
    fn from(variant: EP8_TYP_A) -> Self {
        match variant {
            EP8_TYP_A::EP_IN => false,
            EP8_TYP_A::EP_OUT => true,
        }
    }
}
#[doc = "Reader of field `EP8_TYP`"]
pub type EP8_TYP_R = crate::R<bool, EP8_TYP_A>;
impl EP8_TYP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EP8_TYP_A {
        match self.bits {
            false => EP8_TYP_A::EP_IN,
            true => EP8_TYP_A::EP_OUT,
        }
    }
    #[doc = "Checks if the value of the field is `EP_IN`"]
    #[inline(always)]
    pub fn is_ep_in(&self) -> bool {
        *self == EP8_TYP_A::EP_IN
    }
    #[doc = "Checks if the value of the field is `EP_OUT`"]
    #[inline(always)]
    pub fn is_ep_out(&self) -> bool {
        *self == EP8_TYP_A::EP_OUT
    }
}
#[doc = "Write proxy for field `EP8_TYP`"]
pub struct EP8_TYP_W<'a> {
    w: &'a mut W,
}
impl<'a> EP8_TYP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EP8_TYP_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "IN outpoint"]
    #[inline(always)]
    pub fn ep_in(self) -> &'a mut W {
        self.variant(EP8_TYP_A::EP_IN)
    }
    #[doc = "OUT outpoint"]
    #[inline(always)]
    pub fn ep_out(self) -> &'a mut W {
        self.variant(EP8_TYP_A::EP_OUT)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
// Read accessors: bit offsets 0..7 mirror the writer shifts above.
impl R {
    #[doc = "Bit 0 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep1_typ(&self) -> EP1_TYP_R {
        EP1_TYP_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep2_typ(&self) -> EP2_TYP_R {
        EP2_TYP_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep3_typ(&self) -> EP3_TYP_R {
        EP3_TYP_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep4_typ(&self) -> EP4_TYP_R {
        EP4_TYP_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep5_typ(&self) -> EP5_TYP_R {
        EP5_TYP_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep6_typ(&self) -> EP6_TYP_R {
        EP6_TYP_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep7_typ(&self) -> EP7_TYP_R {
        EP7_TYP_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep8_typ(&self) -> EP8_TYP_R {
        EP8_TYP_R::new(((self.bits >> 7) & 0x01) != 0)
    }
}
// Writer entry points: each hands back the matching write proxy.
impl W {
    #[doc = "Bit 0 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep1_typ(&mut self) -> EP1_TYP_W {
        EP1_TYP_W { w: self }
    }
    #[doc = "Bit 1 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep2_typ(&mut self) -> EP2_TYP_W {
        EP2_TYP_W { w: self }
    }
    #[doc = "Bit 2 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep3_typ(&mut self) -> EP3_TYP_W {
        EP3_TYP_W { w: self }
    }
    #[doc = "Bit 3 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep4_typ(&mut self) -> EP4_TYP_W {
        EP4_TYP_W { w: self }
    }
    #[doc = "Bit 4 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep5_typ(&mut self) -> EP5_TYP_W {
        EP5_TYP_W { w: self }
    }
    #[doc = "Bit 5 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep6_typ(&mut self) -> EP6_TYP_W {
        EP6_TYP_W { w: self }
    }
    #[doc = "Bit 6 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep7_typ(&mut self) -> EP7_TYP_W {
        EP7_TYP_W { w: self }
    }
    #[doc = "Bit 7 - Endpoint Type Indication."]
    #[inline(always)]
    pub fn ep8_typ(&mut self) -> EP8_TYP_W {
        EP8_TYP_W { w: self }
    }
}
#[cfg(test)]
mod cli {
    //! Integration tests for the invert-match behaviour of the CLI binary.
    use std::process::Command;

    use assert_cmd::prelude::*;

    /// Three newline-separated JSON lines; the first one matches the pattern
    /// passed on the command line below.
    const INPUT: &str = "{\"name\":\"jeff goldblum\"}\n{\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}\n{\"name\":\"blanco white\"}\n";

    /// Expected stdout when matching is inverted: every line EXCEPT the match.
    const EXPECTED: &str = "{\"id\":\"404c18ce-04ac-457c-99f5-d548b27aa583\"}\n{\"name\":\"blanco white\"}\n";

    /// Runs the binary with `flag` plus a literal JSON pattern, feeds `INPUT`
    /// on stdin, and asserts that only the non-matching lines are printed.
    /// Shared by both tests so the short and long flag forms stay in sync.
    fn assert_inverted_match(flag: &str) {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg(flag).arg(r#"{"name":"jeff goldblum"}"#);
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(INPUT);
        assert_cmd.assert().success().stdout(EXPECTED);
    }

    #[test]
    fn should_invert_match_when_v_flag_is_specified() {
        assert_inverted_match("-v");
    }

    #[test]
    fn should_invert_match_when_invert_flag_is_specified() {
        assert_inverted_match("--invert-match");
    }
}
#![doc = "generated by AutoRust 0.1.0"]
// NOTE(review): AutoRust-generated crate root. Each `pub use` re-exports the
// same names (`models`, `operations`, `API_VERSION`) — these collide if more
// than one package feature is enabled, so the features are presumably meant to
// be mutually exclusive; confirm against the crate's Cargo.toml/docs.
#[cfg(feature = "package-webservices-2017-01")]
mod package_webservices_2017_01;
#[cfg(feature = "package-webservices-2017-01")]
pub use package_webservices_2017_01::{models, operations, API_VERSION};
#[cfg(feature = "package-commitmentPlans-2016-05-preview")]
mod package_commitmentplans_2016_05_preview;
#[cfg(feature = "package-commitmentPlans-2016-05-preview")]
pub use package_commitmentplans_2016_05_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-workspaces-2016-04")]
mod package_workspaces_2016_04;
#[cfg(feature = "package-workspaces-2016-04")]
pub use package_workspaces_2016_04::{models, operations, API_VERSION};
#[cfg(feature = "package-workspaces-2019-10")]
mod package_workspaces_2019_10;
#[cfg(feature = "package-workspaces-2019-10")]
pub use package_workspaces_2019_10::{models, operations, API_VERSION};
#[cfg(feature = "package-webservices-2016-05-preview")]
mod package_webservices_2016_05_preview;
#[cfg(feature = "package-webservices-2016-05-preview")]
pub use package_webservices_2016_05_preview::{models, operations, API_VERSION};
// Per-client configuration: API version, HTTP client, endpoint, and credentials.
pub struct OperationConfig {
    pub api_version: String,
    pub client: reqwest::Client,
    pub base_path: String,
    pub token_credential: Option<Box<dyn azure_core::TokenCredential>>,
    pub token_credential_resource: String,
}
impl OperationConfig {
    // Builds a config with the given credential and all other fields defaulted.
    pub fn new(token_credential: Box<dyn azure_core::TokenCredential>) -> Self {
        Self {
            token_credential: Some(token_credential),
            ..Default::default()
        }
    }
}
impl Default for OperationConfig {
    // Defaults target the public Azure Resource Manager endpoint with no credential.
    fn default() -> Self {
        Self {
            api_version: API_VERSION.to_owned(),
            client: reqwest::Client::new(),
            base_path: "https://management.azure.com".to_owned(),
            token_credential: None,
            token_credential_resource: "https://management.azure.com/".to_owned(),
        }
    }
}
/// Returns the absolute value of `x`.
///
/// Delegates to [`i32::abs`] instead of a hand-rolled comparison/negation.
/// Semantics are unchanged, including the edge case: `abs(i32::MIN)`
/// overflows (panics in debug builds, wraps to `i32::MIN` in release) —
/// exactly as the previous `-x` did.
fn abs(x: i32) -> i32 {
    x.abs()
}

/// Demo entry point: prints the absolute value of a sample number.
fn main() {
    let nbr = -2;
    println!("abs of nbr is {}", abs(nbr));
}
// NOTE(review): svd2rust-generated accessor for the RCC clock control & status
// register (CSR). Do not hand-edit; regenerate from the SVD (the "occured"
// typos below come from the SVD description strings). Comments added only.
#[doc = "Register `CSR` reader"]
pub type R = crate::R<CSR_SPEC>;
#[doc = "Register `CSR` writer"]
pub type W = crate::W<CSR_SPEC>;
#[doc = "Field `LSION` reader - Internal low-speed oscillator enable"]
pub type LSION_R = crate::BitReader<LSION_A>;
#[doc = "Internal low-speed oscillator enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LSION_A {
    #[doc = "0: LSI oscillator Off"]
    Off = 0,
    #[doc = "1: LSI oscillator On"]
    On = 1,
}
impl From<LSION_A> for bool {
    #[inline(always)]
    fn from(variant: LSION_A) -> Self {
        variant as u8 != 0
    }
}
impl LSION_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LSION_A {
        match self.bits {
            false => LSION_A::Off,
            true => LSION_A::On,
        }
    }
    #[doc = "LSI oscillator Off"]
    #[inline(always)]
    pub fn is_off(&self) -> bool {
        *self == LSION_A::Off
    }
    #[doc = "LSI oscillator On"]
    #[inline(always)]
    pub fn is_on(&self) -> bool {
        *self == LSION_A::On
    }
}
#[doc = "Field `LSION` writer - Internal low-speed oscillator enable"]
pub type LSION_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LSION_A>;
impl<'a, REG, const O: u8> LSION_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "LSI oscillator Off"]
    #[inline(always)]
    pub fn off(self) -> &'a mut crate::W<REG> {
        self.variant(LSION_A::Off)
    }
    #[doc = "LSI oscillator On"]
    #[inline(always)]
    pub fn on(self) -> &'a mut crate::W<REG> {
        self.variant(LSION_A::On)
    }
}
// LSIRDY is read-only: a reader is generated but no writer proxy.
#[doc = "Field `LSIRDY` reader - Internal low-speed oscillator ready"]
pub type LSIRDY_R = crate::BitReader<LSIRDYR_A>;
#[doc = "Internal low-speed oscillator ready\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LSIRDYR_A {
    #[doc = "0: LSI oscillator not ready"]
    NotReady = 0,
    #[doc = "1: LSI oscillator ready"]
    Ready = 1,
}
impl From<LSIRDYR_A> for bool {
    #[inline(always)]
    fn from(variant: LSIRDYR_A) -> Self {
        variant as u8 != 0
    }
}
impl LSIRDY_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LSIRDYR_A {
        match self.bits {
            false => LSIRDYR_A::NotReady,
            true => LSIRDYR_A::Ready,
        }
    }
    #[doc = "LSI oscillator not ready"]
    #[inline(always)]
    pub fn is_not_ready(&self) -> bool {
        *self == LSIRDYR_A::NotReady
    }
    #[doc = "LSI oscillator ready"]
    #[inline(always)]
    pub fn is_ready(&self) -> bool {
        *self == LSIRDYR_A::Ready
    }
}
// RMVF's enum has only the write-1-to-clear variant, so variant() is Option-valued.
#[doc = "Field `RMVF` reader - Remove reset flag"]
pub type RMVF_R = crate::BitReader<RMVFW_A>;
#[doc = "Remove reset flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RMVFW_A {
    #[doc = "1: Clears the reset flag"]
    Clear = 1,
}
impl From<RMVFW_A> for bool {
    #[inline(always)]
    fn from(variant: RMVFW_A) -> Self {
        variant as u8 != 0
    }
}
impl RMVF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<RMVFW_A> {
        match self.bits {
            true => Some(RMVFW_A::Clear),
            _ => None,
        }
    }
    #[doc = "Clears the reset flag"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == RMVFW_A::Clear
    }
}
#[doc = "Field `RMVF` writer - Remove reset flag"]
pub type RMVF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RMVFW_A>;
impl<'a, REG, const O: u8> RMVF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clears the reset flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(RMVFW_A::Clear)
    }
}
#[doc = "Field `BORRSTF` reader - BOR reset flag"]
pub type BORRSTF_R = crate::BitReader<BORRSTFR_A>;
#[doc = "BOR reset flag\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BORRSTFR_A {
    #[doc = "0: No reset has occured"]
    NoReset = 0,
    #[doc = "1: A reset has occured"]
    Reset = 1,
}
impl From<BORRSTFR_A> for bool {
    #[inline(always)]
    fn from(variant: BORRSTFR_A) -> Self {
        variant as u8 != 0
    }
}
impl BORRSTF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> BORRSTFR_A {
        match self.bits {
            false => BORRSTFR_A::NoReset,
            true => BORRSTFR_A::Reset,
        }
    }
    #[doc = "No reset has occured"]
    #[inline(always)]
    pub fn is_no_reset(&self) -> bool {
        *self == BORRSTFR_A::NoReset
    }
    #[doc = "A reset has occured"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == BORRSTFR_A::Reset
    }
}
#[doc = "Field `BORRSTF` writer - BOR reset flag"]
pub type BORRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, BORRSTFR_A>;
impl<'a, REG, const O: u8> BORRSTF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "No reset has occured"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut crate::W<REG> {
        self.variant(BORRSTFR_A::NoReset)
    }
    #[doc = "A reset has occured"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(BORRSTFR_A::Reset)
    }
}
// The remaining reset-flag fields share BORRSTF's enumeration, so the generator
// re-exports its reader/writer types under per-field aliases.
#[doc = "Field `PADRSTF` reader - PIN reset flag"]
pub use BORRSTF_R as PADRSTF_R;
#[doc = "Field `PORRSTF` reader - POR/PDR reset flag"]
pub use BORRSTF_R as PORRSTF_R;
#[doc = "Field `SFTRSTF` reader - Software reset flag"]
pub use BORRSTF_R as SFTRSTF_R;
#[doc = "Field `WDGRSTF` reader - Independent watchdog reset flag"]
pub use BORRSTF_R as WDGRSTF_R;
#[doc = "Field `WWDGRSTF` reader - Window watchdog reset flag"]
pub use BORRSTF_R as WWDGRSTF_R;
#[doc = "Field `LPWRRSTF` reader - Low-power reset flag"]
pub use BORRSTF_R as LPWRRSTF_R;
#[doc = "Field `PADRSTF` writer - PIN reset flag"]
pub use BORRSTF_W as PADRSTF_W;
#[doc = "Field `PORRSTF` writer - POR/PDR reset flag"]
pub use BORRSTF_W as PORRSTF_W;
#[doc = "Field `SFTRSTF` writer - Software reset flag"]
pub use BORRSTF_W as SFTRSTF_W;
#[doc = "Field `WDGRSTF` writer - Independent watchdog reset flag"]
pub use BORRSTF_W as WDGRSTF_W;
#[doc = "Field `WWDGRSTF` writer - Window watchdog reset flag"]
pub use BORRSTF_W as WWDGRSTF_W;
#[doc = "Field `LPWRRSTF` writer - Low-power reset flag"]
pub use BORRSTF_W as LPWRRSTF_W;
// Read accessors: bits 0-1 (LSI control/status) and bits 24-31 (reset flags).
impl R {
    #[doc = "Bit 0 - Internal low-speed oscillator enable"]
    #[inline(always)]
    pub fn lsion(&self) -> LSION_R {
        LSION_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Internal low-speed oscillator ready"]
    #[inline(always)]
    pub fn lsirdy(&self) -> LSIRDY_R {
        LSIRDY_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 24 - Remove reset flag"]
    #[inline(always)]
    pub fn rmvf(&self) -> RMVF_R {
        RMVF_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - BOR reset flag"]
    #[inline(always)]
    pub fn borrstf(&self) -> BORRSTF_R {
        BORRSTF_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - PIN reset flag"]
    #[inline(always)]
    pub fn padrstf(&self) -> PADRSTF_R {
        PADRSTF_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - POR/PDR reset flag"]
    #[inline(always)]
    pub fn porrstf(&self) -> PORRSTF_R {
        PORRSTF_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - Software reset flag"]
    #[inline(always)]
    pub fn sftrstf(&self) -> SFTRSTF_R {
        SFTRSTF_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - Independent watchdog reset flag"]
    #[inline(always)]
    pub fn wdgrstf(&self) -> WDGRSTF_R {
        WDGRSTF_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - Window watchdog reset flag"]
    #[inline(always)]
    pub fn wwdgrstf(&self) -> WWDGRSTF_R {
        WWDGRSTF_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - Low-power reset flag"]
    #[inline(always)]
    pub fn lpwrrstf(&self) -> LPWRRSTF_R {
        LPWRRSTF_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write proxies: bit offsets are the writers' const generic parameters
// (note: no writer for read-only LSIRDY).
impl W {
    #[doc = "Bit 0 - Internal low-speed oscillator enable"]
    #[inline(always)]
    #[must_use]
    pub fn lsion(&mut self) -> LSION_W<CSR_SPEC, 0> {
        LSION_W::new(self)
    }
    #[doc = "Bit 24 - Remove reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn rmvf(&mut self) -> RMVF_W<CSR_SPEC, 24> {
        RMVF_W::new(self)
    }
    #[doc = "Bit 25 - BOR reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn borrstf(&mut self) -> BORRSTF_W<CSR_SPEC, 25> {
        BORRSTF_W::new(self)
    }
    #[doc = "Bit 26 - PIN reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn padrstf(&mut self) -> PADRSTF_W<CSR_SPEC, 26> {
        PADRSTF_W::new(self)
    }
    #[doc = "Bit 27 - POR/PDR reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn porrstf(&mut self) -> PORRSTF_W<CSR_SPEC, 27> {
        PORRSTF_W::new(self)
    }
    #[doc = "Bit 28 - Software reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn sftrstf(&mut self) -> SFTRSTF_W<CSR_SPEC, 28> {
        SFTRSTF_W::new(self)
    }
    #[doc = "Bit 29 - Independent watchdog reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn wdgrstf(&mut self) -> WDGRSTF_W<CSR_SPEC, 29> {
        WDGRSTF_W::new(self)
    }
    #[doc = "Bit 30 - Window watchdog reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn wwdgrstf(&mut self) -> WWDGRSTF_W<CSR_SPEC, 30> {
        WWDGRSTF_W::new(self)
    }
    #[doc = "Bit 31 - Low-power reset flag"]
    #[inline(always)]
    #[must_use]
    pub fn lpwrrstf(&mut self) -> LPWRRSTF_W<CSR_SPEC, 31> {
        LPWRRSTF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "clock control &amp; status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CSR_SPEC;
impl crate::RegisterSpec for CSR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`csr::R`](R) reader structure"]
impl crate::Readable for CSR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`csr::W`](W) writer structure"]
impl crate::Writable for CSR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CSR to value 0x0e00_0000"]
impl crate::Resettable for CSR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0e00_0000;
}
// Criterion benchmark suite for 2x2 matrix operations (transpose, determinant,
// inverse, mat*vec, mat*mat). The `bench_unop!`/`bench_binop!` macros and the
// `random_mat2`/`random_vec2` input generators come from the local support
// module pulled in below.
#[path = "support/macros.rs"]
#[macro_use]
mod macros;
mod support;

// NOTE(review): `Criterion` and `Mul` look unused here, but the bench macros
// presumably reference them in their expansions — confirm before removing.
use criterion::{criterion_group, criterion_main, Criterion};
use std::ops::Mul;
use support::*;

bench_binop!(
    mat2_mul_vec2,
    "mat2 mul vec2",
    op => mul,
    from1 => random_mat2,
    from2 => random_vec2
);

bench_unop!(
    mat2_transpose,
    "mat2 transpose",
    op => transpose,
    from => random_mat2
);

bench_unop!(
    mat2_determinant,
    "mat2 determinant",
    op => determinant,
    from => random_mat2
);

bench_unop!(mat2_inverse, "mat2 inverse", op => inverse, from => random_mat2);

bench_binop!(mat2_mul_mat2, "mat2 mul mat2", op => mul, from => random_mat2);

criterion_group!(
    benches,
    mat2_transpose,
    mat2_determinant,
    mat2_inverse,
    mat2_mul_vec2,
    mat2_mul_mat2,
);
criterion_main!(benches);
use std::ops::{Deref, DerefMut, Index, IndexMut};

/// A fixed 256-entry palette.
///
/// Every possible `u8` index maps to one entry in `map`; `size` records the
/// nominal palette length and `transparent` optionally marks one entry as the
/// transparent colour.
pub struct Palette<T> {
    pub map: [T; 256],
    pub size: usize,
    pub transparent: Option<u8>,
}

impl<T: Copy> Palette<T> {
    /// Builds a palette with every slot initialised to `def`.
    ///
    /// `c` selects the transparent entry; it accepts either a bare `u8` or an
    /// `Option<u8>` thanks to the `Into<Option<u8>>` bound.
    pub fn new<C: Into<Option<u8>>>(def: T, c: C) -> Self {
        let transparent = c.into();
        Palette {
            map: [def; 256],
            size: 256,
            transparent,
        }
    }
}

/// Read access by palette index; a `u8` can never be out of bounds for the
/// 256-entry table, so this never panics.
impl<T> Index<u8> for Palette<T> {
    type Output = T;

    fn index(&self, index: u8) -> &T {
        &self.map[usize::from(index)]
    }
}

/// Write access by palette index.
impl<T> IndexMut<u8> for Palette<T> {
    fn index_mut(&mut self, index: u8) -> &mut T {
        &mut self.map[usize::from(index)]
    }
}

/// Lets a palette be used anywhere a `&[T]` slice is expected.
impl<T> Deref for Palette<T> {
    type Target = [T];

    fn deref(&self) -> &[T] {
        &self.map
    }
}

/// Mutable slice view over the whole palette.
impl<T> DerefMut for Palette<T> {
    fn deref_mut(&mut self) -> &mut [T] {
        &mut self.map
    }
}

// NOTE(review): a large commented-out `XOR` palette-pairing experiment that
// previously lived here was removed; recover it from version control if needed.
use util::*;

/// Number of distinct lowercase answer letters ('a'..='z').
const LEN: usize = 'z' as usize - 'a' as usize + 1;

/// For each blank-line-separated group in the input file (path given as the
/// first CLI argument), count the questions answered "yes" by *every*
/// member, then print the sum of those counts over all groups.
fn main() {
    let timer = Timer::new();
    let groups = input::vec::<String>(&std::env::args().nth(1).unwrap(), "\n\n");
    let count: usize = groups
        .iter()
        .map(|group| {
            // One bitmap per group member: seen[q] == true iff that member
            // answered question 'a'+q.
            let members: Vec<[bool; LEN]> = group
                .split('\n')
                .map(|line| {
                    let mut seen = [false; LEN];
                    for b in line.bytes() {
                        seen[b as usize - 'a' as usize] = true;
                    }
                    seen
                })
                .collect();
            // Count questions present in every member's bitmap.
            (0..LEN)
                .filter(|&q| members.iter().all(|m| m[q]))
                .count()
        })
        .sum();
    timer.print();
    println!("{}", count);
}
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[warn( clippy::cast_precision_loss, clippy::cast_possible_truncation, clippy::cast_sign_loss, clippy::cast_possible_wrap, clippy::cast_lossless )] #[allow(clippy::no_effect, clippy::unnecessary_operation)] fn main() { // Casting from *size 1isize as i8; 1isize as f64; 1usize as f64; 1isize as f32; 1usize as f32; 1isize as i32; 1isize as u32; 1usize as u32; 1usize as i32; // Casting to *size 1i64 as isize; 1i64 as usize; 1u64 as isize; 1u64 as usize; 1u32 as isize; 1u32 as usize; // Should not trigger any lint 1i32 as isize; // Neither should this 1i32 as usize; }
// NOTE(review): this module targets pre-1.0-era crate APIs (rustc_serialize
// JSON, postgres `SslMode`, rand `IndependentSample`) — presumably pinned to
// old versions; confirm before upgrading dependencies.
use std::rc::Rc;
use std::thread;
use std::sync::{Arc, Mutex};

extern crate easydb;
use self::easydb::Column;
use self::easydb::Table;
use self::easydb::DbPool;

use std::collections::BTreeMap;

extern crate rustc_serialize;
use self::rustc_serialize::json::Json;
use self::rustc_serialize::json::ToJson;

extern crate postgres;
use self::postgres::{Connection, SslMode};
use self::postgres::types::Type;

extern crate rand;
use self::rand::distributions::{IndependentSample, Range};

/// A fixed-size pool of PostgreSQL connections, each guarded by its own
/// `Mutex` so a random connection can be borrowed per query.
pub struct MyDbPool {
    // Connection string; stored but not read anywhere in this chunk.
    dsn:String,
    conns:Vec<Mutex<Connection>>,
}

impl MyDbPool {
    /// Opens up to `size` connections to `dsn`.
    ///
    /// NOTE(review): on the first connection failure the loop `break`s, so
    /// the pool may end up with fewer than `size` connections — possibly
    /// zero, which makes `execute` panic on its random index.
    pub fn new(dsn:&str, size:u32) -> MyDbPool {
        let mut conns = vec![];
        for i in 0..size {
            let conn = match Connection::connect(dsn, &SslMode::None) {
                Ok(conn) => conn,
                Err(e) => {
                    println!("Connection error: {}", e);
                    break;
                }
            };
            conns.push(Mutex::new(conn));
        }
        MyDbPool {
            dsn:dsn.to_string(),
            conns:conns,
        }
    }
}

impl DbPool for MyDbPool {
    /// Runs `sql` on a randomly chosen pooled connection and returns a JSON
    /// object `{ "data": [row objects...], "rows": <count> }`.
    ///
    /// Panics (via `unwrap`) on prepare/query failure or an empty pool.
    fn execute(&self, sql:&str) -> Json {
        //println!("{}", sql);
        let between = Range::new(0, self.conns.len());
        let mut rng = rand::thread_rng();
        let rand_int = between.ind_sample(&mut rng);
        let conn = self.conns[rand_int].lock().unwrap();
        let stmt = conn.prepare(&sql).unwrap();
        let rows = stmt.query(&[]).unwrap();
        let mut back_obj = BTreeMap::new();
        let mut data:Vec<Json> = Vec::new();
        for row in &rows {
            let mut row_map = BTreeMap::new();
            let columns = row.columns();
            for column in columns {
                let name = column.name();
                // Map each column to JSON by its Postgres type.
                match *column.type_() {
                    Type::Int4 => {
                        let value:i32 = row.get(name);
                        row_map.insert(name.to_string(), value.to_json());
                    },
                    Type::Int8 => {
                        let value:i64 = row.get(name);
                        row_map.insert(name.to_string(), value.to_json());
                    },
                    Type::Timestamp => {
                        // NOTE(review): timestamp columns are silently
                        // dropped from the output — confirm this is intended.
                    },
                    _ => {
                        // Fallback: read any other type as a string.
                        let value:String = row.get(name);
                        row_map.insert(name.to_string(), value.to_json());
                    },
                }
            }
            data.push(row_map.to_json());
        }
        back_obj.insert("data".to_string(), data.to_json());
        back_obj.insert("rows".to_string(), rows.len().to_json());
        back_obj.to_json()
    }
}

/// A named collection of `Table` definitions sharing one data-center
/// (`DbPool`) handle.
pub struct DataBase<T> {
    pub name:String,
    pub table_list:BTreeMap<String, Table<T>>,
    pub dc:Arc<T>, //data center
}

impl<T:DbPool> DataBase<T> {
    /// Builds a `Table` from a column list, keyed by column name.
    fn get_table_define(name:&str, vec:Vec<Column>, dc:Arc<T>) -> Table<T> {
        let mut map = BTreeMap::new();
        for col in vec {
            map.insert(col.name.clone(), col);
        }
        Table::new(name, map, dc)
    }

    /// Creates the database with three hard-coded table definitions
    /// ("emp", "raw", "raw_tmp") and prints each table's DDL.
    pub fn new(name:&str, dc:Arc<T>) -> DataBase<T> {
        let mut table_list = BTreeMap::new();
        // "emp" — small demo table.
        {
            let dc = dc.clone();
            let vec = vec![
                Column::new("id", "serial", -1, "primary key", false),
                Column::new("name", "varchar", 80, "not null", true),
                Column::new("salary", "integer", -1, "default 0", false),
                Column::new("create_time", "bigint", -1, "default -1", false),
                Column::new("type", "integer", -1, "default -1", false),
                Column::new("version", "integer", -1, "default -1", false),
            ];
            let table = DataBase::get_table_define("emp", vec, dc);
            table_list.insert(table.name.clone(), table);
        }
        // "raw" — wide tracking-event table.
        {
            let dc = dc.clone();
            let vec = vec![
                Column::new("id", "serial", -1, "primary key", false),
                Column::new("app_id", "integer", -1, "default 0", false),
                Column::new("device_id", "integer", -1, "default 0", false),
                Column::new("date", "integer", -1, "", false),
                Column::new("type", "integer", -1, "default 0", false),
                Column::new("rtype", "integer", -1, "default 0", false),
                Column::new("media", "integer", -1, "default 0", false),
                Column::new("placement", "integer", -1, "default 0", false),
                Column::new("type_times", "integer", -1, "default 0", false),
                Column::new("event_interval", "integer", -1, "default 0", false),
                Column::new("uid", "varchar", 20, "default ''", false),
                Column::new("created_at", "integer", -1, "default 0", false),
                Column::new("accessed_at", "integer", -1, "default 0", false),
                Column::new("it", "integer", -1, "default 0", false),
                Column::new("at", "integer", -1, "default 0", false),
                Column::new("tkStamp", "integer", -1, "default 0", false),
                Column::new("ch", "varchar", 30, "default ''", false),
                Column::new("sdkv", "varchar", 20, "default ''", false),
                Column::new("appv", "varchar", 20, "default ''", false),
                Column::new("osv", "varchar", 10, "default ''", false),
                Column::new("os", "varchar", 20, "default ''", false),
                Column::new("brand", "varchar", 20, "default ''", false),
                Column::new("model", "varchar", 20, "default ''", false),
                Column::new("net", "varchar", 20, "default ''", false),
                Column::new("mcc", "varchar", 10, "default ''", false),
                Column::new("mnc", "varchar", 10, "default ''", false),
                Column::new("ap_mac", "varchar", 20, "default ''", false),
                Column::new("is_jailbreak", "integer", -1, "default 0", false),
                Column::new("is_root", "integer", -1, "default 0", false),
                Column::new("ad_tracked", "integer", -1, "default 0", false),
                Column::new("dev_name", "varchar", 100, "default ''", false),
                Column::new("package_name", "varchar", 100, "default ''", false),
                Column::new("keyword", "varchar", 50, "default ''", false),
                Column::new("campaign", "varchar", 20, "default ''", false),
                Column::new("creative", "varchar", 50, "default ''", false),
                Column::new("media_appkey", "varchar", 255, "default ''", false),
                Column::new("media_advertiserid", "varchar", 255, "default ''", false),
                Column::new("media_appid", "varchar", 255, "default ''", false),
                Column::new("media_custom", "varchar", 255, "default ''", false),
                Column::new("site_ip", "integer", -1, "default 0", false),
                Column::new("track_ip", "integer", -1, "default 0", false),
                Column::new("region_id", "integer", -1, "default 0", false),
                Column::new("site_ua", "varchar", 50, "default ''", false),
                Column::new("track_ua", "varchar", 50, "default ''", false),
                Column::new("source_type", "integer", -1, "default 0", false),
                Column::new("cheat_type", "integer", -1, "default 0", false),
                Column::new("ascription_type", "integer", -1, "default 0", false),
                Column::new("match_type", "integer", -1, "default 0", false),
                Column::new("purchase_currency", "integer", -1, "default 0", false),
                Column::new("purchase_value", "integer", -1, "default 0", false),
                Column::new("is_last_click", "integer", -1, "default 0", false),
                Column::new("ua", "varchar", 255, "default ''", false),
                Column::new("subchannel", "varchar", 255, "default ''", false),
                Column::new("extra", "integer", -1, "default 0", false),
            ];
            let table = DataBase::get_table_define("raw", vec, dc);
            table_list.insert(table.name.clone(), table);
        }
        // "raw_tmp" — aggregation scratch table.
        {
            let dc = dc.clone();
            let vec = vec![
                Column::new("id", "serial", -1, "primary key", false),
                Column::new("app_id", "integer", -1, "default 0", false),
                Column::new("device_id", "integer", -1, "default 0", false),
                Column::new("count", "integer", -1, "default 0", false),
            ];
            let table = DataBase::get_table_define("raw_tmp", vec, dc);
            table_list.insert(table.name.clone(), table);
        }
        // Log the DDL for every defined table.
        for (name, table) in table_list.iter() {
            println!("{}", table.to_ddl_string());
        }
        DataBase {
            name:name.to_string(),
            table_list:table_list,
            dc:dc,
        }
    }

    /// Looks up a table definition by name.
    pub fn get_table(&self, name:&str) -> Option<&Table<T>> {
        self.table_list.get(name)
    }

    /// Delegates raw SQL to the pool; currently always returns `Ok`.
    pub fn execute(&self, sql:&str) -> Result<Json, i32> {
        Result::Ok(self.dc.execute(sql))
    }
}
// `log` facade backend that writes records over the Cortex-M ITM/SWO
// stimulus port 0, with the TPIU prescaler helper used to set the SWO baud.
use cortex_m::{
    iprintln,
    peripheral::{
        ITM,
        TPIU,
    },
    interrupt,
};
use log::{ Log, Level, Metadata, Record, SetLoggerError };

// All output goes through ITM stimulus port 0.
const STIM_PORT_NUMBER: usize = 0;

#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Error {
    ImpossibleBaudRate,
}

///Updates the tpiu prescaler to output the desired baud rate
///trace_clk_freq: The frequency of TRACECLKIN in HZ, this is HCLK on most STM32 devices
/// but is implementation specific. Check the ref manual for TRACECLKIN
///baud: The baud rate to set on SWO
///Returns an error if baud > trace_clk_freq or if trace_clk_freq % baud != 0
pub fn update_tpiu_baudrate(trace_clk_freq: u32, baud: u32) -> Result<(), Error> {
    // Only exact integer divisions of the trace clock are representable.
    if baud > trace_clk_freq || trace_clk_freq % baud != 0 {
        Err(Error::ImpossibleBaudRate)
    } else {
        // ACPR holds (divisor - 1).
        let prescaler = (trace_clk_freq / baud) - 1;
        unsafe {
            (*TPIU::PTR).acpr.write(prescaler);
        }
        Ok(())
    }
}

/// Logger state: a global on/off switch plus a maximum level filter.
struct ItmLogger {
    enabled: bool,
    log_level: Level,
}

impl Log for ItmLogger {
    fn enabled(&self, metadata: &Metadata) -> bool {
        // Cheap software checks first: global switch and level filter.
        if !(self.enabled && metadata.level() <= self.log_level) {
            return false;
        }

        // Optional hardware checks: skip logging when no debugger/trace
        // sink is actually listening.
        #[cfg(feature = "perform-enabled-checks")]
        unsafe {
            use cortex_m::peripheral::DCB;
            const ITM_TCR_ENABLE_POS: u32 = 0;
            const ITM_TCR_ENABLE_MASK: u32 = 1 << ITM_TCR_ENABLE_POS;

            // NOTE(review): this uses `ITM::ptr()` while `log` below uses
            // `ITM::PTR` — same peripheral, but the two accessors come from
            // different cortex-m API generations; consider unifying.
            let itm = &(*ITM::ptr());

            // Check if DEBUGEN is set
            if !DCB::is_debugger_attached() {
                return false;
            }

            // Check if tracing is enabled
            if itm.tcr.read() & ITM_TCR_ENABLE_MASK == 0 {
                return false;
            }

            // Check if the stim port we're using is enabled
            if itm.ter[0].read() & (1 << (STIM_PORT_NUMBER as u32)) == 0 {
                return false;
            }
        }

        true
    }

    fn log(&self, record: &Record) {
        if self.enabled(record.metadata()) {
            unsafe {
                let itm = &mut (*ITM::PTR);
                // Critical section: keep one record's output contiguous on
                // the stimulus port.
                interrupt::free(|_| {
                    iprintln!(
                        &mut itm.stim[STIM_PORT_NUMBER],
                        "{:<5} [{}] {}",
                        record.level(),
                        record.target(),
                        record.args());
                });
            }
        }
    }

    // ITM writes are fire-and-forget; nothing to flush.
    fn flush(&self) {}
}

// Mutated only inside `interrupt::free` critical sections below.
static mut LOGGER: ItmLogger = ItmLogger {
    enabled: true,
    log_level: Level::Trace,
};

/// Initialise the logger and set the log level to the provided `log_level`
pub fn init_with_level(log_level: Level) -> Result<(), SetLoggerError> {
    interrupt::free(|_| unsafe { log::set_logger(&LOGGER) })?;
    log::set_max_level(log_level.to_level_filter());
    Ok(())
}

/// Initialize the logger with default log level (Trace)
pub fn init() -> Result<(), SetLoggerError> {
    init_with_level(Level::Trace)
}

/// Wrapper around `init` that panics if an error occurs
pub fn logger_init() {
    init().unwrap();
}

/// Globally disable all logging
pub fn disable_logger() {
    interrupt::free(|_| unsafe {
        LOGGER.enabled = false;
    });
}

/// Globally enable logging, level filtering is still performed
pub fn enable_logger() {
    interrupt::free(|_| unsafe {
        LOGGER.enabled = true;
    });
}
// Serde data model for the Asana REST API responses consumed by the client
// in this module.
use chrono::{DateTime, Utc};
use hyper::body::HttpBody;
use hyper::client::connect::dns::GaiResolver;
use hyper::client::HttpConnector;
use hyper::{header, Body, Method, Request, Response, Uri};
use hyper_tls::HttpsConnector;
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
use serde::{Deserialize, Serialize};
use serde_json;
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;

/// Aggregate of everything fetched from Asana in one export run.
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaData {
    pub users: Vec<AsanaUser>,
    pub projects: Vec<AsanaProject>,
    pub project_sections: Vec<AsanaProjectSections>,
    pub project_task_gids: Vec<AsanaProjectTaskGids>,
    pub tasks: Vec<AsanaTask>,
    pub task_stories: Vec<AsanaTaskStories>,
}

/// A project, as returned with `opt_fields=this.name,this.created_at`.
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaProject {
    pub gid: String,
    pub name: String,
    pub created_at: chrono::DateTime<chrono::Utc>,
}

/// All sections belonging to one project.
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaProjectSections {
    pub project_gid: String,
    pub sections: Vec<AsanaSection>,
}

#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaSection {
    pub gid: String,
    pub name: String,
}

// "Compact" records carry only the gid; used when listing resources.
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaTaskCompact {
    pub gid: String,
}

#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaAssigneeCompact {
    pub gid: String,
}

#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaMembershipCompact {
    pub gid: String,
}

/// The gids of all tasks in one project (paginated listing, flattened).
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaProjectTaskGids {
    pub project_gid: String,
    pub task_gids: Vec<String>,
}

/// A task with the fields requested by `AsanaClient::get_task`.
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaTask {
    pub gid: String,
    pub name: String,
    pub created_at: chrono::DateTime<chrono::Utc>,
    pub completed: bool,
    pub completed_at: Option<chrono::DateTime<chrono::Utc>>,
    pub assignee: Option<AsanaAssigneeCompact>,
    // Each membership is a map such as {"section": {gid}} per the requested
    // opt_fields.
    pub memberships: Vec<HashMap<String, AsanaMembershipCompact>>,
}

/// One story (comment/activity entry) on a task.
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaStory {
    pub created_at: chrono::DateTime<chrono::Utc>,
    pub resource_subtype: String,
    pub text: String,
}
/// All stories belonging to one task.
#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaTaskStories {
    pub task_gid: String,
    pub stories: Vec<AsanaStory>,
}

#[derive(Debug, Deserialize, Serialize)]
pub struct AsanaUser {
    pub gid: String,
    pub name: String,
    pub email: String,
}

impl AsanaUser {
    /// Placeholder user substituted when the API returns 404 for a gid.
    fn missing_user(user_gid: &str) -> AsanaUser {
        Self {
            gid: String::from(user_gid),
            name: format!("MissingUser({})", user_gid),
            email: format!("{}@nowhere.com", user_gid),
        }
    }
}

// ------

static BASE_URL: &str = "https://app.asana.com/api/1.0";

// ------ Internal helper structs

// Asana wraps single resources in {"data": ...}.
#[derive(Debug, Deserialize)]
struct AsanaContainer<T> {
    data: T,
}

// Asana wraps listings in {"data": [...], "next_page": {...}?}.
#[derive(Debug, Deserialize)]
struct AsanaPage<T> {
    data: Vec<T>,
    next_page: Option<AsanaNextPage>,
}

#[derive(Debug, Deserialize)]
struct AsanaNextPage {
    offset: String,
}

// ------

// Only recoverable error surfaced to callers; everything else panics.
#[derive(Debug)]
enum AsanaError {
    Missing,
}

impl fmt::Display for AsanaError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl std::error::Error for AsanaError {}

// ------

// https://url.spec.whatwg.org/#query-percent-encode-set
const QUERY_CONTROL_SET: &AsciiSet = &CONTROLS.add(b'+');

// Percent-encodes a query-string value (controls plus '+').
fn query_encode(query_str: &str) -> String {
    utf8_percent_encode(query_str, QUERY_CONTROL_SET).collect()
}

// ------

/// Thin HTTPS client for the Asana REST API with optional request rate
/// limiting. Most methods panic on transport or parse errors.
pub struct AsanaClient<'a> {
    client: hyper::Client<HttpsConnector<HttpConnector<GaiResolver>>>,
    // Personal access token used as a Bearer credential.
    token: &'a str,
    // When set, every request awaits a tick of this shared interval.
    rate_limiter: Option<Arc<futures::lock::Mutex<tokio::time::Interval>>>,
}

impl<'a> AsanaClient<'a> {
    /// Creates a client. `max_rps`, when given, must be in 1..=1000 and
    /// caps the request rate via a shared `tokio` interval.
    pub fn new(token: &str, max_rps: Option<u16>) -> AsanaClient {
        let https = hyper_tls::HttpsConnector::new();
        let client = hyper::Client::builder().build::<_, hyper::Body>(https);
        let rate_limiter = max_rps.map(|rps| {
            if rps == 0 || rps > 1000 {
                panic!("max_rps must be > 0 and <= 1000");
            }
            let duration_millis = 1000u64 / rps as u64;
            return Arc::new(futures::lock::Mutex::new(tokio::time::interval(
                tokio::time::Duration::from_millis(duration_millis),
            )));
        });
        AsanaClient {
            client,
            token,
            rate_limiter,
        }
    }

    /// Fetches one project (name + created_at). Panics on parse failure.
    pub async fn get_project(&self, project_gid: &str) -> AsanaProject {
        let uri_str = format!(
            "{}/projects/{}?opt_fields=this.name,this.created_at",
            BASE_URL, project_gid
        );
        log::debug!("get_project: project={}", project_gid);
        let body_str = self.get_response_as_string(&uri_str).await.unwrap();
        let project: AsanaContainer<AsanaProject> =
            serde_json::from_str(&body_str).unwrap_or_else(|err| {
                panic!(
                    "get_project: Could not parse AsanaProject: uri={} response.body={} error={}",
                    uri_str, body_str, err
                );
            });
        let project = project.data;
        return project;
    }

    /// Fetches all sections of a project, following pagination offsets.
    pub async fn get_project_sections(&self, project_gid: &str) -> AsanaProjectSections {
        let mut sections: Vec<AsanaSection> = Vec::with_capacity(10 as usize);
        let mut offset = None;
        loop {
            let uri_str = match offset {
                None => format!(
                    "{}/projects/{}/sections?opt_fields=this.name&limit=20",
                    BASE_URL, project_gid
                ),
                Some(offset) => format!(
                    "{}/projects/{}/sections?opt_fields=this.name&limit=20&offset={}",
                    BASE_URL, project_gid, offset
                ),
            };
            log::debug!("get_project_sections: project={}", project_gid);
            let body_str = self.get_response_as_string(&uri_str).await.unwrap();
            let page: AsanaPage<AsanaSection> =
                serde_json::from_str(&body_str).unwrap_or_else(|err| {
                    panic!(
                        "get_project_sections: Could not parse page: uri={} response.body={} error={}",
                        uri_str, body_str, err
                    );
                });
            for section in page.data {
                sections.push(section);
            }
            // Continue while the API hands back a next-page offset.
            offset = page.next_page.map(|np| np.offset);
            if offset.is_none() {
                break;
            }
        }
        return AsanaProjectSections {
            project_gid: project_gid.to_owned(),
            sections,
        };
    }

    /// Lists the gids of tasks in a project completed since `from`
    /// (or incomplete), following pagination.
    pub async fn get_project_task_gids(
        &self,
        project_gid: &str,
        from: &DateTime<Utc>,
    ) -> AsanaProjectTaskGids {
        let mut task_gids: Vec<String> = Vec::with_capacity(100 as usize);
        let completed_since_str = query_encode(&from.to_rfc3339());
        let mut offset = None;
        loop {
            let uri_str = match offset {
                None => format!(
                    "{}/tasks?project={}&completed_since={}&opt_fields=this.gid&limit=20",
                    BASE_URL, project_gid, completed_since_str
                ),
                Some(offset) => format!(
                    "{}/tasks?project={}&completed_since={}&opt_fields=this.gid&limit=20&offset={}",
                    BASE_URL, project_gid, completed_since_str, offset
                ),
            };
            log::debug!("get_project_task_gids: project={}", project_gid);
            let body_str = self.get_response_as_string(&uri_str).await.unwrap();
            let page: AsanaPage<AsanaTaskCompact> =
                serde_json::from_str(&body_str).unwrap_or_else(|err| {
                    panic!(
                        "get_project_task_gids: Could not parse page: uri={} response.body={} error={}",
                        uri_str, body_str, err
                    );
                });
            for task in page.data {
                task_gids.push(task.gid);
            }
            offset = page.next_page.map(|np| np.offset);
            if offset.is_none() {
                break;
            }
        }
        return AsanaProjectTaskGids {
            project_gid: project_gid.to_owned(),
            task_gids,
        };
    }

    /// Fetches one task with name/dates/completion/assignee/memberships.
    pub async fn get_task(&self, task_gid: &str) -> AsanaTask {
        let opt_fields = "this.(name|created_at|completed|completed_at),this.assignee.gid,this.memberships.section.gid";
        let uri_str = format!("{}/tasks/{}?opt_fields={}", BASE_URL, task_gid, opt_fields);
        log::debug!("get_task: task={}", task_gid);
        let body_str = self.get_response_as_string(&uri_str).await.unwrap();
        let task: AsanaContainer<AsanaTask> =
            serde_json::from_str(&body_str).unwrap_or_else(|err| {
                panic!(
                    "get_task: Could not parse task: uri={} response.body={} error={}",
                    uri_str, body_str, err
                );
            });
        let task = task.data;
        return task;
    }

    /// Fetches all stories of a task, following pagination.
    pub async fn get_task_stories(&self, task_gid: &str) -> AsanaTaskStories {
        let mut stories = Vec::new();
        let opt_fields = "this.(created_at|resource_subtype|text)";
        let mut offset = None;
        loop {
            let uri_str = match offset {
                None => format!(
                    "{}/tasks/{}/stories?opt_fields={}&limit=20",
                    BASE_URL, task_gid, opt_fields
                ),
                Some(offset) => format!(
                    "{}/tasks/{}/stories?opt_fields={}&limit=20&offset={}",
                    BASE_URL, task_gid, opt_fields, offset
                ),
            };
            log::debug!("get_task_stories: task={}", task_gid);
            let body_str = self.get_response_as_string(&uri_str).await.unwrap();
            let page: AsanaPage<AsanaStory> =
                serde_json::from_str(&body_str).unwrap_or_else(|err| {
                    panic!(
                        "get_task_stories: Could not parse page: uri={} response.body={} error={}",
                        uri_str, body_str, err
                    );
                });
            for story in page.data {
                stories.push(story);
            }
            offset = page.next_page.map(|np| np.offset);
            if offset.is_none() {
                break;
            }
        }
        return AsanaTaskStories {
            task_gid: task_gid.to_owned(),
            stories,
        };
    }

    /// Fetches one user; a 404 yields a synthetic `missing_user` record
    /// instead of panicking.
    pub async fn get_user(&self, user_gid: &str) -> AsanaUser {
        let uri_str = format!(
            "{}/users/{}?opt_fields=this.(name|email)",
            BASE_URL, user_gid
        );
        log::debug!("get_user: user_gid={}", user_gid);
        match self.get_response_as_string(&uri_str).await {
            Ok(body_str) => {
                let user: AsanaContainer<AsanaUser> = serde_json::from_str(&body_str)
                    .unwrap_or_else(|err| {
                        panic!(
                            "get_user: Could not parse user: uri={} response.body={} error={}",
                            uri_str, body_str, err
                        );
                    });
                return user.data;
            }
            Err(m) => match m {
                AsanaError::Missing => AsanaUser::missing_user(user_gid),
            },
        }
    }

    /// Performs one authenticated GET, honouring the optional rate limiter.
    /// Returns `Err(Missing)` on 404; panics on any other failure.
    async fn get_response_as_string(&self, uri_str: &str) -> Result<String, AsanaError> {
        let uri = uri_str.parse::<Uri>().expect("URL parsing error");
        let auth_header_val_str = format!("Bearer {}", self.token);
        let request = Request::builder()
            .method(Method::GET)
            .uri(uri)
            .header(header::AUTHORIZATION, &auth_header_val_str)
            .body(Body::empty())
            .expect("Request Creation Error");
        // Throttle: wait for the next interval tick before sending.
        if let Some(rate_limiter) = &self.rate_limiter {
            rate_limiter.lock().await.tick().await;
        }
        let mut response = self.client.request(request).await.expect("HTTP GET error");
        let length = Self::get_content_length(&uri_str, &response);
        // log::debug!(
        //     "get_response_as_string: uri={} status={} content-length={:?}",
        //     uri_str,
        //     response.status(),
        //     length
        // );
        if response.status().eq(&hyper::StatusCode::NOT_FOUND) {
            return Err(AsanaError::Missing);
        }
        // Pre-size the buffer from Content-Length when the server sent one.
        let mut bytes: Vec<u8> = Vec::with_capacity(length.unwrap_or(1024) as usize);
        while let Some(chunk) = response.body_mut().data().await {
            bytes.extend(chunk.expect("Chunk should have bytes"));
        }
        let body_str = String::from_utf8(bytes).expect("Body should be UTF-8 string");
        if !response.status().is_success() {
            panic!(
                "get_response_as_string: bad response: uri={}\n\t- response={:?}\n\t- body={:?}",
                uri_str, response, body_str
            );
        }
        return Ok(body_str);
    }

    /// Parses the Content-Length header, if present; panics when the header
    /// exists but is not a valid integer.
    fn get_content_length(uri_str: &str, response: &Response<Body>) -> Option<u32> {
        let length: Option<u32> = response.headers().get(header::CONTENT_LENGTH).map(|h| {
            h.to_str()
                .unwrap_or_else(|err| {
                    panic!(
                        "get_response_as_string: content-length non-string: uri={} response={:?} error={}",
                        uri_str, response, err
                    );
                })
                .parse()
                .unwrap_or_else(|err| {
                    panic!(
                        "get_response_as_string: content-length not integer: uri={} response={:?} error={}",
                        uri_str, response, err
                    );
                })
        });
        return length;
    }
}
// Safe-ish Rust wrapper around the raw SimConnect FFI bindings in
// `crate::sys`.
#![allow(clippy::too_many_arguments)]

use crate::sys;
use std::ptr::NonNull;

pub use sys::SIMCONNECT_OBJECT_ID_USER;

pub use msfs_derive::sim_connect_data_definition as data_definition;

/// A trait implemented by the `data_definition` attribute.
pub trait DataDefinition {
    #[doc(hidden)]
    // (SimConnect variable name, units, datatype) triples registered by
    // `add_data_definition`.
    const DEFINITIONS: &'static [(&'static str, &'static str, sys::SIMCONNECT_DATATYPE)];
}

/// Rusty HRESULT wrapper.
#[derive(Debug)]
pub struct HResult(sys::HRESULT);
impl std::fmt::Display for HResult {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        std::fmt::Debug::fmt(self, fmt)
    }
}
impl std::error::Error for HResult {}

pub type Result<T> = std::result::Result<T, HResult>;

// Maps a raw HRESULT to Result: non-negative means success.
#[inline(always)]
fn map_err(result: sys::HRESULT) -> Result<()> {
    if result >= 0 {
        Ok(())
    } else {
        Err(HResult(result))
    }
}

/// Callback provided to SimConnect session.
pub type SimConnectRecvCallback = dyn Fn(&SimConnect, SimConnectRecv);

/// A SimConnect session. This provides access to data within the MSFS sim.
pub struct SimConnect {
    // Raw SimConnect session handle returned by SimConnect_Open.
    handle: NonNull<std::ffi::c_void>,
    // User callback invoked from `dispatch_cb` for each received message.
    callback: Box<SimConnectRecvCallback>,
}

// C callback registered with SimConnect_CallDispatch; `p_context` is the
// `*mut SimConnect` passed in `call_dispatch`.
extern "C" fn dispatch_cb(
    recv: *mut sys::SIMCONNECT_RECV,
    _cb_data: sys::DWORD,
    p_context: *mut std::ffi::c_void,
) {
    let sim = unsafe { &*(p_context as *mut SimConnect) };
    // Downcast the generic RECV header to the concrete message type;
    // unrecognised ids are dropped (None).
    let recv = unsafe {
        match (*recv).dwID as sys::SIMCONNECT_RECV_ID {
            sys::SIMCONNECT_RECV_ID_SIMCONNECT_RECV_ID_NULL => Some(SimConnectRecv::Null),
            sys::SIMCONNECT_RECV_ID_SIMCONNECT_RECV_ID_OPEN => Some(SimConnectRecv::Open(
                &*(recv as *mut sys::SIMCONNECT_RECV_OPEN),
            )),
            sys::SIMCONNECT_RECV_ID_SIMCONNECT_RECV_ID_QUIT => Some(SimConnectRecv::Quit(
                &*(recv as *mut sys::SIMCONNECT_RECV_QUIT),
            )),
            sys::SIMCONNECT_RECV_ID_SIMCONNECT_RECV_ID_EVENT => Some(SimConnectRecv::Event(
                &*(recv as *mut sys::SIMCONNECT_RECV_EVENT),
            )),
            _ => None,
        }
    };
    if let Some(recv) = recv {
        (sim.callback)(sim, recv);
    }
}

impl SimConnect {
    /// Send a request to the Microsoft Flight Simulator server to open up communications with a new client.
    ///
    /// NOTE(review): `call_dispatch` registers `&mut sim` as the callback
    /// context, but `sim` is then moved out by value in `Ok(sim)` — confirm
    /// the context pointer is not retained/used across that move by the
    /// SimConnect runtime.
    pub fn open<F>(name: &str, callback: F) -> Result<SimConnect>
    where
        F: Fn(&SimConnect, SimConnectRecv) + 'static,
    {
        unsafe {
            let mut ptr = 0;
            let name = std::ffi::CString::new(name).unwrap();
            map_err(sys::SimConnect_Open(
                &mut ptr,
                name.as_ptr(),
                std::ptr::null_mut(),
                0,
                0,
                0,
            ))?;
            let ptr = ptr as *mut std::ffi::c_void;
            debug_assert!(!ptr.is_null());
            let mut sim = SimConnect {
                handle: NonNull::new_unchecked(ptr),
                callback: Box::new(callback),
            };
            sim.call_dispatch()?;
            Ok(sim)
        }
    }

    // Registers `dispatch_cb` with this session, passing `self` as context.
    fn call_dispatch(&mut self) -> Result<()> {
        unsafe {
            map_err(sys::SimConnect_CallDispatch(
                self.handle.as_ptr() as sys::HANDLE,
                Some(dispatch_cb),
                self as *mut SimConnect as *mut std::ffi::c_void,
            ))
        }
    }

    /// Add an individual client defined event to a notification group.
    pub fn add_client_event_to_notification_group(
        &self,
        group_id: sys::SIMCONNECT_NOTIFICATION_GROUP_ID,
        event_id: sys::SIMCONNECT_CLIENT_EVENT_ID,
        maskable: bool,
    ) -> Result<()> {
        unsafe {
            map_err(sys::SimConnect_AddClientEventToNotificationGroup(
                self.handle.as_ptr() as sys::HANDLE,
                group_id,
                event_id,
                maskable as i32,
            ))
        }
    }

    /// Associate a client defined event ID with a Prepar3D event name.
    pub fn map_client_event_to_sim_event(
        &self,
        id: sys::SIMCONNECT_CLIENT_EVENT_ID,
        name: &str,
    ) -> Result<()> {
        unsafe {
            let name = std::ffi::CString::new(name).unwrap();
            map_err(sys::SimConnect_MapClientEventToSimEvent(
                self.handle.as_ptr() as sys::HANDLE,
                id,
                name.as_ptr(),
            ))
        }
    }

    /// Connect an input event (such as a keystroke, joystick or mouse movement) with the sending of an appropriate event notification.
    pub fn map_input_event_to_client_event(
        &self,
        group_id: sys::SIMCONNECT_NOTIFICATION_GROUP_ID,
        input_definition: &str,
        down_event_id: sys::SIMCONNECT_CLIENT_EVENT_ID,
        down_value: sys::DWORD,
        up_event_id: sys::SIMCONNECT_CLIENT_EVENT_ID,
        up_value: sys::DWORD,
        maskable: bool,
    ) -> Result<()> {
        unsafe {
            let input_definition = std::ffi::CString::new(input_definition).unwrap();
            map_err(sys::SimConnect_MapInputEventToClientEvent(
                self.handle.as_ptr() as sys::HANDLE,
                group_id,
                input_definition.as_ptr(),
                down_event_id,
                down_value,
                up_event_id,
                up_value,
                maskable as i32,
            ))
        }
    }

    /// Set the priority for a notification group.
    pub fn set_notification_group_priority(
        &self,
        group_id: sys::SIMCONNECT_NOTIFICATION_GROUP_ID,
        priority: sys::DWORD,
    ) -> Result<()> {
        unsafe {
            map_err(sys::SimConnect_SetNotificationGroupPriority(
                self.handle.as_ptr() as sys::HANDLE,
                group_id,
                priority,
            ))
        }
    }

    /// Remove a client defined event from a notification group.
    pub fn remove_client_event(
        &self,
        group_id: sys::SIMCONNECT_NOTIFICATION_GROUP_ID,
        event_id: sys::SIMCONNECT_CLIENT_EVENT_ID,
    ) -> Result<()> {
        unsafe {
            map_err(sys::SimConnect_RemoveClientEvent(
                self.handle.as_ptr() as sys::HANDLE,
                group_id,
                event_id,
            ))
        }
    }

    /// Associate a data definition with a client defined object definition.
    pub fn add_data_definition<T: DataDefinition>(
        &self,
        define_id: sys::SIMCONNECT_DATA_DEFINITION_ID,
    ) -> Result<()> {
        // Register every (name, units, datatype) triple declared by the
        // `data_definition` derive.
        for (datum_name, units_type, datatype) in T::DEFINITIONS {
            let datum_name = std::ffi::CString::new(*datum_name).unwrap();
            let units_type = std::ffi::CString::new(*units_type).unwrap();
            unsafe {
                map_err(sys::SimConnect_AddToDataDefinition(
                    self.handle.as_ptr() as sys::HANDLE,
                    define_id,
                    datum_name.as_ptr(),
                    units_type.as_ptr(),
                    *datatype,
                    0.0,
                    0,
                ))?;
            }
        }
        Ok(())
    }

    /// Make changes to the data properties of an object.
    pub fn set_data_on_sim_object<T: DataDefinition>(
        &self,
        define_id: sys::SIMCONNECT_DATA_DEFINITION_ID,
        object_id: sys::SIMCONNECT_OBJECT_ID,
        data: &T,
    ) -> Result<()> {
        unsafe {
            map_err(sys::SimConnect_SetDataOnSimObject(
                self.handle.as_ptr() as sys::HANDLE,
                define_id,
                object_id,
                0,
                0,
                std::mem::size_of_val(data) as sys::DWORD,
                data as *const T as *mut std::ffi::c_void,
            ))
        }
    }
}

/// Message received from `SimConnect::get_next_dispatch`.
#[derive(Debug)]
pub enum SimConnectRecv<'a> {
    Null,
    Exception(&'a sys::SIMCONNECT_RECV_EXCEPTION),
    Open(&'a sys::SIMCONNECT_RECV_OPEN),
    Quit(&'a sys::SIMCONNECT_RECV_QUIT),
    Event(&'a sys::SIMCONNECT_RECV_EVENT),
}

impl Drop for SimConnect {
    // Close the session on drop; asserts the close call succeeded.
    fn drop(&mut self) {
        assert!(unsafe { sys::SimConnect_Close(self.handle.as_ptr() as sys::HANDLE) } >= 0);
    }
}
// svd2rust-generated read-only accessors for the DDRPERFM_STATUS register.
#[doc = "Register `DDRPERFM_STATUS` reader"]
pub type R = crate::R<DDRPERFM_STATUS_SPEC>;
#[doc = "Field `COVF` reader - COVF"]
pub type COVF_R = crate::FieldReader;
#[doc = "Field `BUSY` reader - BUSY"]
pub type BUSY_R = crate::BitReader;
#[doc = "Field `TOVF` reader - TOVF"]
pub type TOVF_R = crate::BitReader;
impl R {
    #[doc = "Bits 0:3 - COVF"]
    #[inline(always)]
    pub fn covf(&self) -> COVF_R {
        // 4-bit field in bits [3:0].
        COVF_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bit 16 - BUSY"]
    #[inline(always)]
    pub fn busy(&self) -> BUSY_R {
        BUSY_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 31 - TOVF"]
    #[inline(always)]
    pub fn tovf(&self) -> TOVF_R {
        TOVF_R::new(((self.bits >> 31) & 1) != 0)
    }
}
#[doc = "DDRPERFM status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ddrperfm_status::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DDRPERFM_STATUS_SPEC;
impl crate::RegisterSpec for DDRPERFM_STATUS_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ddrperfm_status::R`](R) reader structure"]
impl crate::Readable for DDRPERFM_STATUS_SPEC {}
#[doc = "`reset()` method sets DDRPERFM_STATUS to value 0"]
impl crate::Resettable for DDRPERFM_STATUS_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
/// Prints the first character of `"main"` wrapped in `Option`
/// (i.e. `Some('m')`).
fn main() {
    let word = "main";
    let mut chars = word.chars();
    let first = chars.next();
    println!("{:?}", first);
}
// svd2rust-generated write-only accessors for the DMA IFCR register
// (interrupt flag clear, channels 1-3; each bit is write-1-to-clear).
#[doc = "Register `IFCR` writer"]
pub type W = crate::W<IFCR_SPEC>;
#[doc = "Field `CGIF1` writer - global interrupt flag clear for channel 1"]
pub type CGIF1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTCIF1` writer - transfer complete flag clear for channel 1"]
pub type CTCIF1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CHTIF1` writer - half transfer flag clear for channel 1"]
pub type CHTIF1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTEIF1` writer - transfer error flag clear for channel 1"]
pub type CTEIF1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CGIF2` writer - global interrupt flag clear for channel 2"]
pub type CGIF2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTCIF2` writer - transfer complete flag clear for channel 2"]
pub type CTCIF2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CHTIF2` writer - half transfer flag clear for channel 2"]
pub type CHTIF2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTEIF2` writer - transfer error flag clear for channel 2"]
pub type CTEIF2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CGIF3` writer - global interrupt flag clear for channel 3"]
pub type CGIF3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTCIF3` writer - transfer complete flag clear for channel 3"]
pub type CTCIF3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CHTIF3` writer - half transfer flag clear for channel 3"]
pub type CHTIF3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTEIF3` writer - transfer error flag clear for channel 3"]
pub type CTEIF3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl W {
    // One method per flag bit; the const generic parameter is the bit offset.
    #[doc = "Bit 0 - global interrupt flag clear for channel 1"]
    #[inline(always)]
    #[must_use]
    pub fn cgif1(&mut self) -> CGIF1_W<IFCR_SPEC, 0> {
        CGIF1_W::new(self)
    }
    #[doc = "Bit 1 - transfer complete flag clear for channel 1"]
    #[inline(always)]
    #[must_use]
    pub fn ctcif1(&mut self) -> CTCIF1_W<IFCR_SPEC, 1> {
        CTCIF1_W::new(self)
    }
    #[doc = "Bit 2 - half transfer flag clear for channel 1"]
    #[inline(always)]
    #[must_use]
    pub fn chtif1(&mut self) -> CHTIF1_W<IFCR_SPEC, 2> {
        CHTIF1_W::new(self)
    }
    #[doc = "Bit 3 - transfer error flag clear for channel 1"]
    #[inline(always)]
    #[must_use]
    pub fn cteif1(&mut self) -> CTEIF1_W<IFCR_SPEC, 3> {
        CTEIF1_W::new(self)
    }
    #[doc = "Bit 4 - global interrupt flag clear for channel 2"]
    #[inline(always)]
    #[must_use]
    pub fn cgif2(&mut self) -> CGIF2_W<IFCR_SPEC, 4> {
        CGIF2_W::new(self)
    }
    #[doc = "Bit 5 - transfer complete flag clear for channel 2"]
    #[inline(always)]
    #[must_use]
    pub fn ctcif2(&mut self) -> CTCIF2_W<IFCR_SPEC, 5> {
        CTCIF2_W::new(self)
    }
    #[doc = "Bit 6 - half transfer flag clear for channel 2"]
    #[inline(always)]
    #[must_use]
    pub fn chtif2(&mut self) -> CHTIF2_W<IFCR_SPEC, 6> {
        CHTIF2_W::new(self)
    }
    #[doc = "Bit 7 - transfer error flag clear for channel 2"]
    #[inline(always)]
    #[must_use]
    pub fn cteif2(&mut self) -> CTEIF2_W<IFCR_SPEC, 7> {
        CTEIF2_W::new(self)
    }
    #[doc = "Bit 8 - global interrupt flag clear for channel 3"]
    #[inline(always)]
    #[must_use]
    pub fn cgif3(&mut self) -> CGIF3_W<IFCR_SPEC, 8> {
        CGIF3_W::new(self)
    }
    #[doc = "Bit 9 - transfer complete flag clear for channel 3"]
    #[inline(always)]
    #[must_use]
    pub fn ctcif3(&mut self) -> CTCIF3_W<IFCR_SPEC, 9> {
        CTCIF3_W::new(self)
    }
    #[doc = "Bit 10 - half transfer flag clear for channel 3"]
    #[inline(always)]
    #[must_use]
    pub fn chtif3(&mut self) -> CHTIF3_W<IFCR_SPEC, 10> {
        CHTIF3_W::new(self)
    }
    #[doc = "Bit 11 - transfer error flag clear for channel 3"]
    #[inline(always)]
    #[must_use]
    pub fn cteif3(&mut self) -> CTEIF3_W<IFCR_SPEC, 11> {
        CTEIF3_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DMA interrupt flag clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ifcr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IFCR_SPEC;
impl crate::RegisterSpec for IFCR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`ifcr::W`](W) writer structure"]
impl crate::Writable for IFCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IFCR to value 0"]
impl crate::Resettable for IFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use chrono::{DateTime, Utc};

/// Raw soil-moisture reading as reported by the sensor.
pub type Measurement = u16;

/// A single moisture reading taken at a point in time from a named sensor.
#[derive(Debug)]
pub struct MoistureEvent {
    pub time: DateTime<Utc>,
    pub name: String,
    pub value: Measurement,
}

impl super::ToInfluxDB for MoistureEvent {
    /// Serialize the event as one InfluxDB line-protocol record:
    /// measurement `moisture`, tag `name`, field `value`, second-resolution timestamp.
    fn to_line(&self) -> String {
        let timestamp = self.time.timestamp();
        let mut line = String::from("moisture,name=");
        line.push_str(&self.name);
        line.push_str(&format!(" value={} {}", self.value, timestamp));
        line
    }
}
use crate::utils::wait_until;
use crate::{Net, Spec, TestProtocol};
use ckb_sync::{NetworkProtocol, MAX_LOCATOR_SIZE};
use ckb_types::{
    h256,
    packed::{Byte32, GetHeaders, SyncMessage},
    prelude::*,
    H256,
};
use log::info;

/// Integration spec: a `GetHeaders` message whose block-locator exceeds
/// `MAX_LOCATOR_SIZE` must get the sending peer disconnected and banned.
pub struct InvalidLocatorSize;

impl Spec for InvalidLocatorSize {
    crate::name!("invalid_locator_size");

    crate::setup!(protocols: vec![TestProtocol::sync()]);

    fn run(&self, net: &mut Net) {
        info!("Connect node0");
        net.exit_ibd_mode();
        let node0 = &net.nodes[0];
        net.connect(node0);
        // get peer_id from GetHeaders message
        let (peer_id, _, _) = net.receive();

        // Build a locator that is one entry over the limit (0..=MAX_LOCATOR_SIZE
        // yields MAX_LOCATOR_SIZE + 1 hashes); the hash value itself is irrelevant.
        let hashes: Vec<Byte32> = (0..=MAX_LOCATOR_SIZE)
            .map(|_| h256!("0x1").pack())
            .collect();

        let message = SyncMessage::new_builder()
            .set(
                GetHeaders::new_builder()
                    .block_locator_hashes(hashes.pack())
                    .build(),
            )
            .build()
            .as_bytes();
        net.send(NetworkProtocol::SYNC.into(), peer_id, message);

        // The oversized locator should cause node0 to drop the connection.
        let rpc_client = net.nodes[0].rpc_client();
        let ret = wait_until(10, || rpc_client.get_peers().is_empty());
        assert!(ret, "Node0 should disconnect test node");

        // Reconnecting must fail because the test node is now banned, so the
        // peer list is expected to STAY empty — hence `assert!(!ret)` below.
        net.connect(node0);
        let ret = wait_until(10, || !rpc_client.get_peers().is_empty());
        assert!(!ret, "Node0 should ban test node");
    }
}
use crate::engine::{element::Element, *}; pub struct SpriteRenderer { path: String, // Idk how to store the sprite, now I am using a cache :/ } impl SpriteRenderer { pub fn new(path: String) -> Box<dyn Component> { Box::new(SpriteRenderer { path: path }) } } impl Component for SpriteRenderer { fn on_collision(&mut self) -> Result<(), String> { Ok(()) } fn on_update(&self, _parent: &Element, _events: &Vec<Event>) -> Result<Option<ElementData>, String> { Ok(None) } fn on_draw(&self, parent: &Element, renderer: &mut dyn Renderer) -> Result<(), String> { let rect = parent.data.position.clone(); let rot = parent.data.rotation; renderer.copy(&self.path, rect, rot)?; Ok(()) } }
use std::{collections::HashSet, time::Duration};

use pretty_assertions::assert_eq;

use super::{LookupHosts, SrvPollingMonitor};
use crate::{
    error::Result,
    options::{ClientOptions, ServerAddress},
    runtime,
    sdam::Topology,
    test::{log_uncaptured, CLIENT_OPTIONS},
};

// Builds an address on the SRV test domain used by the DNS-seedlist tests.
fn localhost_test_build_10gen(port: u16) -> ServerAddress {
    ServerAddress::Tcp {
        host: "localhost.test.build.10gen.cc".into(),
        port: Some(port),
    }
}

lazy_static::lazy_static! {
    // Seed list every test starts from; individual tests feed different
    // lookup results and assert on the resulting topology.
    static ref DEFAULT_HOSTS: Vec<ServerAddress> = vec![
        localhost_test_build_10gen(27017),
        localhost_test_build_10gen(27108),
    ];
}

// Drives a single SRV-polling round: builds a topology seeded with
// DEFAULT_HOSTS, hands `new_hosts` (a simulated DNS lookup result) to the
// monitor, and asserts the topology ends up with `expected_hosts`.
// Monitoring threads are disabled so the monitor is the only actor.
async fn run_test(new_hosts: Result<Vec<ServerAddress>>, expected_hosts: HashSet<ServerAddress>) {
    let mut options = ClientOptions::new_srv();
    options.hosts = DEFAULT_HOSTS.clone();
    options.test_options_mut().disable_monitoring_threads = true;
    let mut topology = Topology::new(options.clone()).unwrap();
    topology.watch().wait_until_initialized().await;
    let mut monitor =
        SrvPollingMonitor::new(topology.clone_updater(), topology.watch(), options.clone())
            .unwrap();

    monitor
        .update_hosts(new_hosts.and_then(make_lookup_hosts))
        .await;

    assert_eq!(expected_hosts, topology.server_addresses());
}

// Wraps plain addresses in the LookupHosts shape the monitor consumes;
// the fixed 60s TTL is arbitrary for these tests.
fn make_lookup_hosts(hosts: Vec<ServerAddress>) -> Result<LookupHosts> {
    Ok(LookupHosts {
        hosts: hosts.into_iter().map(Result::Ok).collect(),
        min_ttl: Duration::from_secs(60),
    })
}

// If a new DNS record is returned, it should be reflected in the topology.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn add_new_dns_record() {
    let hosts = vec![
        localhost_test_build_10gen(27017),
        localhost_test_build_10gen(27018),
        localhost_test_build_10gen(27019),
    ];

    run_test(Ok(hosts.clone()), hosts.into_iter().collect()).await;
}

// If a DNS record is no longer returned, it should be reflected in the topology.
#[cfg_attr(feature = "tokio-runtime", tokio::test)] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn remove_dns_record() { let hosts = vec![localhost_test_build_10gen(27017)]; run_test(Ok(hosts.clone()), hosts.into_iter().collect()).await; } // If a single DNS record is replaced, it should be reflected in the topology. #[cfg_attr(feature = "tokio-runtime", tokio::test)] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn replace_single_dns_record() { let hosts = vec![ localhost_test_build_10gen(27017), localhost_test_build_10gen(27019), ]; run_test(Ok(hosts.clone()), hosts.into_iter().collect()).await; } // If all DNS records are replaced, it should be reflected in the topology. #[cfg_attr(feature = "tokio-runtime", tokio::test)] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn replace_all_dns_records() { let hosts = vec![localhost_test_build_10gen(27019)]; run_test(Ok(hosts.clone()), hosts.into_iter().collect()).await; } // If a timeout error occurs, the topology should be unchanged. #[cfg_attr(feature = "tokio-runtime", tokio::test)] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn timeout_error() { run_test( Err(std::io::ErrorKind::TimedOut.into()), DEFAULT_HOSTS.iter().cloned().collect(), ) .await; } // If no results are returned, the topology should be unchanged. #[cfg_attr(feature = "tokio-runtime", tokio::test)] #[cfg_attr(feature = "async-std-runtime", async_std::test)] async fn no_results() { run_test(Ok(Vec::new()), DEFAULT_HOSTS.iter().cloned().collect()).await; } // SRV polling is not done for load-balanced clusters (as per spec at // https://github.com/mongodb/specifications/blob/master/source/polling-srv-records-for-mongos-discovery/tests/README.rst#test-that-srv-polling-is-not-done-for-load-balalanced-clusters). 
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn load_balanced_no_srv_polling() {
    // Only meaningful against a load-balanced deployment; skip otherwise.
    if CLIENT_OPTIONS.get().await.load_balanced != Some(true) {
        log_uncaptured("skipping load_balanced_no_srv_polling due to not load balanced topology");
        return;
    }
    let hosts = vec![localhost_test_build_10gen(27017)];
    let mut options = ClientOptions::new_srv();
    // The monitor would rescan every min_ttl; we wait two of these below to
    // give any (incorrect) polling a chance to fire.
    let rescan_interval = options.original_srv_info.as_ref().cloned().unwrap().min_ttl;
    options.hosts = hosts.clone();
    options.load_balanced = Some(true);
    // Mock lookup returns an EXTRA host; if SRV polling ran, it would show up
    // in the topology and the final assertion would fail.
    options.test_options_mut().mock_lookup_hosts = Some(make_lookup_hosts(vec![
        localhost_test_build_10gen(27017),
        localhost_test_build_10gen(27018),
    ]));
    let mut topology = Topology::new(options).unwrap();
    topology.watch().wait_until_initialized().await;
    runtime::delay_for(rescan_interval * 2).await;
    // Topology must still contain only the original host.
    assert_eq!(
        hosts.into_iter().collect::<HashSet<_>>(),
        topology.server_addresses()
    );
}
// Pull in the sibling modules from test_notifier.rs and test_verifier.rs.
mod test_notifier;
mod test_verifier;
/// Address families, kept separate from the data-carrying `IpAddr` below
/// (mirrors the book's progression from kind-only enum to enum-with-data).
enum IpAddrKind {
    V4,
    V6,
}

/// IPv4 address stored as its four octets.
struct Ipv4Addr {
    address: (u8, u8, u8, u8),
}

/// IPv6 address stored as its textual form.
struct Ipv6Addr {
    address: String,
}

/// Either kind of IP address, with the concrete struct as payload.
enum IpAddr {
    V4(Ipv4Addr),
    V6(Ipv6Addr),
}

fn main() {
    let home = IpAddr::V4(Ipv4Addr {
        address: (127, 0, 0, 1),
    });

    let loopback = IpAddr::V6(Ipv6Addr {
        address: "::1".to_string(),
    });

    // println!("home: {:#?}", home);
}
// Reads one whitespace-trimmed line from stdin and parses it; panics on parse failure.
fn read<T: std::str::FromStr>() -> T {
    let mut s = String::new();
    std::io::stdin().read_line(&mut s).ok();
    s.trim().parse().ok().unwrap()
}

// Reads one line and parses every whitespace-separated token.
fn read_vec<T: std::str::FromStr>() -> Vec<T> {
    read::<String>()
        .split_whitespace()
        .map(|e| e.parse().ok().unwrap())
        .collect()
}

// Reads `n` lines, each as a vector of tokens.
fn read_vec2<T: std::str::FromStr>(n: usize) -> Vec<Vec<T>> {
    (0..n).map(|_| read_vec()).collect()
}

// Reads `n` lines, one value per line.
fn read_col<T: std::str::FromStr>(n: usize) -> Vec<T> {
    (0..n).map(|_| read()).collect()
}

/// Number of decimal digits of `n`; returns 0 for n == 0 (original behavior kept).
fn f(n: u32) -> u32 {
    let mut tmp = n;
    let mut cnt: u32 = 0;
    while tmp > 0 {
        cnt += 1;
        tmp /= 10;
    }
    cnt
}

/// `a` to the power `b` by repeated multiplication (wraps/panics on overflow
/// exactly as the built-in `*` does).
fn pow(a: u32, b: u32) -> u32 {
    let mut ans: u32 = 1;
    for _ in 0..b {
        ans *= a;
    }
    ans
}

fn main() {
    let s: String = read();
    let v: Vec<char> = s.chars().collect();
    let l = v.len();
    let mut ans: Vec<u32> = vec![0; l];

    // For each position, index of the nearest 'R' at or to its left.
    // (An 'L' can only appear after some 'R' by the problem's constraints.)
    let mut nearest_r_left: Vec<usize> = vec![0; l];
    let mut last_r: usize = 0;
    for i in 0..l {
        if v[i] == 'R' {
            last_r = i;
        }
        nearest_r_left[i] = last_r;
    }

    // For each position, index of the nearest 'L' at or to its right.
    // Iterate in reverse directly instead of materializing a reversed index
    // vector as before.
    let mut nearest_l_right: Vec<usize> = vec![0; l];
    let mut last_l: usize = 0;
    for i in (0..l).rev() {
        if v[i] == 'L' {
            last_l = i;
        }
        nearest_l_right[i] = last_l;
    }

    // Every ant oscillates on the 'R','L' boundary it reaches; after many
    // steps it sits on the boundary cell matching its distance parity.
    for i in 0..l {
        if v[i] == 'L' {
            let j = nearest_r_left[i];
            if (i - j) % 2 == 0 {
                ans[j] += 1;
            } else {
                ans[j + 1] += 1;
            }
        } else {
            let j = nearest_l_right[i];
            if (j - i) % 2 == 0 {
                ans[j] += 1;
            } else {
                ans[j - 1] += 1;
            }
        }
    }

    // Print counts space-separated on one line (nothing at all for empty input,
    // matching the original loop's behavior).
    if !ans.is_empty() {
        let rendered: Vec<String> = ans.iter().map(|x| x.to_string()).collect();
        println!("{}", rendered.join(" "));
    }
}
use std::{
    collections::BTreeMap,
    path::{Path, PathBuf},
    fmt,
    error::Error,
};
use regex::Regex;
use yaml_rust::{Yaml, YamlLoader, ScanError};
use crate::{
    ContextHandle, Polarity, DotId, Content, PartialContent, ContentFormat,
    content::{PolyForContent, MonoForContent},
};

/// Errors raised while parsing a YAML-formatted c-e structure description.
#[derive(Clone, Debug)]
pub(crate) enum YamlScriptError {
    // Wraps the scanner error reported by `yaml_rust`.
    LexingFailure(ScanError),
    Empty,
    Multiple,
    NotADict,
    KeyNotString,
    NameNotString,
    NameDup,
    PolyInvalid,
    PolyAmbiguous,
    ShortPolyWithWords,
    MonoInvalid,
    LinkInvalid,
    LinkReversed,
    LinkList,
}

impl fmt::Display for YamlScriptError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use YamlScriptError::*;

        match self {
            LexingFailure(err) => write!(f, "Lexing failed: {}", err),
            Empty => write!(f, "YAML description is empty"),
            Multiple => write!(f, "Multiple YAML descriptions"),
            NotADict => write!(f, "Bad YAML description (not a dictionary)"),
            KeyNotString => write!(f, "Non-string key in YAML description"),
            NameNotString => write!(f, "Non-string c-e structure name in YAML description"),
            NameDup => write!(f, "Duplicated c-e structure name in YAML description"),
            PolyInvalid => write!(f, "Invalid YAML description of a polynomial"),
            PolyAmbiguous => write!(f, "Ambiguous YAML description of a polynomial"),
            ShortPolyWithWords => write!(
                f,
                "Multi-word dot name is invalid in short YAML description of a polynomial"
            ),
            MonoInvalid => write!(f, "Invalid monomial in YAML description of a polynomial"),
            LinkInvalid => write!(f, "Invalid link in YAML description of a polynomial"),
            LinkReversed => write!(f, "Reversed link in YAML description of a polynomial"),
            LinkList => write!(f, "Link list is invalid in YAML description of a polynomial"),
        }
    }
}

impl Error for YamlScriptError {}

impl From<ScanError> for YamlScriptError {
    #[inline]
    fn from(err: ScanError) -> Self {
        YamlScriptError::LexingFailure(err)
    }
}

// Interns `name` as a dot name in the context.  When `single_word_only` is
// set (short polynomial syntax), names containing whitespace are rejected.
fn do_share_name<S: AsRef<str>>(
    ctx: &ContextHandle,
    name: S,
    single_word_only: bool,
) -> Result<DotId, YamlScriptError> {
    if single_word_only && name.as_ref().contains(char::is_whitespace) {
        Err(YamlScriptError::ShortPolyWithWords)
    } else {
        Ok(ctx.lock().unwrap().share_dot_name(name))
    }
}

// Splits a (possibly comma-separated) port description into trimmed dot
// names and interns each one.
fn post_process_port_description<S: AsRef<str>>(
    ctx: &ContextHandle,
    description: S,
    single_word_only: bool,
) -> Result<Vec<DotId>, YamlScriptError> {
    if description.as_ref().contains(',') {
        let result: Result<Vec<DotId>, YamlScriptError> = description
            .as_ref()
            .split(',')
            .map(|s| do_share_name(ctx, s.trim(), single_word_only))
            .collect();
        let ids = result?;

        Ok(ids)
    } else {
        let id = do_share_name(ctx, description.as_ref().trim(), single_word_only)?;

        Ok(vec![id])
    }
}

// Dot ids named by a port description, plus the polarity its suffix encodes.
type PortParsed = (Vec<DotId>, Polarity);

// Recognizes the port suffix ('>'/" effects" for Tx, '<'/" causes" for Rx);
// returns Ok(None) when `description` carries no polarity marker.
fn do_parse_port_description<S: AsRef<str>>(
    ctx: &ContextHandle,
    description: S,
    single_word_only: bool,
) -> Result<Option<PortParsed>, YamlScriptError> {
    lazy_static! {
        // Dot name (untrimmed, unseparated) is any nonempty string not ending in '>' or '<'.
        // Removal of leading and trailing whitespace is done in post processing,
        // as well as comma-separation.
        static ref TX_RE: Regex = Regex::new(r"^(.*[^><])(>+|\s+effects)$").unwrap();
        static ref RX_RE: Regex = Regex::new(r"^(.*[^><])(<+|\s+causes)$").unwrap();
    }

    if let Some(cap) = TX_RE.captures(description.as_ref()) {
        let ids = post_process_port_description(ctx, &cap[1], single_word_only)?;
        Ok(Some((ids, Polarity::Tx)))
    } else if let Some(cap) = RX_RE.captures(description.as_ref()) {
        let ids = post_process_port_description(ctx, &cap[1], single_word_only)?;
        Ok(Some((ids, Polarity::Rx)))
    } else {
        Ok(None)
    }
}

// Infallible wrapper: with `single_word_only == false` the only error source
// (ShortPolyWithWords) cannot fire, hence the `unreachable!`.
fn parse_port_description<S: AsRef<str>>(
    ctx: &ContextHandle,
    description: S,
) -> Option<PortParsed> {
    do_parse_port_description(ctx, description, false).unwrap_or_else(|_| unreachable!())
}

// Parses one link target; the returned bool marks whether the description
// also requests a colink (a back-edge in the opposite direction).
fn parse_link_description<S: AsRef<str> + Copy>(
    ctx: &ContextHandle,
    description: S,
    valid_polarity: Polarity,
    single_word_only: bool,
) -> Result<(DotId, bool), YamlScriptError> {
    let link_with_colink = do_parse_port_description(ctx, description, single_word_only)?;

    if let Some((ids, polarity)) = link_with_colink {
        if polarity == valid_polarity {
            if ids.len() == 1 {
                Ok((ids[0], true))
            } else {
                Err(YamlScriptError::LinkList)
            }
        } else {
            Err(YamlScriptError::LinkReversed)
        }
    } else {
        let id = do_share_name(ctx, description, single_word_only)?;

        Ok((id, false))
    }
}

/// Intermediate representation of a c-e structure.
///
/// This is returned by the parser of YAML-formatted strings and then
/// transformed into internal data structures, which are used for
/// analysis or during simulation.
#[derive(Debug)]
pub(crate) struct YamlContent {
    // Optional c-e structure name (the "name" key of the YAML dictionary).
    name: Option<String>,
    // All non-port, non-"name" keys are kept verbatim as metadata.
    meta: BTreeMap<String, Yaml>,
    // Causes/effects accumulated while walking the YAML dictionary.
    content: PartialContent,
}

impl YamlContent {
    pub(crate) fn new(ctx: &ContextHandle) -> Self {
        YamlContent {
            name: Default::default(),
            meta: Default::default(),
            content: PartialContent::new(ctx),
        }
    }

    // Registers the polynomial described by `poly_yaml` on every dot in `ids`,
    // on the side selected by `polarity`; colinks add the reverse edge too.
    fn add_ports(
        &mut self,
        ids: &[DotId],
        polarity: Polarity,
        poly_yaml: &Yaml,
    ) -> Result<(), YamlScriptError> {
        assert!(!ids.is_empty());
        let mut poly_content = PolyForContent::new();

        match poly_yaml {
            // Short form: a single dot name (single-word names only).
            Yaml::String(other_name) => {
                let (other_id, with_colink) = parse_link_description(
                    self.content.get_context(),
                    other_name.trim(),
                    !polarity,
                    true,
                )?;
                poly_content.add_mono(vec![other_id]);
                if with_colink {
                    if polarity == Polarity::Tx {
                        self.content.add_to_causes(other_id, &[ids.to_owned()]);
                    } else {
                        self.content.add_to_effects(other_id, &[ids.to_owned()]);
                    }
                }
            }
            // Long form: array of monomials; each monomial is itself an array
            // of dot names.  A flat array of strings is rejected as ambiguous.
            Yaml::Array(table) => {
                let mut is_flat = true;

                for value in table {
                    match value {
                        Yaml::String(other_name) => {
                            let (other_id, with_colink) = parse_link_description(
                                self.content.get_context(),
                                other_name.trim(),
                                !polarity,
                                true,
                            )?;
                            poly_content.add_mono(vec![other_id]);
                            if with_colink {
                                if polarity == Polarity::Tx {
                                    self.content.add_to_causes(other_id, &[ids.to_owned()]);
                                } else {
                                    self.content.add_to_effects(other_id, &[ids.to_owned()]);
                                }
                            }
                        }
                        Yaml::Array(table) => {
                            is_flat = false;
                            let mut mono_content = MonoForContent::new();

                            for value in table {
                                if let Some(other_name) = value.as_str() {
                                    let (other_id, with_colink) = parse_link_description(
                                        self.content.get_context(),
                                        other_name.trim(),
                                        !polarity,
                                        false,
                                    )?;
                                    mono_content.add_dot(other_id);
                                    if with_colink {
                                        if polarity == Polarity::Tx {
                                            self.content.add_to_causes(other_id, &[ids.to_owned()]);
                                        } else {
                                            self.content
                                                .add_to_effects(other_id, &[ids.to_owned()]);
                                        }
                                    }
                                } else {
                                    return Err(YamlScriptError::LinkInvalid)
                                }
                            }
                            poly_content.add_mono(mono_content.into_content());
                        }
                        _ => return Err(YamlScriptError::MonoInvalid),
                    }
                }
                if is_flat {
                    return Err(YamlScriptError::PolyAmbiguous)
                }
            }
            _ => return Err(YamlScriptError::PolyInvalid),
        }

        if polarity == Polarity::Tx {
            for &id in ids {
                self.content.add_to_effects(id, poly_content.as_content());
            }
        } else {
            for &id in ids {
                self.content.add_to_causes(id, poly_content.as_content());
            }
        }

        Ok(())
    }

    // Dispatches one YAML dictionary entry: a port key becomes causes/effects,
    // "name" sets the structure name (once), anything else is stored as meta.
    fn add_entry(&mut self, key: &Yaml, value: &Yaml) -> Result<(), YamlScriptError> {
        if let Some(key) = key.as_str() {
            let key = key.trim();
            let port_parsed = parse_port_description(self.content.get_context(), key);

            if let Some((ids, polarity)) = port_parsed {
                self.add_ports(&ids, polarity, value)
            } else if key == "name" {
                if let Some(name) = value.as_str() {
                    if self.name.is_none() {
                        self.name = Some(name.trim().to_owned());
                        Ok(())
                    } else {
                        Err(YamlScriptError::NameDup)
                    }
                } else {
                    Err(YamlScriptError::NameNotString)
                }
            } else {
                // FIXME handle duplicates
                self.meta.insert(key.to_string(), value.clone());
                Ok(())
            }
        } else {
            Err(YamlScriptError::KeyNotString)
        }
    }

    // Consumes a whole YAML document; it must be a dictionary.
    fn with_yaml(mut self, yaml: &Yaml) -> Result<Self, YamlScriptError> {
        if let Yaml::Hash(ref dict) = yaml {
            for (key, value) in dict {
                self.add_entry(key, value)?;
            }
            Ok(self)
        } else {
            Err(YamlScriptError::NotADict)
        }
    }

    // Parses a script string; exactly one YAML document is accepted.
    pub(crate) fn with_str<S: AsRef<str>>(self, script: S) -> Result<Self, YamlScriptError> {
        let docs = YamlLoader::load_from_str(script.as_ref())?;

        if docs.is_empty() {
            Err(YamlScriptError::Empty)
        } else if docs.len() == 1 {
            self.with_yaml(&docs[0])
        } else {
            Err(YamlScriptError::Multiple)
        }
    }

    pub(crate) fn from_str<S: AsRef<str>>(
        ctx: &ContextHandle,
        script: S,
    ) -> Result<Self, YamlScriptError> {
        Self::new(ctx).with_str(script)
    }
}

impl Content for YamlContent {
    #[inline]
    fn get_script(&self) -> Option<&str> {
        None // FIXME
    }

    #[inline]
    fn get_name(&self) -> Option<&str> {
        self.name.as_deref()
    }

    #[inline]
    fn is_module(&self) -> bool {
        false
    }

    #[inline]
    fn get_carrier_ids(&mut self) -> Vec<DotId> {
        self.content.get_carrier_ids()
    }

    #[inline]
    fn get_causes_by_id(&self, id: DotId) -> Option<&Vec<Vec<DotId>>> {
        self.content.get_causes_by_id(id)
    }

    #[inline]
    fn get_effects_by_id(&self, id: DotId) -> Option<&Vec<Vec<DotId>>> {
        self.content.get_effects_by_id(id)
    }
}

/// `ContentFormat` implementation for YAML-formatted ".cex" scripts.
#[derive(Clone, Default, Debug)]
pub struct YamlFormat {
    path: Option<PathBuf>,
}

impl YamlFormat {
    pub fn new() -> Self {
        Default::default()
    }

    pub fn from_path<P: AsRef<Path>>(path: P) -> Self {
        let path = path.as_ref().to_path_buf();

        YamlFormat { path: Some(path) }
    }
}

impl ContentFormat for YamlFormat {
    fn expected_extensions(&self) -> &[&str] {
        &["cex"]
    }

    fn script_is_acceptable(&self, _script: &str) -> bool {
        true // FIXME
    }

    fn script_to_content(
        &self,
        ctx: &ContextHandle,
        script: &str,
        _root_name: Option<&str>,
    ) -> Result<Box<dyn Content>, Box<dyn Error>> {
        YamlContent::from_str(ctx, script).map(Into::into).map_err(Into::into)
    }
}

#[cfg(test)]
mod tests {
    use crate::Context;
    use super::*;

    fn get_dot_id(ctx: &ContextHandle, name: &str) -> DotId {
        ctx.lock().unwrap().get_dot_id(name).unwrap()
    }

    #[test]
    fn test_empty() {
        let ref ctx = Context::new_toplevel("yaml_script::test_empty");
        let mut script = YamlContent::new(ctx);
        let carrier = script.get_carrier_ids();

        assert_eq!(carrier, vec![]);
    }

    #[test]
    fn test_arrow() {
        let ref ctx = Context::new_toplevel("yaml_script::test_arrow");
        let mut script = YamlContent::from_str(ctx, "a >: z <").unwrap();
        let carrier = script.get_carrier_ids();
        let a = get_dot_id(ctx, "a");
        let z = get_dot_id(ctx, "z");

        assert_eq!(carrier, vec![a, z]);

        let causes = script.get_causes_by_id(z).unwrap();
        let effects = script.get_effects_by_id(a).unwrap();

        assert_eq!(causes, &vec![vec![a]]);
        assert_eq!(effects, &vec![vec![z]]);
    }
}
use crate::{ import::*, error::* };

/// This type can be used when you need a concrete type as Address<M>. Eg,
/// you can store this as BoxAny and then use down_cast from std::any::Any.
//
pub struct Receiver<M: Message>
{
	// The wrapped boxed address; pinned because the Sink impl below
	// forwards through `as_mut()` on the pin.
	rec: Pin<BoxAddress<M, ThesErr>>
}

impl<M: Message> Receiver<M>
{
	/// Create a new Receiver
	//
	pub fn new( rec: BoxAddress<M, ThesErr> ) -> Self
	{
		Self { rec: Pin::from( rec ) }
	}
}

impl<M: Message> Clone for Receiver<M>
{
	// Clones the underlying address via its `clone_box` object-safe hook.
	fn clone( &self ) -> Self
	{
		Self { rec: Pin::from( self.rec.clone_box() ) }
	}
}

impl<M: Message> fmt::Debug for Receiver<M>
{
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
	{
		write!( f, "Receiver: {:?}", &self.rec )
	}
}

/// Verify whether 2 Receivers will deliver to the same actor
//
impl<M: Message> PartialEq for Receiver<M>
{
	fn eq( &self, other: &Self ) -> bool
	{
		self.rec.id() == other.rec.id()
	}
}

impl<M: Message> Eq for Receiver<M>{}

impl<M: Message> Address<M> for Receiver<M>
{
	// Delegates the request/response call to the wrapped address.
	fn call( &mut self, msg: M ) -> Return<'_, Result< <M as Message>::Return, <Self as Sink<M>>::Error >>
	{
		Box::pin( async move { self.rec.call( msg ).await })
	}

	fn clone_box( &self ) -> BoxAddress<M, ThesErr>
	{
		self.rec.clone_box()
	}
}

impl<M: Message> Identify for Receiver<M>
{
	fn id( &self ) -> usize
	{
		self.rec.id()
	}

	fn name( &self ) -> Option<Arc<str>>
	{
		self.rec.name()
	}
}

// Sink is forwarded verbatim to the inner pinned address.
impl<M: Message> Sink<M> for Receiver<M>
{
	type Error = ThesErr;

	fn poll_ready( mut self: Pin<&mut Self>, cx: &mut TaskContext<'_> ) -> Poll<Result<(), Self::Error>>
	{
		self.rec.as_mut().poll_ready( cx )
	}

	fn start_send( mut self: Pin<&mut Self>, msg: M ) -> Result<(), Self::Error>
	{
		self.rec.as_mut().start_send( msg )
	}

	fn poll_flush( mut self: Pin<&mut Self>, cx: &mut TaskContext<'_> ) -> Poll<Result<(), Self::Error>>
	{
		self.rec.as_mut().poll_flush( cx )
	}

	/// Will only close when dropped, this method can never return ready
	//
	fn poll_close( mut self: Pin<&mut Self>, cx: &mut TaskContext<'_> ) -> Poll<Result<(), Self::Error>>
	{
		self.rec.as_mut().poll_close( cx )
	}
}
use ::std::*; /** --- Part Two --- The air conditioner comes online! Its cold air feels good for a while, but then the TEST alarms start to go off. Since the air conditioner can't vent its heat anywhere but back into the spacecraft, it's actually making the air inside the ship warmer. Instead, you'll need to use the TEST to extend the thermal radiators. Fortunately, the diagnostic program (your puzzle input) is already equipped for this. Unfortunately, your Intcode computer is not. Your computer is only missing a few opcodes: Opcode 5 is jump-if-true: if the first parameter is non-zero, it sets the instruction pointer to the value from the second parameter. Otherwise, it does nothing. Opcode 6 is jump-if-false: if the first parameter is zero, it sets the instruction pointer to the value from the second parameter. Otherwise, it does nothing. Opcode 7 is less than: if the first parameter is less than the second parameter, it stores 1 in the position given by the third parameter. Otherwise, it stores 0. Opcode 8 is equals: if the first parameter is equal to the second parameter, it stores 1 in the position given by the third parameter. Otherwise, it stores 0. Like all instructions, these instructions need to support parameter modes as described above. Normally, after an instruction is finished, the instruction pointer increases by the number of values in that instruction. However, if the instruction modifies the instruction pointer, that value is used and the instruction pointer is not automatically increased. For example, here are several programs that take one input, compare it to the value 8, and then produce one output: 3,9,8,9,10,9,4,9,99,-1,8 - Using position mode, consider whether the input is equal to 8; output 1 (if it is) or 0 (if it is not). 3,9,7,9,10,9,4,9,99,-1,8 - Using position mode, consider whether the input is less than 8; output 1 (if it is) or 0 (if it is not). 
3,3,1108,-1,8,3,4,3,99 - Using immediate mode, consider whether the input is equal to 8; output 1 (if it is) or 0 (if it is not). 3,3,1107,-1,8,3,4,3,99 - Using immediate mode, consider whether the input is less than 8; output 1 (if it is) or 0 (if it is not). Here are some jump tests that take an input, then output 0 if the input was zero or 1 if the input was non-zero: 3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9 (using position mode) 3,3,1105,-1,9,1101,0,0,12,4,12,99,1 (using immediate mode) Here's a larger example: 3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31, 1106,0,36,98,0,0,1002,21,125,20,4,20,1105,1,46,104, 999,1105,1,46,1101,1000,1,20,4,20,1105,1,46,98,99 The above example program uses an input instruction to ask for a single number. The program will then output 999 if the input value is below 8, output 1000 if the input value is equal to 8, or output 1001 if the input value is greater than 8. This time, when the TEST diagnostic program runs its input instruction to get the ID of the system to test, provide it 5, the ID for the ship's thermal radiator controller. This diagnostic test suite only outputs one number, the diagnostic code. What is the diagnostic code for system ID 5? 
*/
use num_derive::FromPrimitive;
use num_traits::{pow, FromPrimitive};
use std::string::String;

/// Intcode opcodes understood by this interpreter (day 5, part two).
#[derive(FromPrimitive, PartialEq)]
enum OpCode {
    Add = 1,
    Multiply = 2,
    Input = 3,
    Output = 4,
    JumpIfTrue = 5,
    JumpIfNot = 6,
    LessThan = 7,
    Equals = 8,
    End = 99,
}

// Fetches the operand at `position` (1-based, relative to pc), honouring its
// parameter mode: the corresponding decimal digit of `parameter_mode` selects
// immediate (1) or position (0) addressing.
fn get_reg(ro_program: &[i32], pc: usize, parameter_mode: i32, position: usize) -> i32 {
    let val = ro_program[pc + position];
    let digit = pow(10, position - 1);
    if (parameter_mode / digit) % 10 == 1 {
        val
    } else {
        ro_program[val as usize]
    }
}

// I/O abstraction so tests can script inputs and capture outputs.
trait Terminal {
    fn input(&mut self) -> String;
    fn output(&mut self, val: i32);
}

struct CommandLineTerminal {}

impl Terminal for CommandLineTerminal {
    fn input(&mut self) -> String {
        let mut val = String::new();
        io::stdin()
            .read_line(&mut val)
            .expect("Failed to read from stdin");
        val
    }
    fn output(&mut self, val: i32) {
        print!("{}", val);
    }
}

// Runs `program` in place until opcode 99.  Write targets (position pc+3 for
// Add/Multiply/LessThan/Equals, pc+1 for Input) are always position-mode, so
// they are read directly rather than via get_reg.
fn execute_program(term: &mut impl Terminal, program: &mut [i32]) {
    let mut pc: usize = 0;
    loop {
        // Low two digits are the opcode, the rest are the parameter modes.
        let opcode = FromPrimitive::from_i32(program[pc] % 100).expect("Segfault");
        let parameter_mode = program[pc] / 100;
        match opcode {
            OpCode::End => break,
            OpCode::Add | OpCode::Multiply => {
                let target: usize = program[pc + 3] as usize;
                let r1 = get_reg(&program, pc, parameter_mode, 1);
                let r2 = get_reg(&program, pc, parameter_mode, 2);
                program[target] = match opcode {
                    OpCode::Add => r1 + r2,
                    OpCode::Multiply => r1 * r2,
                    _ => unreachable!(),
                };
                pc += 4;
            }
            OpCode::Input => {
                let r1 = program[pc + 1];
                let ret = term.input();
                program[r1 as usize] = ret.trim().parse::<i32>().expect("Not an integer");
                pc += 2;
            }
            OpCode::Output => {
                let r1 = get_reg(&program, pc, parameter_mode, 1);
                term.output(r1);
                pc += 2;
            }
            OpCode::JumpIfTrue | OpCode::JumpIfNot => {
                let r1 = get_reg(&program, pc, parameter_mode, 1);
                let r2 = get_reg(&program, pc, parameter_mode, 2);
                let condition = r1 != 0;
                // Jump when the opcode's sense matches the condition;
                // otherwise fall through past the 3-value instruction.
                pc = if (opcode == OpCode::JumpIfTrue) == condition {
                    r2 as usize
                } else {
                    pc + 3
                }
            }
            OpCode::LessThan | OpCode::Equals => {
                let target: usize = program[pc + 3] as usize;
                let r1 = get_reg(&program, pc, parameter_mode, 1);
                let r2 = get_reg(&program, pc, parameter_mode, 2);
                program[target] = match opcode {
                    OpCode::LessThan => (r1 < r2) as i32,
                    OpCode::Equals => (r1 == r2) as i32,
                    _ => unreachable!(),
                };
                pc += 4;
            }
        };
    }
}

fn main() -> std::io::Result<()> {
    // Puzzle input is compiled in; the program is mutated in place while running.
    let mut program: Vec<i32> = include_str!("../input")
        .split(',')
        .map(|s| s.parse::<i32>().expect("Not an integer"))
        .collect();
    let mut terminal = CommandLineTerminal {};
    execute_program(&mut terminal, &mut program);
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::{execute_program, Terminal};

    // Scripted terminal: pops inputs from the front, records outputs.
    #[derive(Default)]
    struct TestTerminal {
        pub inputs: Vec<String>,
        pub outputs: Vec<i32>,
    }

    impl Terminal for TestTerminal {
        fn input(&mut self) -> String {
            self.inputs.remove(0)
        }
        fn output(&mut self, val: i32) {
            self.outputs.push(val);
        }
    }

    #[test]
    fn multiply_program() {
        let mut program: [i32; 5] = [1002, 4, 3, 4, 33];
        let mut test_terminal = TestTerminal::default();
        execute_program(&mut test_terminal, &mut program);
        assert_eq!(program[4], 99);
    }

    // Runs a fresh copy of `ro_program` with one queued input line.
    fn execute_with_input(term: &mut TestTerminal, ro_program: &[i32], input: &str) {
        use std::iter::FromIterator;
        let mut program = Vec::from_iter(ro_program.iter().cloned());
        term.inputs.push(String::from(input));
        execute_program(&mut *term, &mut program);
    }

    #[test]
    fn compare_program() {
        let mut test_terminal = TestTerminal::default();
        let equal_program = [3, 9, 8, 9, 10, 9, 4, 9, 99, -1, 8];
        execute_with_input(&mut test_terminal, &equal_program, "8\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 1);
        execute_with_input(&mut test_terminal, &equal_program, "101\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 0);
        let lessthan_program = [3, 9, 7, 9, 10, 9, 4, 9, 99, -1, 8];
        execute_with_input(&mut test_terminal, &lessthan_program, "-100\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 1);
        execute_with_input(&mut test_terminal, &lessthan_program, "9\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 0);
        let immediateequal_program = [3, 3, 1108, -1, 8, 3, 4, 3, 99];
        execute_with_input(&mut test_terminal, &immediateequal_program, "8\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 1);
        execute_with_input(&mut test_terminal, &immediateequal_program, "7\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 0);
        let immediatelessthan_program = [3, 3, 1107, -1, 8, 3, 4, 3, 99];
        execute_with_input(&mut test_terminal, &immediatelessthan_program, "-100\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 1);
        execute_with_input(&mut test_terminal, &immediatelessthan_program, "9\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 0);
    }

    #[test]
    fn jump_program() {
        let mut test_terminal = TestTerminal::default();
        // Larger example from the puzzle text: outputs 999/1000/1001 for
        // input below / equal to / above 8.
        let ro_program = [
            3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0, 36,
            98, 0, 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46, 1101, 1000,
            1, 20, 4, 20, 1105, 1, 46, 98, 99,
        ];
        execute_with_input(&mut test_terminal, &ro_program, "0\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 999);
        execute_with_input(&mut test_terminal, &ro_program, "8\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 1000);
        execute_with_input(&mut test_terminal, &ro_program, "9\n");
        assert_eq!(test_terminal.outputs.pop().expect("No outputs"), 1001);
    }
}
use crate::error::{Error, UnderlyingError}; use crate::hash::Hash; use crate::snapshots::{FileMetadata, Snapshot}; use crate::storage::stream::{ReadEggExt, WriteEggExt}; use ahash; use byteorder::LittleEndian; use byteorder::{ReadBytesExt, WriteBytesExt}; use smallvec::SmallVec; use std::collections::HashMap; use std::collections::VecDeque; use std::fs; use std::io::{self, BufRead}; use std::path; use std::path::PathBuf; use std::time::{Duration, SystemTime}; type Result<T> = std::result::Result<T, Error>; // TODO: Redo this as a Vec and HashSet /// Represents a file in the working directory that we may wish to snapshot or otherwise investigate, /// This structure does not contain the path of the file since the path is used as a key inside a map of WorkingFiles struct WorkingFile { hash: Option<Hash>, file_size: u64, modified_time: u128, } // TODO: String can't be used here, we must use a byte array since the data may not be valid utf8 #[derive(PartialEq)] enum ProspectiveDifference<'a> { DuplicateRemove(&'a str, VecDeque<usize>), DuplicateInsert(&'a str, VecDeque<usize>), Remove(&'a str, usize), Insert(&'a str, usize), } impl<'a> std::fmt::Debug for ProspectiveDifference<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { // Convert to BTreeSet ProspectiveDifference::DuplicateInsert(line, duplicates) => f.write_fmt(format_args!( "The line '{}' has multiple inserts at {:?} in the new file", line, duplicates )), ProspectiveDifference::DuplicateRemove(line, duplicates) => f.write_fmt(format_args!( "The line '{}' has duplicate removals at {:?} in the original file", line, duplicates )), ProspectiveDifference::Insert(line, line_number) => f.write_fmt(format_args!( "The line {} was inserted at line {} in the new file", line, line_number )), ProspectiveDifference::Remove(line, line_number) => f.write_fmt(format_args!( "The line {} was removed from the original file at line {}", line, line_number )), } } } pub enum ProspectiveMove<'a> { 
// First usize is the slice line, second usize is the previous line UnknownMove(usize, usize, &'a str), // First usize is original line, second usize is the edited line Move(usize, usize, &'a str), // Multiple line move - original line, edit line, number of lines, slice of lines MultipleLines(usize, usize, usize, &'a [&'a str]), } impl<'a> std::fmt::Display for ProspectiveMove<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ProspectiveMove::UnknownMove(slice_line, previous_line, line_text) => f.write_fmt(format_args!("The line '{}' was moved from slice line {} to previous line {}", line_text, slice_line, previous_line)), ProspectiveMove::Move(original_line, previous_line, line_data) => f.write_fmt(format_args!("The line '{}' was moved from line {} in the original file to line {} in the new file", line_data, original_line, previous_line)), ProspectiveMove::MultipleLines(original_line, previous_line, line_count, lines) => { f.write_fmt(format_args!("The lines {} to {} were moved to line {}", original_line, original_line + line_count, previous_line)) }, } } } // Prospective moves are guarenteed to be moves however the lines that are being moved may be changed // TODO: This all needs to be changed to a struct Move<'a> { line: &'a str, source_line: usize, new_line: usize, } impl<'a> Move<'a> { pub fn get_lines(&self) -> (usize, usize) { (self.source_line, self.new_line) } } impl<'a> std::fmt::Debug for Move<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_fmt(format_args!( "The line '{}' moved from {} to {}", self.line, self.source_line, self.new_line )) } } // Represents a diff where the underlying data is not owned pub enum Diff<'a> { Insert(&'a str, usize), Remove(&'a str, usize), DuplicateRemoves(&'a str, SmallVec<[usize; 5]>), DuplicateInserts(&'a str, SmallVec<[usize; 5]>), Moved(&'a str, usize, usize), Changed(&'a str, String, usize), } #[derive(PartialEq, Hash, Eq, Debug)] pub struct 
LineMoved {
    source_line: usize,
    // Lines are always moved from the original document to the edited document
    destination_line: usize,
}

impl std::fmt::Display for LineMoved {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_fmt(format_args!("{} to {}", self.source_line, self.destination_line))
    }
}

/// A zip over two iterators that, unlike `Iterator::zip`, keeps yielding until
/// BOTH inner iterators are exhausted; the shorter side's slot becomes `None`.
pub struct RawZip<A, B> {
    a: A,
    b: B,
}

impl<A, B> Iterator for RawZip<A, B>
where
    A: Iterator,
    B: Iterator,
{
    type Item = (Option<A::Item>, Option<B::Item>);
    fn next(&mut self) -> Option<Self::Item> {
        // Only stop once both sides are exhausted; otherwise pass through
        // whatever mix of Some/None the two iterators produced.
        match (self.a.next(), self.b.next()) {
            (None, None) => None,
            (a, b) => Some((a, b)),
        }
        // let x = self.a.next();
        // let y = self.b.next();
        // if x.is_none() && y.is_none() {
        //     return None;
        // }
        // // As long as one of the iterators is returning Some we continue returning results
        // Some((x,y))
    }
}

impl<A: Iterator, B: Iterator> RawZip<A, B> {
    // Construct a RawZip from the two iterators to pair up.
    pub fn new(a: A, b: B) -> RawZip<A, B> {
        RawZip { a, b }
    }
}

// Read-only accessors for the per-file bookkeeping gathered while indexing.
impl WorkingFile {
    // True once a content hash has been computed for this file
    pub fn is_hashed(&self) -> bool {
        self.hash.is_some()
    }
    pub fn hash(&self) -> Option<&Hash> {
        self.hash.as_ref()
    }
    pub fn filesize(&self) -> u64 {
        self.file_size
    }
    // Microseconds since the UNIX epoch (see `get_modified_time`)
    pub fn modified_time(&self) -> u128 {
        self.modified_time
    }
}

/// Contains a number of helpful functions for dealing with the Repositories working directory
/// Primarily it provides an interface to check if a file(s) have changed since a snapshot was taken
pub struct WorkingDirectory<'a> {
    // Indexed files keyed by path; populated by `index_repository`
    working_files: HashMap<path::PathBuf, WorkingFile>,
    // Root of the working directory this instance operates on
    path_to_working: &'a path::Path,
}

impl<'a> WorkingDirectory<'a> {
    /// Creates a WorkingDirectory rooted at `path_to_working` with an empty index.
    pub fn new(path_to_working: &'a path::Path) -> WorkingDirectory {
        WorkingDirectory {
            working_files: HashMap::new(),
            path_to_working,
        }
    }

    /// Looks at all the files in a directory and stores file size, file name and last modified file time,
    /// Subdirectories are not descended into here; they are pushed onto
    /// `directories_found` for the caller (`index_repository`) to process.
    fn index_directory(
        path_to_search: &path::Path,
        files_found: &mut HashMap<path::PathBuf, WorkingFile>,
        directories_found: &mut Vec<PathBuf>,
    ) -> Result<()> {
        let items_found = match fs::read_dir(path_to_search) {
            Ok(result) =>
result,
            // TODO(review): unimplemented!() will abort the process on any
            // read_dir failure - this should become a proper Error
            Err(error) => unimplemented!(),
        };
        for item in items_found {
            let valid_item = match item {
                Ok(valid_item) => valid_item,
                Err(error) => unimplemented!(),
            };
            let path = valid_item.path();
            let file_type = match valid_item.file_type() {
                Ok(item_type) => item_type,
                Err(error) => unimplemented!(),
            };
            if file_type.is_dir() {
                // TODO: check if the path is .egg as we need to ignore this
                // TODO: Technically we need to filter out the repository directory, so the .egg folder inside the working directory
                // TODO: Check for infinite recursion
                // self.path_to_working.join(".egg");
                // Defer the subdirectory; the caller drains this worklist
                directories_found.push(path);
            } else {
                // Add the file to the list
                let metadata = match valid_item.metadata() {
                    Ok(metadata) => metadata,
                    Err(error) => unimplemented!(),
                };
                let modified_time = WorkingDirectory::get_modified_time(&metadata);
                // let time_modified = date.elapsed();
                let data = WorkingFile {
                    hash: None, // We only need to provide a hash if the file system can't provide a length or modification time for the given file
                    file_size: metadata.len(),
                    modified_time,
                };
                files_found.insert(path, data);
            }
        }
        Ok(())
    }

    // Retrieves the last modified time of a file with microsecond resolution
    // Returned as microseconds since the UNIX epoch.
    fn get_modified_time(file_metadata: &fs::Metadata) -> u128 {
        match file_metadata.modified() {
            Ok(valid_date) => match valid_date.duration_since(SystemTime::UNIX_EPOCH) {
                Ok(time_modified) => time_modified.as_micros(),
                Err(error) => unimplemented!(), // This means that the file was modified before the UNIX EPOCH
            },
            Err(error) => unimplemented!(), // File system does not support obtaining the last modified file time - TODO: Fallback to using a hash and display a warning
        }
    }

    // Walks the whole working directory (breadth via an explicit worklist)
    // and returns every file found keyed by its path.
    fn index_repository(&self) -> Result<HashMap<path::PathBuf, WorkingFile>> {
        // All files must be relative to the working directory as that is how snapshot paths are stored
        let mut directories_to_search = Vec::new();
        let mut files_found = HashMap::new();
        WorkingDirectory::index_directory(
            self.path_to_working,
            &mut files_found,
            &mut
directories_to_search,
        )?;
        // FIXME: This is subject to infinite recursion if user has created links to parent directory, the fix will need to be in index_directory
        while let Some(path_to_search) = directories_to_search.pop() {
            WorkingDirectory::index_directory(
                path_to_search.as_path(),
                &mut files_found,
                &mut directories_to_search,
            )?;
        }
        Ok(files_found)
    }

    /// Compares the list of hashed paths with the working directory
    ///
    /// Returns the subset of `stored_files` whose path is missing from the
    /// working directory, or whose size / modification time no longer matches
    /// the indexed file. Each changed path appears at most once.
    pub fn get_changed_files<'b>(&self, stored_files: &'b [FileMetadata]) -> Vec<&'b path::Path> {
        // Get snapshot paths and their hashes
        // Lookup path in changed files and compare hashes
        // Get an up to date list of all files in the repositories working directory
        let mut changed_files = Vec::new();
        let files_found = match self.index_repository() {
            Ok(files_found) => files_found,
            // TODO(review): propagate the error instead of aborting
            Err(error) => unimplemented!(),
        };
        for stored_file in stored_files {
            let working_file = match files_found.get(stored_file.path()) {
                Some(working_file) => working_file,
                None => {
                    // The working directory has no file with that path
                    // TODO: Every changed file needs a status associated with it - ie sometimes a file is deleted renamed or created, as opposed to just edited
                    changed_files.push(stored_file.path()); // File exists in list but not in repository
                    // BUGFIX: this was `break`, which abandoned the scan after
                    // the first missing file and silently dropped every change
                    // in the remaining stored files. Only this entry is missing,
                    // so move on to the next one.
                    continue;
                }
            };
            // BUGFIX: size and modification time were previously two independent
            // `if` blocks, so a file differing in both was pushed twice. One
            // combined check records each changed path exactly once.
            if working_file.file_size != stored_file.filesize()
                || working_file.modified_time != stored_file.modified_time()
            {
                // File size or time of modification does not match
                changed_files.push(stored_file.path());
            }
        }
        changed_files
    }

    /// Compares the working directory with the hashes stored in a snapshot
    /// Delegates to `get_changed_files` with the snapshot's recorded file list.
    pub fn get_files_changed_since_snapshot<'b>(
        &self,
        snapshot: &'b Snapshot,
        hashed_files: &'b [(path::PathBuf, Hash)], // NOTE(review): currently unused - kept for interface compatibility
    ) -> Vec<&'b path::Path> {
        let hashed_paths = snapshot.get_files();
        self.get_changed_files(hashed_paths)
    }

    // // TODO: Move this function into the working module
    // // Takes a vector of paths or pathbufs and returns a vector of tuples containing the path and the hash
    // fn hash_file_list<P: Into<path::PathBuf>>(file_list: Vec<P>) -> Result<Vec<(path::PathBuf, Hash)>> {
    //     // TODO: This should be moved to Hash
    //     let mut path_with_hash = Vec::with_capacity(file_list.len());
    //     // Hash all the files being snapshot and store it along with the path in the snapshot structure
    //     for path_to_file in file_list {
    //         let path_to_file = path_to_file.into();
    //         let hash_string = match Hash::hash_file(path_to_file.as_path()) {
    //             Ok(hash_string) => hash_string,
    //             Err(error) => return Err(error.add_debug_message(format!("Failed to process the list of files to hash, the problem file was {}", path_to_file.display()))),
    //         };
    //         path_with_hash.push((path_to_file, hash_string));
    //     }
    //     Ok(path_with_hash)
    // }
}

impl<'a> WorkingDirectory<'a> {
    /// Given a list of paths, this function returns a list of FileMetadata
    /// (hash, size, path and modification time) for each path in order.
    pub(crate) fn create_metadata_list(
        files_to_store: Vec<path::PathBuf>,
    ) -> Result<Vec<FileMetadata>> {
        let mut storage_list = Vec::new();
        for file_to_store in files_to_store {
            let metadata = match WorkingDirectory::get_file_metadata(file_to_store) {
                Ok(metadata) => metadata,
                // TODO(review): propagate the error instead of aborting
                Err(error) => unimplemented!(),
            };
            storage_list.push(metadata);
        }
        Ok(storage_list)
    }

    /// Gets the file size and time modified of the given path as well as its hash
    /// This is used to collect information about a file that is part of a snapshot
    fn get_file_metadata(path_to_file: path::PathBuf) -> Result<FileMetadata> {
        let hash_of_file = match Hash::hash_file(path_to_file.as_path()) {
            Ok(hash_of_file) => hash_of_file,
            Err(error) => unimplemented!(),
        };
        let file_data = match path_to_file.metadata() {
            Ok(file_data) => file_data,
            Err(error) => unimplemented!(),
        };
        let file_size = file_data.len();
        let metadata = FileMetadata::new(
            hash_of_file,
            file_size,
            path_to_file,
            WorkingDirectory::get_modified_time(&file_data),
        );
        Ok(metadata)
    }
}

impl<'a> WorkingDirectory<'a> {
    // Given a file in the working directory what has changed since the given snapshot
    pub fn get_patch(&self, snapshot_to_compare: &Snapshot, path_to_compare: &path::Path) {
        // path_to_compare should be relative to self.path_to_working
        // Obtaining the relative path should give the path stored in the snapshot
        // Check if the given path exists in the snapshot and if LocalStorage can find it
        // Obtain a built version of the file - built here refers to uncompressed and any delta compression applied
        // QUESTION: Do we create a temp file or map the entire file into memory, can we stream the file uncompressing as we process the file
        // IDEA: Since large files should be split into multiple smaller files we should be able to process each file individually and so map an entire file at once
    }

    // TODO: This function only works for removed based overlapping sequences
    /// Resolves the case where a matching sequence overlaps another candidate
    /// sequence during diffing; applies the smaller sequence's edits and
    /// returns the length of the sequence that was consumed so the caller can
    /// advance its line position.
    pub fn get_edits_for_remove_overlapping_sequence(
        lines_removed: &mut HashMap<String, usize>, // A map of the changes needed to the previous slice
        lines_inserted: &mut HashMap<String, usize>, // A map of the changes needed to the current slice
        lines_moved: &mut HashMap<String, LineMoved>,
        current_line: usize, // Current line position in the sequences
        previous_line: usize, // Contains the line where the original sequence started
        original_file: &[String],
        edited_file: &[String],
        new_sequence_line: usize,
        current_sequence_length: usize,
    ) -> usize {
        let mut original_length = current_sequence_length;
        let mut overlapping_length = 1;
        // New sequence previous start is at new_sequence line but don't really need it
        let original_start = current_line;
        let original_previous = previous_line;
        let overlapping_start = current_line + original_length - 1;
        let overlapping_previous = new_sequence_line;
        println!(
            "original sequence starts at {} in {:?}, and is currently {} long",
            original_start, edited_file, original_length
        );
        // NOTE(review): these debug prints index without bounds checks and can
        // panic near the end of either file - confirm before relying on them
        println!(
            "Checking new sequence, {} == {}",
            original_file[overlapping_start + overlapping_length],
            edited_file[overlapping_previous + overlapping_length]
        );
        println!(
            "Checking original sequence, {} == {}",
            edited_file[original_start + original_length],
            original_file[original_previous + original_length]
        );
        // Grow both candidate sequences independently until neither matches
        let mut original_continues = true;
        let mut overlapping_continues = true;
        loop {
            // NOTE: Once the sequence ends we must not accidently increase the length because of a random future match
            // NOTE: Can this happen, overlapping sequences need to be similiar lengths otherwise they can't overlap
            // ABCDMNJF
            // MNXLABCD
            // This can create a false match on F since the ABCD sequence ending is found at the same time
            let original_matched = match (
                edited_file.get(original_start + original_length),
                original_file.get(original_previous + original_length),
            ) {
                (Some(current_original), Some(previous_original)) => {
                    println!(
                        "Previous line was {}, Current line was {}",
                        current_original, previous_original
                    );
                    current_original == previous_original
                }
                // Either file ran out of lines - the sequence cannot continue
                _ => false,
            };
            let overlapping_matched = match (
                original_file.get(overlapping_start + overlapping_length),
                edited_file.get(overlapping_previous + overlapping_length),
            ) {
                (Some(second_sequence), Some(first_sequence)) => {
                    println!(
                        "Second sequence was {}, Previous sequence was {}",
                        second_sequence, first_sequence
                    );
                    second_sequence == first_sequence
                }
                _ => false,
            };
            // The `*_continues` flags latch: once a sequence has stopped it can
            // never resume, even if a later pair of lines happens to match.
            match (
                original_continues && original_matched,
                overlapping_continues && overlapping_matched,
            ) {
                (true, true) => {
                    original_length += 1;
                    overlapping_length += 1;
                }
                (false, true) => {
                    overlapping_length += 1;
                    // Mark original sequence as completed
                    original_continues = false;
                }
                (true, false) => {
                    original_length += 1;
                    // Mark overlapping sequence as completed
                    overlapping_continues = false;
                }
                (false, false) => {
                    break;
                }
            }
        }
        println!("Length of original sequence is {}", original_length);
        println!("Length of overlapping sequence is {}", overlapping_length);
        println!("Lines Removed: {:?}", lines_removed);
        // We have lengths so now process the smaller and then return the new line position
        if original_length <= overlapping_length {
            // We add the original sequence to the changes as that is the smaller sequence
            // NOTE(review): this loop body is currently empty - the original
            // sequence is not yet recorded anywhere in this branch
            for index in original_start..(original_start + original_length) {
                // TODO: To correctly add the original sequence we need to get the starting position when the overlap was detected
                // NOTE: We can solve the above problem by only adding items to the sequence if index >= overlapping_start
                // TODO: Need to take into account gaps in the overlapping sequence that need to be added to corrections
            }
            overlapping_length
        } else {
            // We add the overlapping sequence to the changes
            // NOTE: Need to remove any changes that were caused by the original stream before the overlapping stream started
            // TODO: We need to iterate over the larger sequence to process values accordingly
            // TODO: We use the smaller sequence index range to know what to do for a given index
            // TODO: More complicated than this, the range is overlapping_start to the max(start + length)
            // original_start + original_length - overlapping_start = The length of the original sequence but from the start of the overlapping sequence
            // TODO: Max iteration needs could take into account the max length of the file and avoid checking
            // This is the index range if original file is shorter than the end of the original sequence
            let min_size = original_file.len() - overlapping_start;
            let max_index = overlapping_length.max(original_start + original_length - overlapping_start);
            let actual_max = min_size.min(max_index);
            println!("Actual index max is {}", actual_max);
            println!("Test Iterating from {} to {}", overlapping_start, overlapping_start + actual_max);
            println!("Iterating from 0 to {}", max_index);
            for index in 0..actual_max {
                println!("Examining index {}, which is {} in the slice", index, index + overlapping_start);
                // We need to remove lines that were part of the original sequence that were added before
                // we knew that there was an overlapping sequence
                // TODO: Remove previous entries ranging from overlapping_start (index 0) to actual_max
                // The line opposite the last line of the
                println!(
                    "Removing {} as leftover from processing simple sequence into overlap",
                    edited_file[index + overlapping_start].as_str()
                );
                lines_removed.remove(edited_file[index + overlapping_start].as_str());
                if index < overlapping_length {
                    // Inside the overlapping sequence proper: record it as a move
                    let line_moved = LineMoved {
                        source_line: index + overlapping_start,
                        destination_line: overlapping_previous + index,
                    };
                    lines_moved.insert(original_file[overlapping_start + index].clone(), line_moved);
                    // TODO: Remove the original inserts
                    lines_inserted.remove(original_file[overlapping_start + index].as_str());
                } else {
                    // Past the overlapping sequence: treat the line as removed
                    println!("Do we need to add {} to the removed items", original_file[index + overlapping_start]);
                    lines_removed.insert(original_file[index + overlapping_start].clone(), index + overlapping_start);
                }
            }
            println!("Original: {:?}, Edited: {:?}", original_file, edited_file);
            println!("Moved Lines: {:?}, Lines Removed: {:?}", lines_moved, lines_removed);
            original_length
        }
    }

    // TODO: Tortoise and Hare approach for dealing with duplicates - cycle detection
    // This function can process both finding a previously removed line or a previously inserted line
    // Extends a matching sequence that starts at `current_line`/`previous_line`,
    // updating both correction maps as the slice grows, and returns the line
    // position just past the end of the consumed slice.
    fn get_sequence_edits(
        current_corrections: &mut HashMap<String, usize>, // A map of the changes needed to the previous slice to make the two slices equilivant
        previous_corrections: &mut HashMap<String, usize>, // A map of the changes needed to the current slice to make the two slices equilivant
        current_line: usize,
        previous_line: usize,
        current_data: &[String], // This is the slice that contains the first instance of the data of a sequence
        previous_data: &[String], // This is the slice that contains the second instance of the data of a sequence
    ) -> usize {
        // We already know that the slice is at least 1 length so we start from 1
        let mut slice_length = 1;
        eprintln!("Current Data is {:?}", current_data);
        eprintln!("Current Corrections is {:?}", current_corrections);
        eprintln!("Previous Data is {:?}", previous_data);
        eprintln!("Previous Corrections is {:?}", previous_corrections);
        current_corrections.remove(current_data[current_line].as_str()); // NOTE: While its relatively safe to do this it's possible this needs to be reinserted if there are overlapping sequences
        println!(
            "Previous Corrections after initial remove: {:?}",
            previous_corrections
        );
        // Distance between the two cursors; used to peek ahead to where the
        // slice would end on the other side
        let line_offset = current_line - previous_line;
        // NOTE: This match checks if the start of the slice is also the end of the slice
        // The ideal way of handling this is to immediately enter the below loop instead of special processing
        // This doesn't seem possible since we already know that the initial position is part of a slice so either we check twice
        // or change the order, ie remove then check next break if not
        match (
            previous_data.get(current_line), // Line opposite the start of the slice
            current_data.get(current_line + line_offset), // Line to check to see if the start of the slice is also the end of the slice
        ) {
            (Some(opposite_line), Some(future_line)) => {
                eprintln!(
                    "Opposite line was {}, future line was {}, do we add them {}",
                    opposite_line, future_line,
                    opposite_line != future_line
                );
                if opposite_line != future_line {
                    // We add the opposite line when these two lines are not equal but are also not NONE
                    current_corrections.insert(previous_data[current_line].clone(), current_line);
                    // TODO: We can exit slice handling early since the slice has ended
                }
            }
            (Some(_), None) => {
                // This means that there is a line opposite the slice but there are no future lines and the slice is ending, so we must add this line to current corrections
                current_corrections.insert(previous_data[current_line].clone(), current_line);
                // TODO: We can end slice handling early since the slice has ended
            }
            _ => {}
        };
        // A slice can only be as long as the smallest file or section if we have broken the file into parts
        // NOTE(review): the comment says "smallest" but the code takes the
        // larger of the two lengths (`max`) - confirm which is intended
        let total = current_data.len().max(previous_data.len()) - current_line;
        eprintln!("Processing a slice that is potentially {} long", total);
        // TODO: A loop here is not ideal but it can be vectorized
        // Process the slice
        // NOTE: We always check ahead by one to see if we add the current opposite side to the removed lines
        for _ in 1..total {
            eprintln!(
                "Slice Iteration: Previous corrections are {:?}, current corrections are {:?}",
                &current_corrections, &previous_corrections
            );
            // QUESTION: Ideally we use an iterator here, if we return None the iterator stops, but dealing with overlapping slices becomes a challenge
            // TODO: These three variables can all be moved inside a custom iterator
            // Returns true if the two lines match otherwise false, this includes a line not being present etc...
            let slice_continues = match (
                previous_data.get(previous_line + slice_length), // Previous line position
                current_data.get(current_line + slice_length), // Current line position
            ) {
                (Some(previous_line_data), Some(current_line_data)) => {
                    println!(
                        "Previous line was {}, Current line was {}",
                        previous_line_data, current_line_data
                    );
                    previous_line_data == current_line_data
                }
                _ => false,
            };
            let add_opposite = match (
                previous_data.get(current_line + slice_length), // Line opposite the current position in slice
                current_data.get(current_line + slice_length + line_offset), // Line to check to see if slice continues in future, ie do we add opposite
            ) {
                (Some(opposite_line), Some(future_line)) => {
                    println!(
                        "Opposite line was {}, future line was {}, do we add them {}",
                        opposite_line, future_line,
                        opposite_line != future_line
                    );
                    opposite_line != future_line // We add the opposite line when these two lines are not equal but are also not NONE
                }
                (Some(_), None) => {
                    // This means that there is a line opposite a slice but there are no future lines and the slice is ending, so we must add this line to removed
                    true
                }
                _ => false,
            };
            // Is there a sequence opposite the current one, we do this by seeing if we have previously seen the value opposite the current sequence listed in the previous corrections
            let overlapping_slice = previous_data
                .get(current_line + slice_length)
                .and_then(|line| {
                    // previous_corrections.get returns a reference to its data, its data type is usize and is copyable
                    // we may need to borrow previous_corrections again if we encounter overlapping sequences so we dereference
                    // the usize and return it as an option
                    previous_corrections
                        .get(line)
                        .and_then(|value| Some(*value))
                });
            println!(
                "Overlap Check: Looked for {:?} in {:?}",
                previous_data.get(current_line + slice_length),
                previous_corrections
            );
            println!(
                "When checking for overlapping slice we found {:?} at line {}",
                overlapping_slice,
                current_line + slice_length
            );
            // We match against the next index as well if it exists
            // slice_continues, slice_continues + 1, overlapping_slice
            match (slice_continues, add_opposite, overlapping_slice) {
                // Slice continues AND a second (overlapping) sequence has been
                // detected - overlap handling is not implemented on this path yet
                (true, _, Some(line_of_overlap)) => {
                    // TODO: Here we return the longest of the two slices as the matching sequence
                    // let longest_slice = WorkingDirectory::get_edits_for_overlapping_sequence(
                    //     current_corrections,
                    //     previous_corrections,
                    //     lines
                    //     current_line,
                    //     previous_line,
                    //     current_data,
                    //     previous_data,
                    //     line_of_overlap,
                    //     slice_length + 1, // We add one to account for the current iteration through the slice
                    // );
                    unimplemented!("Debug Overlapping slices mid-sequence");
                    // return current_line + longest_slice;
                }
                (true, true, None) => {
                    // Slice continues and we add the opposite to removed
                    eprintln!(
                        "Slice continues at {} in the new document and we need to add the opposite line",
                        current_line + slice_length
                    );
                    // If our position in the slice is less than the current position in the new file
                    if previous_line + slice_length < current_line {
                        // Remove items that were previously considered removed since we have not yet reached a point where previous doesn't point to unprocessed lines
                        eprintln!(
                            "Removing previous {} from removed lines since line {} in original should be part of the slice and is less than {} which is the current line position in the new document",
                            &previous_data[previous_line + slice_length],
previous_line + slice_length,
                            current_line
                        );
                        current_corrections.remove(&previous_data[previous_line + slice_length]);
                    }
                    // new_line + slice_length
                    println!(
                        "Adding {} at line {} to removed lines",
                        previous_data[current_line + slice_length],
                        current_line + slice_length
                    );
                    current_corrections.insert(
                        previous_data[current_line + slice_length].clone(),
                        current_line + slice_length,
                    );
                    slice_length += 1;
                }
                (true, false, None) => {
                    // Slice continues but we do not add the opposite to removed
                    eprintln!(
                        "Slice continues at {} in the new document",
                        current_line + slice_length
                    );
                    // If our position in the slice is less than the current position in the new file
                    if previous_line + slice_length < current_line {
                        // Remove items that were previously considered removed since we have not yet reached a point where previous doesn't point to unprocessed lines
                        eprintln!(
                            "Removing previous {} from removed lines since line {} in original should be part of the slice and is less than {} which is the current line position in the new document",
                            &previous_data[previous_line + slice_length],
                            previous_line + slice_length,
                            current_line
                        );
                        current_corrections.remove(&previous_data[previous_line + slice_length]);
                    }
                    slice_length += 1;
                }
                (false, _, _) => break, // If the original slice stops as we detect another slice we dont care we deal with that on the next iteration
            }
        }
        // Position just past the end of the consumed slice
        current_line + slice_length
    }

    // This function can process both finding a previously removed line or a previously inserted line
    // Variant of `get_sequence_edits` specialised for a sequence first seen as
    // inserts: extends the slice, maintaining the inserted/removed/moved maps,
    // and returns the line position just past the consumed slice.
    fn get_insert_sequence_edits(
        lines_inserted: &mut HashMap<String, usize>, // A map of the changes needed to the previous slice to make the two slices equilivant
        lines_removed: &mut HashMap<String, usize>, // A map of the changes needed to the current slice to make the two slices equilivant
        lines_moved: &mut HashMap<String, LineMoved>,
        current_line: usize,
        previous_line: usize,
        edited_file: &[String], // This is the slice that contains the first instance of the data of a sequence
        original_file:
&[String], // This is the slice that contains the second instance of the data of a sequence
    ) -> usize {
        // We already know that the slice is at least 1 length so we start from 1
        let mut slice_length = 1;
        eprintln!("Original Data is {:?}", original_file);
        eprintln!("Edited data is {:?}", edited_file);
        eprintln!("Lines inserted are {:?}", lines_inserted);
        eprintln!("Lines removed are {:?}", lines_removed);
        lines_inserted.remove(edited_file[previous_line].as_str()); // NOTE: While its relatively safe to do this it's possible this needs to be reinserted if there are overlapping sequences
        println!(
            "Lines removed after discovering a line that was thought to be inserted: {:?}",
            lines_inserted
        );
        // Distance between the two cursors; used to peek ahead to where the
        // slice would end on the other side
        let line_offset = current_line - previous_line;
        // NOTE: This match checks if the start of the slice is also the end of the slice
        // The ideal way of handling this is to immediately enter the below loop instead of special processing
        // This doesn't seem possible since we already know that the initial position is part of a slice so either we check twice
        // or change the order, ie remove then check next break if not
        if WorkingDirectory::is_part_of_sequence(
            original_file,
            edited_file,
            current_line,
            line_offset,
        ) {
            lines_inserted.insert(edited_file[current_line].clone(), current_line);
        }
        // A slice can only be as long as the smallest file or section if we have broken the file into parts
        // NOTE(review): comment says "smallest" but the code takes `max` of the
        // two lengths - confirm which is intended
        let total = original_file.len().max(edited_file.len()) - current_line;
        eprintln!("Processing a slice that is potentially {} long", total);
        // TODO: A loop here is not ideal but it can be vectorized
        // Process the slice
        // NOTE: We always check ahead by one to see if we add the current opposite side to the removed lines
        for _ in 1..total {
            eprintln!(
                "Slice Iteration: Previous corrections are {:?}, current corrections are {:?}",
                &lines_removed, &lines_inserted
            );
            // QUESTION: Ideally we use an iterator here, if we return None the iterator stops, but dealing with overlapping slices becomes a challenge
            // TODO: These three variables can all be moved inside a custom iterator
            // Returns true if the two lines match otherwise false, this includes a line not being present etc...
            let slice_continues = match (
                edited_file.get(previous_line + slice_length), // Previous line position
                original_file.get(current_line + slice_length), // Current line position
            ) {
                (Some(previous_line_data), Some(current_line_data)) => {
                    println!(
                        "Previous line was {}, Current line was {}",
                        previous_line_data, current_line_data
                    );
                    previous_line_data == current_line_data
                }
                _ => false,
            };
            let add_opposite = WorkingDirectory::is_part_of_sequence(
                original_file,
                edited_file,
                current_line + slice_length,
                line_offset,
            );
            // Is there a sequence opposite the current one, we do this by seeing if we have previously seen the value opposite the current sequence listed in the previous corrections
            let overlapping_slice = edited_file
                .get(current_line + slice_length)
                .and_then(|line| {
                    // previous_corrections.get returns a reference to its data, its data type is usize and is copyable
                    // we may need to borrow previous_corrections again if we encounter overlapping sequences so we dereference
                    // the usize and return it as an option
                    lines_inserted.get(line).and_then(|value| Some(*value))
                });
            println!(
                "Overlap Check: Looked for {:?} in {:?}",
                edited_file.get(current_line + slice_length),
                original_file
            );
            println!(
                "When checking for overlapping slice we found {:?} at line {}",
                overlapping_slice,
                current_line + slice_length
            );
            // We match against the next index as well if it exists
            // slice_continues, slice_continues + 1, overlapping_slice
            match (slice_continues, add_opposite, overlapping_slice) {
                // Slice continues AND an overlapping sequence was detected
                (true, _, Some(line_of_overlap)) => {
                    // TODO: Here we return the longest of the two slices as the matching sequence
                    let longest_slice = WorkingDirectory::get_edits_for_remove_overlapping_sequence(
                        lines_removed,
                        lines_inserted,
                        lines_moved,
                        current_line,
                        previous_line,
                        original_file,
                        edited_file,
                        line_of_overlap,
                        slice_length + 1, // We add one to account for the current iteration through the slice
                    );
                    // NOTE(review): this unimplemented!() makes the following
                    // `return` unreachable - it appears to be a deliberate
                    // debugging stop for the overlap path; remove it once the
                    // overlap handling above is trusted
                    unimplemented!("Debug Overlapping slices mid-sequence");
                    return current_line + longest_slice;
                }
                (true, true, None) => {
                    // Slice continues and we add the opposite to removed
                    eprintln!(
                        "Slice continues at {} in the new document and we need to add the opposite line",
                        current_line + slice_length
                    );
                    // If our position in the slice is less than the current position in the new file
                    if previous_line + slice_length < current_line {
                        // Remove items that were previously considered inserted since we have not yet reached a point where previous doesn't point to unprocessed lines
                        eprintln!(
                            "Removing previous {} from removed lines since line {} in original should be part of the slice and is less than {} which is the current line position in the new document",
                            &edited_file[previous_line + slice_length],
                            previous_line + slice_length,
                            current_line
                        );
                        lines_inserted.remove(&edited_file[previous_line + slice_length]);
                    }
                    // new_line + slice_length
                    println!(
                        "Adding {} at line {} to inserted lines",
                        edited_file[current_line + slice_length],
                        current_line + slice_length
                    );
                    lines_inserted.insert(
                        edited_file[current_line + slice_length].clone(),
                        current_line + slice_length,
                    );
                    slice_length += 1;
                }
                (true, false, None) => {
                    // Slice continues but we do not add the opposite to removed
                    eprintln!(
                        "Slice continues at {} in the new document",
                        current_line + slice_length
                    );
                    // If our position in the slice is less than the current position in the new file
                    if previous_line + slice_length < current_line {
                        // Remove items that were previously considered removed since we have not yet reached a point where previous doesn't point to unprocessed lines
                        eprintln!(
                            "Removing previous {} from removed lines since line {} in original should be part of the slice and is less than {} which is the current line position in the new document",
                            &edited_file[previous_line + slice_length],
                            previous_line + slice_length,
                            current_line
                        );
                        lines_inserted.remove(&edited_file[previous_line + slice_length]);
                    }
                    slice_length += 1;
                }
                (false, _, _) => break, // If the original slice stops as we detect another slice we dont care we deal with that on the next iteration
            }
        }
        // Position just past the end of the consumed slice
        current_line + slice_length
    }

    // Checks whether the line at `current_line_in_slice` should be recorded as
    // an edit: true when the line opposite the slice position differs from the
    // peeked-ahead line (or when the sequence is ending and there is no
    // peeked-ahead line at all).
    fn is_part_of_sequence(
        sequence_data: &[String],
        opposite_sequence: &[String],
        current_line_in_slice: usize,
        line_offset: usize,
    ) -> bool {
        match (
            opposite_sequence.get(current_line_in_slice), // Line opposite the current position in slice
            sequence_data.get(current_line_in_slice + line_offset), // Line to check to see if slice continues in future, ie do we add opposite
        ) {
            (Some(opposite_line), Some(future_line)) => {
                println!(
                    "Opposite line was {}, future line was {}, do we add them {}",
                    opposite_line, future_line,
                    opposite_line != future_line
                );
                opposite_line != future_line // We add the opposite line when these two lines are not equal but are also not NONE
            }
            (Some(_), None) => {
                // This means that there is a line opposite a slice but there are no future lines and the slice is ending, so we must add this line to removed
                true
            }
            _ => false,
        }
    }

    // This function can process both finding a previously removed line or a previously inserted line
    // Variant of `get_sequence_edits` specialised for a sequence first seen as
    // removals; mirrors `get_insert_sequence_edits` with the two files swapped.
    fn get_remove_sequence_edits(
        lines_inserted: &mut HashMap<String, usize>, // A map of the changes needed to the previous slice to make the two slices equilivant
        lines_removed: &mut HashMap<String, usize>, // A map of the changes needed to the current slice to make the two slices equilivant
        lines_moved: &mut HashMap<String, LineMoved>,
        current_line: usize,
        previous_line: usize,
        edited_file: &[String], // This is the slice that contains the first instance of the data of a sequence
        original_file: &[String], // This is the slice that contains the second instance of the data of a sequence
    ) -> usize {
        // We already know that the slice is at least 1 length so we start from 1
        let mut slice_length = 1;
        eprintln!("Original Data is {:?}",
original_file); eprintln!("Edited data is {:?}", edited_file); eprintln!("Lines inserted are {:?}", lines_inserted); eprintln!("Lines removed are {:?}", lines_removed); lines_removed.remove(original_file[previous_line].as_str()); // NOTE: While its relatively safe to do this it's possible this needs to be reinserted if there are overlapping sequences println!( "Lines removed after discovering a line that was tought to be removed: {:?}", lines_removed ); let line_offset = current_line - previous_line; // NOTE: This match checks if the start of the slice is also the end of the slice // The ideal way of handling this is to immediately enter the below loop instead of special processing // This doesn't seem possible since we already know that the initial position is part of a slice so either we check twice // or change the order, ie remove then check next break if not if WorkingDirectory::is_part_of_sequence( edited_file, original_file, current_line, line_offset, ) { lines_removed.insert(original_file[current_line].clone(), current_line); // TODO: If we end up here then the slice had a length of 1 and we are done } // A slice can only be as long as the smallest file or section if we have broken the file into parts let total = original_file.len().max(edited_file.len()) - current_line; eprintln!("Processing a slice that is potentially {} long", total); // TODO: A loop here is not ideal but it can be vectorized // Process the slice // NOTE: We always check ahead by one to see if we add the current opposite side to the removed lines for _ in 1..total { eprintln!( "Slice Iteration: Previous corrections are {:?}, current corrections are {:?}", &lines_removed, &lines_inserted ); // QUESTION: Ideally we use an iterator here, if we return None the iterator stops, but dealing with overlapping slices becomes a challenge // TODO: These three variables can all be moved inside a custom iterator // Returns true if the two lines match otherwise false, this includes a line not being 
present etc... let slice_continues = match ( original_file.get(previous_line + slice_length), // Previous line position edited_file.get(current_line + slice_length), // Current line position ) { (Some(previous_line_data), Some(current_line_data)) => { println!( "Previous line was {}, Current line was {}", previous_line_data, current_line_data ); previous_line_data == current_line_data } _ => false, }; let add_opposite = WorkingDirectory::is_part_of_sequence( edited_file, original_file, current_line + slice_length, line_offset, ); // Is there a sequence opposite the current one, we do this by seeing if we have previously seen the value opposite the current sequence listed in the previous corrections let overlapping_slice = original_file .get(current_line + slice_length) .and_then(|line| { // previous_corrections.get returns a reference to its data, its data type is usize and is copyable // we may need to borrow previous_corrections again if we encounter overlapping sequences so we dereference // the usize and return it as an option lines_inserted.get(line).and_then(|value| Some(*value)) }); println!( "Overlap Check: Looked for {:?} in {:?}", edited_file.get(current_line + slice_length), original_file ); println!( "When checking for overlapping slice we found {:?} at line {}", overlapping_slice, current_line + slice_length ); // We match against the next index as well if it exists // slice_continues, slice_continues + 1, overlapping_slice match (slice_continues, add_opposite, overlapping_slice) { (true, _, Some(line_of_overlap)) => { // TODO: Here we return the longest of the two slices as the matching sequence let longest_slice = WorkingDirectory::get_edits_for_remove_overlapping_sequence( lines_removed, lines_inserted, lines_moved, current_line, previous_line, original_file, edited_file, line_of_overlap, slice_length + 1, // We add one to account for the current iteration through the slice ); //unimplemented!("Debug Overlapping slices mid-sequence"); return 
current_line + longest_slice; } (true, true, None) => { // Slice continues and we add the opposite to removed eprintln!( "Slice continues at {} in the new document and we need to add the opposite line", current_line + slice_length ); // If our position in the slice is less than the current position in the new file if previous_line + slice_length < current_line { // Remove items that were previously considered removed since we have not yet reached a point where previous doesn't point to unprocessed lines eprintln!( "Removing previous {} from removed lines since line {} in original should be part of the slice and is less than {} which is the current line position in the new document", &original_file[previous_line + slice_length], previous_line + slice_length, current_line ); lines_removed.remove(&original_file[previous_line + slice_length]); } // new_line + slice_length eprintln!( "Adding {} at line {} to removed lines", original_file[current_line + slice_length], current_line + slice_length ); lines_removed.insert( original_file[current_line + slice_length].clone(), current_line + slice_length, ); slice_length += 1; } (true, false, None) => { // Slice continues but we do not add the opposite to removed eprintln!( "Slice continues at {} in the new document", current_line + slice_length ); // If our position in the slice is less than the current position in the new file if previous_line + slice_length < current_line { // Remove items that were previously considered removed since we have not yet reached a point where previous doesn't point to unprocessed lines eprintln!( "Removing previous {} from removed lines since line {} in original should be part of the slice and is less than {} which is the current line position in the new document", &original_file[previous_line + slice_length], previous_line + slice_length, current_line ); lines_removed.remove(&original_file[previous_line + slice_length]); } slice_length += 1; } (false, _, _) => break, // If the original slice 
stops as we detect another slice we dont care we deal with that on the next iteration } } current_line + slice_length } #[cfg(test)] pub fn file_patch( original_path: &path::Path, edited_path: &path::Path, ) -> (Vec<usize>, Vec<usize>) { use rand::Rng; use std::hash::Hasher; // TODO: Just for testing diffing algorithms let mut original_data = fs::OpenOptions::new() .read(true) .open(original_path) .unwrap(); let new_data = fs::OpenOptions::new().read(true).open(edited_path).unwrap(); let original_reader = io::BufReader::new(original_data); let new_reader = io::BufReader::new(new_data); let original_lines = original_reader.lines(); let new_lines = new_reader.lines(); // let kl = original_lines.zip(new_lines); // TODO: Need to hash the lines let mut rng = rand::thread_rng(); let key1: u128 = rng.gen(); let key2: u128 = rng.gen(); let hasher = ahash::AHasher::new_with_keys(key1, key2); let original_file: Vec<String> = original_lines .map(|line| { line.unwrap() // TODO: We need to trim all spaces // let mut hash = ahash::AHasher::new_with_keys(valid_line.len() as u64, key1); // hash.write(valid_line.as_bytes()); // hash.finish() }) .collect(); let edited_file: Vec<String> = new_lines .map(|line| { line.unwrap() // let mut hash = ahash::AHasher::new_with_keys(valid_line.len() as u64, key1); // hash.write(valid_line.as_bytes()); // hash.finish() }) .collect(); println!("Original File {:?}", original_file); println!("Edited File {:?}", edited_file); let mut lines_inserted = HashMap::new(); let mut lines_removed = HashMap::new(); let mut lines_moved = HashMap::new(); // Lines that were thought to have been removed but were moved // Lines that were thought to have been inserted but were moved // We only use the results for each section we process from one of the above move hash maps let mut current_line: usize = 0; // TODO: Do we replace this loop with an iterator // NOTE: We can precompute lines to process by max file - min file // NOTE: Then we can process the remainder 
in a following match statement and merge the results // NOTE: if max_file = original then process original rem // NOTE: if max_file = edited then process edited rem // TODO: Simplify this to an iterator followed by another iterator, iterate over all lines in both files and then lines present in one of the files let total_lines = original_file.len().min(edited_file.len()); // Cant iterate over the lines directly because of overlapping sequences // Iterating over lines for single sequences would be possible with a custom iterator but gains us nothing while current_line < total_lines { println!("Scanning line {} in both files", current_line); println!( "Original: {} Edited: {}", original_file[current_line], edited_file[current_line] ); if original_file[current_line] == edited_file[current_line] { // Matching lines are trivial as there is nothing to do so move to next line current_line += 1; } else { // Compares the line from the original text with any previous unmatched lines from the new text and vice versa // NOTE: Lines that were considered removed will be found in new_data since removed lines were present in original but not in changed // NOTE: Lines that were considered inserted will be found in original since inserted lines were present in changed but not in original match ( lines_removed.get(&edited_file[current_line]), lines_inserted.get(original_file[current_line].as_str()), ) { (Some(previously_removed), Some(previously_inserted)) => { // Both lines have been seen so two slices are overlapping println!("Both have been seen so guarenteed overlapping move"); unimplemented!("Overlapping sequences not supported yet"); //return WorkingDirectory::process_overlapping(6); // We check both slices until one of them ends, the one that ends first is the one we use // Here the two overlapping slices begin at the same point current_line += 1; } (Some(previously_removed), None) => { // Example of match // A K // B L // C M // D A <- Previously thought to be removed // A 
line in the original file has been seen before // Get the line number where we previously saw this line let previous_line = *previously_removed; eprintln!("A line previously thought to be removed has been seen, {} has been seen at line {}, {} has not", edited_file[current_line].as_str(), previous_line, original_file[current_line].as_str()); println!("Line before sequence: {}", current_line); current_line = WorkingDirectory::get_remove_sequence_edits( &mut lines_inserted, &mut lines_removed, &mut lines_moved, current_line, previous_line, edited_file.as_slice(), original_file.as_slice(), ); //num_iter.skip(new_line); //num_iter.next(); println!("Line after sequence: {}", current_line); } (None, Some(previously_inserted)) => { // Example of match // A K // B L // C M // D A <- Previously thought to be removed // A line in the original file has been seen before // Get the line number where we previously saw this line let previous_line = *previously_inserted; eprintln!("A line previously thought to be inserted has been seen, {} has been seen at line {}, {} has not", original_file[current_line].as_str(), previous_line, edited_file[current_line].as_str()); current_line = WorkingDirectory::get_insert_sequence_edits( &mut lines_inserted, &mut lines_removed, &mut lines_moved, current_line, previous_line, edited_file.as_slice(), original_file.as_slice(), ); } (None, None) => { // Neither of these lines have been seen before // So we they are prospective removed and inserted lines println!( "We have not seen {} at line {} or {} at line {} before", &original_file[current_line], current_line, &edited_file[current_line], current_line ); // This does not mean that these lines are not duplicates lines_inserted.insert(edited_file[current_line].clone(), current_line); lines_removed.insert(original_file[current_line].clone(), current_line); current_line += 1; } } println!("----------------------------------------------------------------------------------------------------"); } } // 
TODO: Currently the above function can process a slice that exceeds the length of one of the files // TODO: But only if the slice starts within the range of both files println!("Current line is {}", current_line); let appended_lines = current_line..edited_file.len().max(original_file.len()); if original_file.len() >= edited_file.len() { // Scan additional lines in original file for current_line in appended_lines { println!("Scanning line {} in original lines", current_line); println!( "Additional Line: {}", original_file[current_line] ); if lines_inserted.contains_key(&original_file[current_line]) { // This is part of a slice ie // ABCJUI // HFJXZKABC // However do we really save anything by scanning ahead instead of processing one at a time println!("Removing {} at line {} at the end of original file", original_file[current_line], current_line); lines_inserted .remove_entry(&original_file[current_line]); } else { lines_removed.insert(edited_file[current_line].clone(), current_line); } } } else { // Scan additional lines in edited for current_line in appended_lines { println!("Scanning line {} in edited lines", current_line); println!( "Additional Line: {}", edited_file[current_line] ); if lines_removed.contains_key(&edited_file[current_line]) { // This is part of a slice ie // ABCJUI // HFJXZKABC // However do we really save anything by scanning ahead instead of processing one at a time println!("Removing {} at line {} at the end of edited file", edited_file[current_line], current_line); lines_removed .remove_entry(&edited_file[current_line]); } else { lines_inserted.insert(edited_file[current_line].clone(), current_line); } } } println!("Inserted {:?}", lines_inserted); println!("Lines removed {:?}", lines_removed); return ( lines_removed .values() .map(|line_number| *line_number) .collect::<Vec<_>>(), lines_inserted .values() .map(|line_number| *line_number) .collect::<Vec<_>>(), ); } fn infinite_loop_method( current_line: usize, original_file: &mut Vec<String>, 
edited_file: &mut Vec<String>, ) { // loop { // // TODO: Remove this initial branch and instead compute the total iterations required as well as the additional reads required at the end for the longer file or use an iterator that returns None when the file has no more data // if current_line < original_file.len() && current_line < edited_file.len() { // if original_file[current_line] == edited_file[current_line] { // // Lines are the same, just move to the next set of lines // current_line += 1; // } else { // // Compares the line from the original text with any previous unmatched lines from the new text and vice versa // // NOTE: Lines that were considered removed will be found in new_data since removed lines were present in original but not in changed // // NOTE: Lines that were considered inserted will be found in original since inserted lines were present in changed but not in original // match ( // lines_removed.get(&edited_file[current_line]), // lines_inserted.get(original_file[current_line].as_str()), // ) { // (Some(previously_removed), Some(previously_inserted)) => { // // Both lines have been seen so two slices are overlapping // println!("Both have been seen so guarenteed overlapping move"); // unimplemented!("Overlapping sequences not supported yet"); // //return WorkingDirectory::process_overlapping(6); // // We check both slices until one of them ends, the one that ends first is the one we use // // Here the two overlapping slices begin at the same point // current_line += 1; // } // (Some(previously_removed), None) => { // // Example of match // // A K // // B L // // C M // // D A <- Previously thought to be removed // // A line in the original file has been seen before // // Get the line number where we previously saw this line // let previous_line = *previously_removed; // eprintln!("A line previously thought to be removed has been seen, {} has been seen at line {}, {} has not", edited_file[current_line].as_str(), previous_line, 
original_file[current_line].as_str()); // current_line = WorkingDirectory::get_remove_sequence_edits( // &mut lines_inserted, // &mut lines_removed, // current_line, // previous_line, // edited_file.as_slice(), // original_file.as_slice(), // ); // } // (None, Some(previously_inserted)) => { // // Example of match // // A K // // B L // // C M // // D A <- Previously thought to be removed // // A line in the original file has been seen before // // Get the line number where we previously saw this line // let previous_line = *previously_inserted; // eprintln!("A line previously thought to be inserted has been seen, {} has been seen at line {}, {} has not", original_file[current_line].as_str(), previous_line, edited_file[current_line].as_str()); // current_line = WorkingDirectory::get_insert_sequence_edits( // &mut lines_inserted, // &mut lines_removed, // current_line, // previous_line, // edited_file.as_slice(), // original_file.as_slice(), // ); // } // (None, None) => { // // Neither of these lines have been seen before // // So we they are prospective removed and inserted lines // println!( // "We have not seen {} at line {} or {} at line {} before", // &original_file[current_line], // current_line, // &edited_file[current_line], // current_line // ); // // This does not mean that these lines are not duplicates // // Check for a new_data[new_line] in lines_inserted and if so edit the entry with the an additional line // lines_inserted.insert(edited_file[current_line].clone(), current_line); // lines_removed.insert(original_file[current_line].clone(), current_line); // current_line += 1; // } // } // println!("----------------------------------------------------------------------------------------------------"); // } // } else if current_line < original_file.len() { // // We have run out of lines in the new file // // Check original line to see if it is listed in the inserted lines = becomes moved // if lines_inserted.contains_key(&original_file[current_line]) { 
// // This was a move and not a insert // // TODO: We can still have slices // let (previous_insert, previous_insert_line) = lines_inserted // .remove_entry(&original_file[current_line]) // .unwrap(); // println!("Line {} was moved not inserted", current_line); // lines_inserted_move // .insert(previous_insert, (current_line, previous_insert_line)); // } else { // lines_removed.insert(original_file[current_line].clone(), current_line); // } // current_line += 1; // } else if current_line < edited_file.len() { // // We have run out of lines in the old file // // Check new line to see if it is listed in the removed lines = becomes moved and check the index difference for alignment // if lines_removed.contains_key(&edited_file[current_line]) { // // TODO: We can still have slices // // We need to check for overlapping slices, otherwise we ignore them - but they can never overlap // // This was a move and not a removal // let (previous_remove, previous_remove_line) = lines_removed // .remove_entry(&edited_file[current_line]) // .unwrap(); // println!("Line {} was moved not removed", previous_remove_line); // lines_removed_move // .insert(previous_remove, (previous_remove_line, current_line)); // } else { // lines_inserted.insert(edited_file[current_line].clone(), current_line); // } // current_line += 1; // // Add new to inserted // } else { // // Nothing left to scan // break; // } // } } } enum SliceType { Overlapping(usize, usize), Simple(usize), } #[cfg(test)] mod tests { use super::RawZip; use super::{Move, ProspectiveDifference, ProspectiveMove}; use crate::hash::Hash; use crate::working::WorkingDirectory; use smallvec::SmallVec; use std::collections::HashMap; use testspace::Alphabet; use testspace::{TestSpace, TestSpaceFile}; #[test] fn basic_previously_removed_short_test() { let ts = TestSpace::new(); let mut original_file = ts.create_text_file(); original_file.append_line("A"); // 0 original_file.append_line("B"); // 1 original_file.append_line("C"); // 2 
        original_file.append_line("D"); // 3
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("J"); // 0
        changed_file.append_line("A"); // 1
        changed_file.append_line("B"); // 2
        changed_file.append_line("C"); // 3
        // ABCD
        // JABC
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines.len(), 1);
        assert_eq!(inserted_lines.len(), 1);
        assert_eq!(removed_lines, vec!(3)); // Line 3 in original document was removed
        assert_eq!(inserted_lines, vec!(0)); // Line 0 in edited document was inserted
    }

    #[test]
    fn basic_previously_removed_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("D"); // 3
        original_file.append_line("E"); // 4
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("J"); // 0
        changed_file.append_line("K"); // 1
        changed_file.append_line("A"); // 2
        changed_file.append_line("B"); // 3
        changed_file.append_line("C"); // 4
        // ABCDE
        // JKABC
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines, vec!(3, 4)); // Line 3 and 4 in original document was removed
        assert_eq!(inserted_lines, vec!(0, 1)); // Line 0 and 1 in edited document was inserted
    }

    #[test]
    fn length_of_one_sequence_removed_test() {
        // Sequence is only one long
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("P"); // 3
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("K"); // 0
        changed_file.append_line("A"); // 1
        changed_file.append_line("S"); // 2
        changed_file.append_line("T"); // 3
        // ABCP
        // KAST
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines, vec!(1, 2, 3)); // Lines 1, 2 and 3 in original document were removed (comment corrected; was copy-pasted "3 and 4")
        assert_eq!(inserted_lines, vec!(0, 2, 3)); // Lines 0, 2 and 3 in edited document were inserted (comment corrected; was copy-pasted "0 and 1")
    }

    #[test]
    fn basic_previously_removed_more_overlap_test() {
        // Tests appropriate response to multiple overlapping lines
        // ie A and B in changed file overlap with sequence in original in 2 places not one
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("D"); // 3
        original_file.append_line("E"); // 4
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("J"); // 0
        changed_file.append_line("A"); // 1
        changed_file.append_line("B"); // 2
        changed_file.append_line("C"); // 3
        changed_file.append_line("F"); // 4
        // ABCDE
        // JABCF
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines, vec!(3, 4)); // Line 3 and 4 in original document was removed
        assert_eq!(inserted_lines, vec!(0, 4)); // Lines 0 and 4 in edited document were inserted (comment corrected; was copy-pasted "0 and 1")
    }

    #[test]
    fn length_of_one_sequence_inserted_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("B"); // 0
        original_file.append_line("A"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("P"); // 3
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("A"); // 0
        changed_file.append_line("K"); // 1
        changed_file.append_line("S"); // 2
        changed_file.append_line("T"); // 3
        // BACP
        // AKST
        // (data sketch corrected; was copy-pasted "ABCP / KAST" from the removed variant)
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines, vec!(0, 2, 3));
        assert_eq!(inserted_lines, vec!(1, 2, 3));
    }

    #[test]
    fn non_interacting_sequence_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("D"); // 3
        original_file.append_line("G"); // 4
        original_file.append_line("H"); // 5
        original_file.append_line("I"); // 6
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("Y"); // 0
        changed_file.append_line("V"); // 1
        changed_file.append_line("Z"); // 2
        changed_file.append_line("X"); // 3
        changed_file.append_line("A"); // 4
        changed_file.append_line("B"); // 5
        changed_file.append_line("C"); // 6
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines, vec!(3, 4, 5, 6));
        assert_eq!(inserted_lines, vec!(0, 1, 2, 3));
    }

    #[test]
    fn basic_previously_inserted_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("J"); // 0
        original_file.append_line("K"); // 1
        original_file.append_line("A"); // 2
        original_file.append_line("B"); // 3
        original_file.append_line("C"); // 4
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("A"); // 0
        changed_file.append_line("B"); // 1
        changed_file.append_line("C"); // 2
        changed_file.append_line("D"); // 3
        changed_file.append_line("E"); // 4
        // JKABC
        // ABCDE
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines, vec!(0, 1));
        assert_eq!(inserted_lines, vec!(3, 4));
    }

    #[test]
    fn removed_uneven_lengths_test() {
        // NOTE: This uses the removed path because the slice is opposite items that would have maybe been removed
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("D"); // 0
        changed_file.append_line("E"); // 1
        changed_file.append_line("A"); // 2
        changed_file.append_line("B"); // 3
        changed_file.append_line("C"); // 4
        // ABC
        // DEABC
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(removed_lines.len(), 0);
        assert_eq!(inserted_lines, vec!(0, 1));
    }

    #[test]
    fn inserted_uneven_lengths_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("D"); // 0
        original_file.append_line("E"); // 1
        original_file.append_line("A"); // 2
        original_file.append_line("B"); // 3
        original_file.append_line("C"); // 4
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("A"); // 0
        changed_file.append_line("B"); // 1
        changed_file.append_line("C"); // 2
        // ABC
        // DEABC
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(inserted_lines.len(), 0);
        assert_eq!(removed_lines, vec!(0, 1));
    }

    #[test]
    fn advanced_uneven_lengths_test() {
        // Tests the case where the sequence does not overlap with the other file
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("D"); // 0
        original_file.append_line("E"); // 1
        original_file.append_line("F"); // 2 (index comment corrected; was a duplicated "1")
        original_file.append_line("A"); // 3
        original_file.append_line("B"); // 4
        original_file.append_line("C"); // 5
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("A"); // 0
        changed_file.append_line("B"); // 1
        changed_file.append_line("C"); // 2
        // ABC
        // DEFABC
        let (mut removed_lines, mut inserted_lines) =
            WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
        removed_lines.sort();
        inserted_lines.sort();
        assert_eq!(inserted_lines.len(), 0);
        assert_eq!(removed_lines, vec!(0, 1, 2));
    }

    #[test]
    fn basic_overlapping_midsequence_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("M"); // 3 = Overlapping sequence starts here
        original_file.append_line("N"); // 4
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("M"); // 0
        changed_file.append_line("N"); // 1
        changed_file.append_line("A"); // 2
        changed_file.append_line("B"); // 3
        changed_file.append_line("C"); // 4
        // ABCMN
        // MNABC
        // (data sketch corrected; was copy-pasted "JKABC / ABCDE" from another test)
        // This overlapping sequence is triggered mid sequence
        WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
    }

    #[test]
    fn advanced_overlapping_midsequence_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("D"); // 3
        original_file.append_line("E"); // 4
        original_file.append_line("F"); // 5
        original_file.append_line("M"); // 6
        original_file.append_line("N"); // 7
        original_file.append_line("X"); // 8
        original_file.append_line("Y"); // 9
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("M"); // 0
        changed_file.append_line("N"); // 1
        changed_file.append_line("O"); // 2
        changed_file.append_line("P"); // 3
        changed_file.append_line("Q"); // 4
        changed_file.append_line("A"); // 5
        changed_file.append_line("B"); // 6 = Overlapping sequence starts here
        changed_file.append_line("C"); // 7
        changed_file.append_line("D"); // 8
        changed_file.append_line("E"); // 9
        changed_file.append_line("F"); // 10
        // Should check indexes from 6..9
        // ABCDEFMNXY
        // MNOPQABCDEF
        // This overlapping sequence is triggered mid sequence
        WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
    }

    // TODO: Lots more tests for overlapping sequences
    #[test]
    fn basic_overlapping_sequence_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("M"); // 3 = Overlapping sequence starts here
        original_file.append_line("N"); // 4
        original_file.append_line("O"); // 5
        let mut changed_file = ts.create_text_file();
        changed_file.append_line("M"); // 0
        changed_file.append_line("N"); // 1
        changed_file.append_line("O"); // 2
        changed_file.append_line("A"); // 3
        changed_file.append_line("B"); // 4
        changed_file.append_line("C"); // 5
        // ABCMNO
        // MNOABC
        // (data sketch corrected; was copy-pasted "JKABC / ABCDE" from another test)
        // This overlapping sequence is triggered on a new line
        WorkingDirectory::file_patch(original_file.get_path(), changed_file.get_path());
    }

    #[test]
    fn advanced_sequence_diff_test() {
        let ts = TestSpace::new();
        let mut original_file = ts.create_text_file();
        original_file.append_line("A"); // 0
        original_file.append_line("B"); // 1
        original_file.append_line("C"); // 2
        original_file.append_line("D"); // 3
        original_file.append_line("E"); // 4
        original_file.append_line("F"); // 5
        original_file.append_line("G"); // 6
        original_file.append_line("H"); // 7
        original_file.append_line("I"); // 8
        original_file.append_line("J"); // 9
        original_file.append_line("K"); // 10
        let mut new_file = original_file.create_copy();
        // ABCDEFGHIJK =>
        // JKEABCDFGHI
        // Results should be E Moved and JK Moved
        new_file.move_line(4, 0);
        new_file.move_line(9, 0);
        new_file.move_line(10, 1);
        // WorkingDirectory::file_patch()
        WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path());
    }

    #[test]
    fn diff_test() {
        let ts =
TestSpace::new().allow_cleanup(false); let mut original_file = ts.create_text_file(); original_file.append_line("A"); original_file.append_line("B"); original_file.append_line("C"); original_file.append_line("D"); let mut new_file = original_file.create_copy(); // ABCD => BACD new_file.swap_lines(0, 1); // WorkingDirectory::file_patch() WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path()); } #[test] fn diff_test_2() { let ts = TestSpace::new().allow_cleanup(false); let mut original_file = ts.create_text_file(); original_file.append_line("A"); original_file.append_line("B"); original_file.append_line("C"); original_file.append_line("D"); let mut new_file = original_file.create_copy(); new_file.swap_lines(1, 3); WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path()); } #[test] fn diff_test_3() { let ts = TestSpace::new().allow_cleanup(false); let mut original_file = ts.create_text_file(); original_file.append_line("A"); original_file.append_line("B"); original_file.append_line("C"); original_file.append_line("D"); let mut new_file = original_file.create_copy(); new_file.move_line(1, 3); // A C D B WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path()); } #[test] fn diff_test_4() { let ts = TestSpace::new().allow_cleanup(false); let mut original_file = ts.create_text_file(); original_file.append_line("A"); original_file.append_line("C"); original_file.append_line("D"); original_file.append_line("B"); let mut new_file = original_file.create_copy(); new_file.move_line(3, 1); // A B C D // WorkingDirectory::file_patch() WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path()); } #[test] fn diff_test_5() { let ts = TestSpace::new().allow_cleanup(false); let mut original_file = ts.create_text_file(); original_file.append_line("A line was removed"); original_file.append_line("C wasn't moved"); original_file.append_line("D wasn't moved either"); original_file.append_line("B was moved"); let 
mut new_file = original_file.create_copy(); new_file.remove_line(0); // C D B new_file.move_line(2, 0); // B C D // WorkingDirectory::file_patch() WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path()); } #[test] fn diff_test_6() { let ts = TestSpace::new().allow_cleanup(false); let mut original_file = ts.create_text_file(); original_file.append_line("A"); original_file.append_line("B"); original_file.append_line("C"); original_file.append_line("D"); let mut new_file = original_file.create_copy(); new_file.move_line(2, 1); // ACBD new_file.insert_line(3, "A"); // ACBAD // WorkingDirectory::file_patch() WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path()); } #[test] fn diff_test_7() { let ts = TestSpace::new().allow_cleanup(false); let mut original_file = ts.create_text_file(); original_file.append_line("A"); original_file.append_line("B"); original_file.append_line("C"); original_file.append_line("D"); original_file.append_line("E"); original_file.append_line("A"); original_file.append_line("B"); original_file.append_line("C"); let mut new_file = original_file.create_copy(); new_file.move_line(1, 7); // ACDEABCB // WorkingDirectory::file_patch() WorkingDirectory::file_patch(original_file.get_path(), new_file.get_path()); } #[test] fn create_metadata_list_test() { let mut ts = TestSpace::new(); let file_list = ts.create_random_files(2, 4096); // TODO: Finish test let metadata = WorkingDirectory::create_metadata_list(file_list) .expect("Failed to create metadata list"); // println!("Data is: {:?}", metadata); for file in metadata { assert_eq!(file.filesize(), 4096); } } #[test] fn index_directory_test() { let mut ts = TestSpace::new(); let mut ts2 = ts.create_child(); ts.create_random_files(5, 4096); let path_to_repository = ts2.get_path(); let path_to_working = ts.get_path(); let ti = WorkingDirectory::new(path_to_working); let mut files_found = HashMap::new(); let mut directories = Vec::new(); 
WorkingDirectory::index_directory(path_to_working, &mut files_found, &mut directories) .expect("Failed to index directory"); println!("Files found"); assert_eq!(files_found.len(), 5); for data in files_found { assert_eq!(data.1.file_size, 4096); } } #[test] fn index_repository_test() { let mut ts = TestSpace::new(); let mut ts2 = ts.create_child(); // Create fake repository files ts2.create_random_files(5, 4096); let mut ts3 = ts.create_child(); ts3.create_random_files(4, 4096); ts.create_random_files(3, 4096); let path_to_repository = ts2.get_path(); let path_to_working = ts.get_path(); let ti = WorkingDirectory::new(path_to_working); let files = ti.index_repository().expect("Failed to index repository"); println!("Files found"); assert_eq!(files.len(), 12); for data in files { assert_eq!(data.1.file_size, 4096); } } #[test] fn get_changed_files_test() { use rand::prelude::*; use std::path; let mut rng = thread_rng(); let mut ts = TestSpace::new(); let original_files = ts.create_random_files(6, 4096); let path_to_working = ts.get_path(); let ti = WorkingDirectory::new(path_to_working); let working_state = WorkingDirectory::create_metadata_list(original_files.clone()) .expect("Failed to process original files"); // Change some of the files // Generate a list of files to change from the list of files let mut files_to_change: Vec<&path::Path> = original_files .choose_multiple(&mut rng, 3) .map(|x| x.as_path()) .collect(); // Change the files in the new list for file_to_change in &files_to_change { TestSpaceFile::from(*file_to_change).write_random_bytes(2048); } let mut result = ti.get_changed_files(working_state.as_slice()); // Check that the returned files match the ones we changed println!("List of files: {:?}", original_files.as_slice()); println!("List of files changed: {:?}", files_to_change.as_slice()); println!("Detected files that were changed: {:?}", result.as_slice()); // Sort both of the lists as the order is not guarenteed to be the same result.sort(); 
files_to_change.sort(); assert_eq!(result, files_to_change); } #[test] fn get_changed_files_since_snapshot_test() { unimplemented!("Test not done"); // Take a snapshot // Change some files // Check what changed with what was changed } }
use std::collections::HashSet;
use std::fs::File;
use std::io::{BufRead, BufReader};

/// Returns the first running total (starting from 0) that occurs twice while
/// repeatedly cycling through `freqs` (Advent of Code 2018, day 1, part 2).
///
/// The starting value 0 counts as already seen, so e.g. `[1, -1]` yields
/// `Some(0)`. Returns `None` for an empty slice (which can never repeat);
/// non-empty input with no repeating total loops forever, matching the
/// original program's behavior.
fn first_repeated_frequency(freqs: &[i64]) -> Option<i64> {
    let mut seen = HashSet::new();
    let mut current = 0i64;
    seen.insert(current);
    for delta in freqs.iter().cycle() {
        current += delta;
        // `insert` returns false when the value was already present; this
        // replaces the original contains-then-insert double lookup.
        if !seen.insert(current) {
            return Some(current);
        }
    }
    // Only reachable when `freqs` is empty (`cycle` of an empty iterator
    // yields nothing); the original program spun forever in that case.
    None
}

fn main() {
    let file = File::open("input").expect("Failed to open file");
    let reader = BufReader::new(file);
    // One signed frequency change per line.
    let freqs: Vec<i64> = reader
        .lines()
        .map(|line| {
            line.expect("Failed to read line")
                .parse::<i64>()
                .expect("Failed to parse frequency")
        })
        .collect();
    match first_repeated_frequency(&freqs) {
        Some(repeated) => println!("Repeated frequency: {}", repeated),
        None => eprintln!("Input contained no frequency changes"),
    }
}
extern crate rustc_hex; extern crate wasmi; #[macro_use] extern crate clap; use std::time::{Duration, Instant}; use rustc_hex::FromHex; use serde::{Deserialize, Serialize}; use std::env; use std::fs::File; use wasmi::memory_units::Pages; use wasmi::{ Error as InterpreterError, Externals, FuncInstance, FuncRef, ImportsBuilder, MemoryInstance, MemoryRef, Module, ModuleImportResolver, ModuleInstance, NopExternals, RuntimeArgs, RuntimeValue, Signature, Trap, TrapKind, ValueType, }; use clap::{Arg, App, SubCommand}; struct Runtime<'a> { memory: Option<MemoryRef>, block_data: &'a [u8], } impl<'a> Runtime<'a> { fn new( block_data: &'a [u8], memory: Option<MemoryRef>, ) -> Runtime<'a> { Runtime { memory: if memory.is_some() { memory } else { // Allocate a single page if no memory was exported. Some(MemoryInstance::alloc(Pages(1), Some(Pages(1))).unwrap()) }, block_data: block_data } } } impl<'a> Externals for Runtime<'a> { fn invoke_index( &mut self, index: usize, args: RuntimeArgs, ) -> Result<Option<RuntimeValue>, Trap> { //println!("invoking index {}", index); match index { // TODO why are these indices reversed compared to the order of the functions in the wat 1 => { let ret: i32 = self.block_data.len() as i32; //println!("blockdatasize {}", ret); Ok(Some(ret.into())) }, 0 => { let ptr: u32 = args.nth(0); let offset: u32 = args.nth(1); let length: u32 = args.nth(2); println!( "blockdatacopy to {} from {} for {} bytes", ptr, offset, length ); // TODO: add overflow check let offset = offset as usize; let length = length as usize; // TODO: add checks for out of bounds access let memory = self.memory.as_ref().expect("expects memory object"); memory .set(ptr, &self.block_data[offset..length]) .expect("expects writing to memory to succeed"); Ok(None) }, PUSHNEWDEPOSIT_FUNC_INDEX => unimplemented!(), _ => panic!("unknown function index"), } } } struct RuntimeModuleImportResolver; impl<'a> ModuleImportResolver for RuntimeModuleImportResolver { fn resolve_func( &self, 
field_name: &str, _signature: &Signature, ) -> Result<FuncRef, InterpreterError> { let func_ref = match field_name { "inputDataCopy" => FuncInstance::alloc_host( Signature::new(&[ValueType::I32, ValueType::I32, ValueType::I32][..], None), 0, ), "getInputDataSize" => FuncInstance::alloc_host( Signature::new(&[][..], Some(ValueType::I32)), 1, ), _ => { return Err(InterpreterError::Function(format!( "host module doesn't export function with name {}", field_name ))) } }; Ok(func_ref) } } pub fn execute_code( code: &[u8], block_data: &[u8], ) { let module = Module::from_buffer(&code).expect("Module loading to succeed"); let mut imports = ImportsBuilder::new(); // FIXME: use eth2 imports.push_resolver("env", &RuntimeModuleImportResolver); let instance = ModuleInstance::new(&module, &imports) .expect("Module instantation expected to succeed") .assert_no_start(); let internal_mem = instance .export_by_name("memory") .expect("Module expected to have 'memory' export") .as_memory() .cloned() .expect("'memory' export should be a memory"); let mut runtime = Runtime::new(block_data, Some(internal_mem)); let now = Instant::now(); let result = instance .invoke_export("main", &[], &mut runtime) .expect("Executed 'main'"); println!("execution time: {} microseconds", now.elapsed().as_micros()); } fn load_file(filename: &str) -> Vec<u8> { use std::io::prelude::*; let mut file = File::open(filename).expect("loading file failed"); let mut buf = Vec::new(); file.read_to_end(&mut buf).expect("reading file failed"); buf } fn main() { let matches = App::new("My Super Program") .version("1.0") .author("Kevin K. 
<kbknapp@gmail.com>") .about("Does awesome things") .arg(Arg::with_name("wasmfile") .short("w") .long("wasmfile") .value_name("WASM_FILE") .help("provides the location to a wasm source file") .takes_value(true)) .arg(Arg::with_name("input") .short("i") .long("input") .value_name("INPUT") .help("input (hex) to be provided to the engine") .takes_value(true)) .get_matches(); let source = matches.value_of("wasmfile").expect("wasm file argument missing"); let input = matches.value_of("input").expect("execution input argument missing"); println!("input is {}", &input); let code = load_file(&source); let block_data = input.from_hex().expect("could decode input from hex");//[0u8; 32]; execute_code(&code, &block_data); }
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter::FromIterator;

use petgraph::unionfind::UnionFind;

use crate::graph::SuccessorNodes;

/// Index of a node inside a [`WeakPartialOrdering`].
pub type WpoIdx = u32;

/// Bookkeeping shared by every WPO node kind: the underlying graph node,
/// the component size, and the scheduling edges between WPO nodes.
pub struct WpoNodeData<NodeId: Copy + Hash + Ord> {
    node: NodeId,
    size: usize,
    successors: BTreeSet<WpoIdx>,
    // NOTE: field name is a long-standing misspelling of "predecessors".
    predessors: BTreeSet<WpoIdx>,
    // Exit nodes only: per-predecessor count of edges coming from outside
    // the component (see `inc_num_outer_preds`).
    num_outer_preds: HashMap<WpoIdx, u32>,
}

impl<NodeId> WpoNodeData<NodeId>
where
    NodeId: Copy + Hash + Ord,
{
    /// Creates empty data for `node` with the given component `size`.
    pub fn new(node: NodeId, size: usize) -> Self {
        Self {
            node,
            size,
            successors: BTreeSet::new(),
            predessors: BTreeSet::new(),
            num_outer_preds: HashMap::new(),
        }
    }
}

/// The three kinds of WPO nodes: component heads, plain nodes, and the
/// synthetic exit paired with each head.
#[derive(Debug, PartialEq, Eq)]
pub enum WpoNodeType {
    Head,
    Plain,
    Exit,
}

pub struct WpoNode<NodeId: Copy + Ord + Hash> {
    ty: WpoNodeType,
    data: WpoNodeData<NodeId>,
}

impl<NodeId> WpoNode<NodeId>
where
    NodeId: Copy + Ord + Hash + Debug,
{
    /// Convenience constructor for a `Plain` node.
    pub fn plain(node: NodeId, size: usize) -> Self {
        Self {
            ty: WpoNodeType::Plain,
            data: WpoNodeData::new(node, size),
        }
    }

    /// Convenience constructor for a `Head` node.
    pub fn head(node: NodeId, size: usize) -> Self {
        Self {
            ty: WpoNodeType::Head,
            data: WpoNodeData::new(node, size),
        }
    }

    /// Convenience constructor for an `Exit` node.
    pub fn exit(node: NodeId, size: usize) -> Self {
        Self {
            ty: WpoNodeType::Exit,
            data: WpoNodeData::new(node, size),
        }
    }

    pub fn new(ty: WpoNodeType, node: NodeId, size: usize) -> Self {
        Self {
            ty,
            data: WpoNodeData::new(node, size),
        }
    }

    pub fn is_plain(&self) -> bool {
        self.ty == WpoNodeType::Plain
    }

    pub fn is_head(&self) -> bool {
        self.ty == WpoNodeType::Head
    }

    pub fn is_exit(&self) -> bool {
        self.ty == WpoNodeType::Exit
    }

    pub fn get_node(&self) -> NodeId {
        self.data.node
    }

    pub fn get_successors(&self) -> &BTreeSet<WpoIdx> {
        &self.data.successors
    }

    pub fn get_predecessors(&self) -> &BTreeSet<WpoIdx> {
        &self.data.predessors
    }

    pub fn get_num_preds(&self) -> u32 {
        self.get_predecessors().len() as u32
    }

    /// Exit nodes only (asserts otherwise): outer-predecessor counts.
    pub fn get_num_outer_preds(&self) -> &HashMap<WpoIdx, u32> {
        assert_eq!(
            self.ty,
            WpoNodeType::Exit,
            "Node {:#?} is not Exit",
            self.data.node
        );
        &self.data.num_outer_preds
    }

    pub fn get_size(&self) -> usize {
        self.data.size
    }

    fn add_successor(&mut self, idx: WpoIdx) {
        self.data.successors.insert(idx);
    }

    fn add_predecessor(&mut self, idx: WpoIdx) {
        self.data.predessors.insert(idx);
    }

    fn is_successor(&self, idx: WpoIdx) -> bool {
        self.get_successors().contains(&idx)
    }

    /// Exit nodes only (asserts otherwise): bump the outer-predecessor count
    /// for `idx`.
    pub fn inc_num_outer_preds(&mut self, idx: WpoIdx) {
        assert_eq!(
            self.ty,
            WpoNodeType::Exit,
            "Node {:#?} is not Exit",
            self.data.node
        );
        *self.data.num_outer_preds.entry(idx).or_default() += 1;
    }
}

pub struct WeakPartialOrdering<NodeId: Copy + Hash + Ord> {
    /// All nodes under WPO.
    nodes: Vec<WpoNode<NodeId>>,
    /// All top level nodes.
    toplevel: Vec<WpoIdx>,
    /// Post depth first numbering for each node.
    post_dfn: HashMap<NodeId, u32>,
}

impl<NodeId> WeakPartialOrdering<NodeId>
where
    NodeId: Copy + Hash + Ord + Debug,
{
    /// Builds the WPO for the graph reachable from `root`.
    ///
    /// A root with no successors is special-cased as a single plain node;
    /// otherwise construction is delegated to `WeakPartialOrderingImpl`.
    pub fn new<SN>(root: NodeId, size: usize, successors_nodes: &SN) -> Self
    where
        SN: SuccessorNodes<NodeId = NodeId>,
    {
        if successors_nodes.get_succ_nodes(root).is_empty() {
            let mut wpo = Self {
                nodes: vec![],
                toplevel: vec![],
                post_dfn: HashMap::new(),
            };
            wpo.nodes.push(WpoNode::plain(root, 1));
            wpo.toplevel.push(0);
            wpo.post_dfn.insert(root, 1);
            wpo
        } else {
            WeakPartialOrderingImpl::new().build(size, root, successors_nodes)
        }
    }

    pub fn size(&self) -> usize {
        self.nodes.len()
    }

    /// The entry node is always the last one pushed during construction.
    pub fn get_entry(&self) -> WpoIdx {
        (self.nodes.len() - 1) as WpoIdx
    }

    pub fn get_successors(&self, idx: WpoIdx) -> &BTreeSet<WpoIdx> {
        self.nodes[idx as usize].get_successors()
    }

    pub fn get_predecessors(&self, idx: WpoIdx) -> &BTreeSet<WpoIdx> {
        self.nodes[idx as usize].get_predecessors()
    }

    pub fn get_num_preds(&self, idx: WpoIdx) -> u32 {
        self.nodes[idx as usize].get_num_preds()
    }

    pub fn get_num_outer_preds(&self, exit: WpoIdx) -> &HashMap<WpoIdx, u32> {
        self.nodes[exit as usize].get_num_outer_preds()
    }

    // Exit and head are allocated adjacently in `build` (exit first, then
    // head), hence the +1/-1 arithmetic below.
    pub fn get_head_of_exit(&self, exit: WpoIdx) -> WpoIdx {
        exit + 1
    }

    pub fn get_exit_of_head(&self, head: WpoIdx) -> WpoIdx {
        head - 1
    }

    pub fn get_node(&self, idx: WpoIdx) -> NodeId {
        self.nodes[idx as usize].get_node()
    }

    pub fn is_plain(&self, idx: WpoIdx) -> bool {
        self.nodes[idx as usize].is_plain()
    }

    pub fn is_head(&self, idx: WpoIdx) -> bool {
        self.nodes[idx as usize].is_head()
    }

    pub fn is_exit(&self, idx: WpoIdx) -> bool {
        self.nodes[idx as usize].is_exit()
    }

    /// True when the edge `pred -> head` comes from outside `head`'s
    /// component, judged by post depth-first numbering.
    pub fn is_from_outside(&self, head: NodeId, pred: NodeId) -> bool {
        self.get_post_dfn(head) < self.get_post_dfn(pred)
    }

    fn get_post_dfn(&self, n: NodeId) -> u32 {
        // If the key does not exist, meaning that node is not
        // finished yet, return default value 0.
        self.post_dfn.get(&n).copied().unwrap_or_default()
    }
}

// This private type is only used to build the actual WPO.
struct WeakPartialOrderingImpl<NodeId: Copy + Hash + Ord> {
    nodes: Vec<WpoNode<NodeId>>,
    toplevel: Vec<WpoIdx>,
    post_dfn: HashMap<NodeId, u32>,
    // A map from NodeId to post DFN.
    dfn: HashMap<NodeId, u32>,
    dfn_to_node: Vec<NodeId>,
    // Cross/forward edges, keyed by the lowest common ancestor of endpoints.
    cross_fwd_edges: HashMap<u32, Vec<(u32, u32)>>,
    back_preds: HashMap<u32, Vec<u32>>,
    // Tree edges (map from node to its predecessors).
    non_back_preds: HashMap<u32, Vec<u32>>,
    next_dfn: u32,
    // Map from dfn to WpoIdx
    dfn_to_wpo_idx: Vec<WpoIdx>,
    // Next WpoIdx to assign
    next_idx: WpoIdx,
}

impl<NodeId> WeakPartialOrderingImpl<NodeId>
where
    NodeId: Copy + Hash + Ord + Debug,
{
    pub fn new() -> Self {
        // I really don't want to add `Default` bound to `NodeId`, so let's
        // have a bit tedious code here to give user side more flexibility.
        Self {
            next_dfn: 1u32,
            nodes: vec![],
            toplevel: vec![],
            post_dfn: HashMap::new(),
            dfn: HashMap::new(),
            dfn_to_node: vec![],
            cross_fwd_edges: HashMap::new(),
            back_preds: HashMap::new(),
            non_back_preds: HashMap::new(),
            dfn_to_wpo_idx: vec![],
            next_idx: 0,
        }
    }

    /// Appends a WPO node for DFN `vertex` and records the WpoIdx assigned to
    /// `dfn_i`.
    fn add_node(&mut self, dfn_i: u32, vertex: u32, sz: u32, ty: WpoNodeType) {
        self.dfn_to_wpo_idx[dfn_i as usize] = self.next_idx;
        self.next_idx += 1;
        self.nodes.push(WpoNode::new(
            ty,
            // dfn reserves 0, so should subtract 1 here.
            self.dfn_to_node[vertex as usize - 1],
            sz as usize,
        ));
    }

    // NOTE(review): not called within this impl; possibly kept for future use.
    fn node_of(&mut self, dfn_i: u32) -> &mut WpoNode<NodeId> {
        let idx = self.index_of(dfn_i) as usize;
        &mut self.nodes[idx]
    }

    /// WpoIdx previously assigned to DFN `dfn_i` by `add_node`.
    fn index_of(&self, dfn_i: u32) -> u32 {
        self.dfn_to_wpo_idx[dfn_i as usize]
    }

    /// Adds the scheduling edge `from -> to` (by DFN), also recording
    /// `(to, exit)` for the later `num_outer_preds` pass when `outer_pred`.
    fn add_successor(
        &mut self,
        from: u32,
        to: u32,
        exit: u32,
        outer_pred: bool,
        for_outer_preds: &mut Vec<(WpoIdx, WpoIdx)>,
    ) {
        let from_idx = self.index_of(from);
        let to_idx = self.index_of(to);
        if !self.nodes[from_idx as usize].is_successor(to_idx) {
            if outer_pred {
                for_outer_preds.push((to_idx, self.index_of(exit)));
            }
            self.nodes[from_idx as usize].add_successor(to_idx);
            self.nodes[to_idx as usize].add_predecessor(from_idx);
        }
    }

    /// Iterative DFS from `root` that assigns DFNs and post-DFNs, classifies
    /// every edge as tree, back, or cross/forward, and records the lowest
    /// common ancestor of each cross/forward edge via union-find.
    fn build_auxilary<SN>(&mut self, size: usize, root: NodeId, successors_nodes: &SN)
    where
        SN: SuccessorNodes<NodeId = NodeId>,
    {
        // Since 0 is reserved for undiscovered nodes, the total number of nodes
        // would be size + 1.
        let mut dft_dsets = UnionFind::<u32>::new(size + 1);
        let mut stack = Vec::new();
        let mut next_post_dfn = 1u32;
        let mut visited = HashMap::new();
        let mut ancestor = HashMap::new();
        let get_dfn = |n: NodeId, dfn: &HashMap<NodeId, u32>| {
            // If the key does not exist, meaning that node is not
            // discovered yet, return default value 0.
            dfn.get(&n).copied().unwrap_or_default()
        };
        let set_dfn = |n: NodeId, num: u32, dfn: &mut HashMap<NodeId, u32>| {
            dfn.insert(n, num);
        };
        // Entries are (node, finished?, DFN of tree parent); a node is pushed
        // once for discovery and once for post-processing.
        stack.push((root, false, 0u32));
        while let Some((node, finished, pred)) = stack.pop() {
            if finished {
                self.post_dfn.insert(node, next_post_dfn);
                next_post_dfn += 1;
                let vertex = get_dfn(node, &self.dfn);
                visited.insert(vertex, true);
                dft_dsets.union(vertex, pred);
                ancestor.insert(dft_dsets.find_mut(pred), pred);
            } else {
                if get_dfn(node, &self.dfn) != 0 {
                    // Skip forward edges.
                    continue;
                }
                let vertex = self.next_dfn;
                self.next_dfn += 1;
                self.dfn_to_node.push(node);
                set_dfn(node, vertex, &mut self.dfn);
                ancestor.insert(vertex, vertex);
                stack.push((node, true, pred));
                let successors = successors_nodes.get_succ_nodes(node);
                for &succ_node in successors.iter().rev() {
                    let succ = get_dfn(succ_node, &self.dfn);
                    if 0 == succ {
                        // Undiscovered: becomes a tree edge.
                        stack.push((succ_node, false, vertex));
                    } else if visited.get(&succ).copied().unwrap_or_default() {
                        // Already finished: cross/forward edge, recorded under
                        // its lowest common ancestor.
                        let lca = ancestor.get(&dft_dsets.find_mut(succ)).copied().unwrap();
                        self.cross_fwd_edges
                            .entry(lca)
                            .or_default()
                            .push((vertex, succ));
                    } else {
                        // Discovered but unfinished: back edge.
                        self.back_preds.entry(succ).or_default().push(vertex);
                    }
                }
                if pred != 0 {
                    self.non_back_preds.entry(vertex).or_default().push(pred);
                }
            }
        }
        // Number of dfn should be equal or smaller (if there is unreachable node)
        // than graph size + 1 (number 0 for undiscovered).
        assert!(self.next_dfn as usize <= size + 1);
    }

    /// Constructs the final [`WeakPartialOrdering`] from the classified edges.
    fn build<SN>(
        mut self,
        size: usize,
        root: NodeId,
        successors_nodes: &SN,
    ) -> WeakPartialOrdering<NodeId>
    where
        SN: SuccessorNodes<NodeId = NodeId>,
    {
        // Step 1: construct auxilary data structures, including
        // classifying edges, finding lowest common ancestors
        // of cross/forward edges.
        self.build_auxilary(size, root, successors_nodes);
        // Step 2: start constructing WPO.
        let mut dsets = UnionFind::<u32>::new(self.next_dfn as usize);
        // Union find does not guarantee that the root of a subset has
        // always the minimum DFN, so we need to maintain this information.
        // Used for creating exit nodes.
        let mut exit_next_dfn = self.next_dfn;
        // Initialization.
        let mut rep: Vec<u32> = (0..self.next_dfn).collect();
        let mut exit: Vec<u32> = (0..self.next_dfn).collect();
        // origin[v]: edges (u, v') currently attributed to representative v.
        let mut origin: Vec<Vec<(u32, u32)>> = (0..self.next_dfn)
            .map(|v| {
                self.non_back_preds
                    .get(&v)
                    .map_or_else(std::vec::Vec::new, |non_back_preds_v| {
                        non_back_preds_v.iter().map(|&p| (p, v)).collect()
                    })
            })
            .collect();
        // Room for both the original DFNs and the synthetic exit DFNs.
        self.dfn_to_wpo_idx.resize(2 * self.next_dfn as usize, 0);
        let mut for_outer_preds = Vec::<(WpoIdx, WpoIdx)>::new();
        let mut components_sizes = vec![0u32; self.next_dfn as usize];
        let mut parent = HashMap::<WpoIdx, WpoIdx>::new();
        // In descending order, excluding 0 which is for undiscovered.
        for h in (1..self.next_dfn).rev() {
            // Restore cross/forward edges
            if let Some(edges) = self.cross_fwd_edges.get(&h) {
                for &(u, v) in edges {
                    let rep_v = rep[dsets.find(v) as usize];
                    self.non_back_preds.entry(rep_v).or_default().push(u);
                    origin[rep_v as usize].push((u, v));
                }
            }
            // Find nested SCCs.
            let mut is_scc = false;
            let mut backpreds_h = HashSet::<u32>::new();
            if let Some(preds) = self.back_preds.get(&h) {
                for &v in preds {
                    if v != h {
                        backpreds_h.insert(rep[dsets.find(v) as usize]);
                    } else {
                        // Self-loop: h is an SCC on its own.
                        is_scc = true;
                    }
                }
            }
            if !backpreds_h.is_empty() {
                is_scc = true;
            }
            // Collect every representative reachable backwards from the back
            // predecessors without leaving the component headed by h.
            let mut nested_sccs_h = backpreds_h.clone();
            let mut worklist_h = Vec::from_iter(backpreds_h.iter().copied());
            while let Some(v) = worklist_h.pop() {
                if let Some(preds) = self.non_back_preds.get(&v) {
                    for &p in preds {
                        let rep_p = rep[dsets.find(p) as usize];
                        if !nested_sccs_h.contains(&rep_p) && rep_p != h {
                            worklist_h.push(rep_p);
                            nested_sccs_h.insert(rep_p);
                        }
                    }
                }
            }
            // h represents a trivial SCC.
            if !is_scc {
                components_sizes[h as usize] = 1;
                self.add_node(h, h, 1, WpoNodeType::Plain);
                continue;
            }
            // Initialize size to 2 for head and exit.
            let mut sz_h = 2;
            for &v in nested_sccs_h.iter() {
                sz_h += components_sizes[v as usize];
            }
            components_sizes[h as usize] = sz_h;
            // Add new exit.
            let x_h = exit_next_dfn;
            exit_next_dfn += 1;
            self.add_node(x_h, h, sz_h, WpoNodeType::Exit);
            // Wpo index of head is then exit + 1 for the same component.
            self.add_node(h, h, sz_h, WpoNodeType::Head);
            if backpreds_h.is_empty() {
                // Scheduling constraints from h to x_h.
                self.add_successor(h, x_h, x_h, false, &mut for_outer_preds);
            } else {
                for p in backpreds_h {
                    self.add_successor(exit[p as usize], x_h, x_h, false, &mut for_outer_preds);
                }
            }
            // Scheduling constraints between WPOs for nested SCCs.
            for &v in nested_sccs_h.iter() {
                for &(u, vv) in origin[v as usize].iter() {
                    let x_u = exit[rep[dsets.find(u) as usize] as usize];
                    let x_v = exit[v as usize];
                    self.add_successor(x_u, vv, x_v, x_v != v, &mut for_outer_preds);
                }
            }
            // Merging all reps in nested SCCs to h
            for &v in nested_sccs_h.iter() {
                dsets.union(v, h);
                rep[dsets.find(v) as usize] = h;
                parent.insert(self.index_of(v), self.index_of(h));
            }
            exit[h as usize] = x_h;
        }
        // Scheduling constraints between WPOs for maximal SCCs.
        self.toplevel.reserve(self.next_dfn as usize);
        for v in 1..self.next_dfn {
            if rep[dsets.find(v) as usize] == v {
                let v_idx = self.index_of(v);
                self.toplevel.push(v_idx);
                parent.insert(v_idx, v_idx);
                for &(u, vv) in origin[v as usize].iter() {
                    let x_u = exit[rep[dsets.find(u) as usize] as usize];
                    let x_v = exit[v as usize];
                    self.add_successor(x_u, vv, x_v, x_v != v, &mut for_outer_preds);
                }
            }
        }
        // Compute num_outer_preds.
        for &(v, x_max) in for_outer_preds.iter() {
            // Walk up the component nesting from v's head, bumping each
            // enclosing exit (head - 1) until x_max is reached.
            let mut h = if self.nodes[v as usize].is_head() {
                v
            } else {
                *parent.get(&v).unwrap()
            };
            let mut x = h - 1;
            while x != x_max {
                self.nodes[x as usize].inc_num_outer_preds(v);
                h = *parent.get(&h).unwrap();
                x = h - 1;
            }
            self.nodes[x as usize].inc_num_outer_preds(v);
        }
        WeakPartialOrdering {
            nodes: self.nodes,
            toplevel: self.toplevel,
            post_dfn: self.post_dfn,
        }
    }
}
//! [Rc<T>], the Reference Counted Smart Pointer //! //! [rc<t>]: https://doc.rust-lang.org/book/ch15-04-rc.html use std::rc::Rc; use the_book::ch15::sec04::List::{Cons, Nil}; fn main() { let a = Rc::new(Cons(5, Rc::new(Cons(10, Rc::new(Nil))))); println!("a's strong count={} in the beginning", Rc::strong_count(&a)); let _b = Cons(3, Rc::clone(&a)); println!( "a's strong count={} after referenced by b", Rc::strong_count(&a) ); { let _c = Cons(4, Rc::clone(&a)); println!( "a's strong count={} after referenced by c", Rc::strong_count(&a) ); } println!( "a's strong count={} after getting out of c's scope", Rc::strong_count(&a) ); println!("done"); }
// Copyright 2020 ChainSafe Systems // SPDX-License-Identifier: Apache-2.0, MIT use super::{Error, TipIndex, TipsetMetadata}; use actor::{power::State as PowerState, STORAGE_POWER_ACTOR_ADDR}; use blocks::{Block, BlockHeader, FullTipset, Tipset, TipsetKeys, TxMeta}; use cid::Cid; use encoding::{de::DeserializeOwned, from_slice, Cbor}; use ipld_amt::Amt; use ipld_blockstore::BlockStore; use log::{info, warn}; use message::{SignedMessage, UnsignedMessage}; use num_bigint::BigUint; use num_traits::Zero; use state_tree::StateTree; use std::sync::Arc; const GENESIS_KEY: &str = "gen_block"; const HEAD_KEY: &str = "head"; // constants for Weight calculation /// The ratio of weight contributed by short-term vs long-term factors in a given round const W_RATIO_NUM: u64 = 1; const W_RATIO_DEN: u64 = 2; /// Blocks epoch allowed const BLOCKS_PER_EPOCH: u64 = 5; /// Generic implementation of the datastore trait and structures pub struct ChainStore<DB> { // TODO add IPLD Store // TODO add a pubsub channel that publishes an event every time the head changes. // key-value datastore pub db: Arc<DB>, // Tipset at the head of the best-known chain. heaviest: Option<Arc<Tipset>>, // tip_index tracks tipsets by epoch/parentset for use by expected consensus. 
tip_index: TipIndex, } impl<DB> ChainStore<DB> where DB: BlockStore, { /// constructor pub fn new(db: Arc<DB>) -> Self { let heaviest = get_heaviest_tipset(db.as_ref()) .unwrap_or(None) .map(Arc::new); Self { db, tip_index: TipIndex::new(), heaviest, } } /// Sets heaviest tipset within ChainStore and store its tipset cids under HEAD_KEY pub fn set_heaviest_tipset(&mut self, ts: Arc<Tipset>) -> Result<(), Error> { self.db.write(HEAD_KEY, ts.key().marshal_cbor()?)?; self.heaviest = Some(ts); Ok(()) } /// Sets tip_index tracker pub fn set_tipset_tracker(&mut self, header: &BlockHeader) -> Result<(), Error> { let ts: Tipset = Tipset::new(vec![header.clone()])?; let meta = TipsetMetadata { tipset_state_root: header.state_root().clone(), tipset_receipts_root: header.message_receipts().clone(), tipset: ts, }; self.tip_index.put(&meta); Ok(()) } /// Writes genesis to blockstore pub fn set_genesis(&mut self, header: BlockHeader) -> Result<(), Error> { self.db.write(GENESIS_KEY, header.marshal_cbor()?)?; Ok(self.persist_headers(&[header])?) } /// Writes encoded blockheader data to blockstore fn persist_headers(&mut self, bh: &[BlockHeader]) -> Result<(), Error> { let mut raw_header_data = Vec::new(); let mut keys = Vec::new(); // loop through block to push blockheader raw data and cid into vector to be stored for header in bh { if !self.db.exists(header.cid().key())? { raw_header_data.push(header.marshal_cbor()?); keys.push(header.cid().key()); } } Ok(self.db.bulk_write(&keys, &raw_header_data)?) 
} /// Writes tipset block headers to data store and updates heaviest tipset pub fn put_tipsets(&mut self, ts: &Tipset) -> Result<(), Error> { self.persist_headers(ts.blocks())?; // TODO determine if expanded tipset is required; see https://github.com/filecoin-project/lotus/blob/testnet/3/chain/store/store.go#L236 self.update_heaviest(ts)?; Ok(()) } /// Writes encoded message data to blockstore pub fn put_messages<T: Cbor>(&self, msgs: &[T]) -> Result<(), Error> { for m in msgs { let key = m.cid()?.key(); let value = &m.marshal_cbor()?; if self.db.exists(&key)? { return Ok(()); } self.db.write(&key, value)? } Ok(()) } /// Loads heaviest tipset from datastore and sets as heaviest in chainstore pub fn load_heaviest_tipset(&mut self) -> Result<(), Error> { let heaviest_ts = get_heaviest_tipset(self.db.as_ref())?.ok_or_else(|| { warn!("No previous chain state found"); Error::Other("No chain state found".to_owned()) })?; // set as heaviest tipset self.heaviest = Some(Arc::new(heaviest_ts)); Ok(()) } /// Returns genesis blockheader from blockstore pub fn genesis(&self) -> Result<Option<BlockHeader>, Error> { Ok(match self.db.read(GENESIS_KEY)? { Some(bz) => Some(BlockHeader::unmarshal_cbor(&bz)?), None => None, }) } /// Returns heaviest tipset from blockstore pub fn heaviest_tipset(&self) -> Option<Arc<Tipset>> { self.heaviest.clone() } /// Returns key-value store instance pub fn blockstore(&self) -> &DB { &self.db } /// Returns Tipset from key-value store from provided cids pub fn tipset_from_keys(&self, tsk: &TipsetKeys) -> Result<Tipset, Error> { tipset_from_keys(self.db.as_ref(), tsk) } /// Returns a tuple of cids for both Unsigned and Signed messages fn read_msg_cids(&self, msg_cid: &Cid) -> Result<(Vec<Cid>, Vec<Cid>), Error> { if let Some(roots) = self .blockstore() .get::<TxMeta>(msg_cid) .map_err(|e| Error::Other(e.to_string()))? 
{ let bls_cids = self.read_amt_cids(&roots.bls_message_root)?; let secpk_cids = self.read_amt_cids(&roots.secp_message_root)?; Ok((bls_cids, secpk_cids)) } else { Err(Error::UndefinedKey("no msgs with that key".to_string())) } } /// Returns a vector of cids from provided root cid fn read_amt_cids(&self, root: &Cid) -> Result<Vec<Cid>, Error> { let amt = Amt::load(root, self.blockstore())?; let mut cids = Vec::new(); for i in 0..amt.count() { if let Some(c) = amt.get(i)? { cids.push(c); } } Ok(cids) } /// Returns a Tuple of bls messages of type UnsignedMessage and secp messages /// of type SignedMessage pub fn messages( &self, bh: &BlockHeader, ) -> Result<(Vec<UnsignedMessage>, Vec<SignedMessage>), Error> { let (bls_cids, secpk_cids) = self.read_msg_cids(bh.messages())?; let bls_msgs: Vec<UnsignedMessage> = self.messages_from_cids(bls_cids)?; let secp_msgs: Vec<SignedMessage> = self.messages_from_cids(secpk_cids)?; Ok((bls_msgs, secp_msgs)) } /// Returns messages from key-value store pub fn messages_from_cids<T>(&self, keys: Vec<Cid>) -> Result<Vec<T>, Error> where T: DeserializeOwned, { keys.iter() .map(|k| { let value = self.db.read(&k.key())?; let bytes = value.ok_or_else(|| Error::UndefinedKey(k.to_string()))?; // Decode bytes into type T let t = from_slice(&bytes)?; Ok(t) }) .collect() } /// Constructs and returns a full tipset if messages from storage exists pub fn fill_tipsets(&self, ts: Tipset) -> Result<FullTipset, Error> { let mut blocks: Vec<Block> = Vec::with_capacity(ts.blocks().len()); for header in ts.into_blocks() { let (bls_messages, secp_messages) = self.messages(&header)?; blocks.push(Block { header, bls_messages, secp_messages, }); } // the given tipset has already been verified, so this cannot fail Ok(FullTipset::new(blocks).unwrap()) } /// Determines if provided tipset is heavier than existing known heaviest tipset fn update_heaviest(&mut self, ts: &Tipset) -> Result<(), Error> { match &self.heaviest { Some(heaviest) => { let new_weight = 
self.weight(ts)?; let curr_weight = self.weight(&heaviest)?; if new_weight > curr_weight { // TODO potentially need to deal with re-orgs here info!("New heaviest tipset"); self.set_heaviest_tipset(Arc::new(ts.clone()))?; } } None => { info!("set heaviest tipset"); self.set_heaviest_tipset(Arc::new(ts.clone()))?; } } Ok(()) } /// Returns the weight of provided tipset fn weight(&self, ts: &Tipset) -> Result<BigUint, String> { let mut tpow = BigUint::zero(); let state = StateTree::new_from_root(self.db.as_ref(), ts.parent_state())?; if let Some(act) = state.get_actor(&*STORAGE_POWER_ACTOR_ADDR)? { if let Some(state) = self .db .get::<PowerState>(&act.state) .map_err(|e| e.to_string())? { tpow = state.total_network_power; } } let log2_p = if tpow > BigUint::zero() { BigUint::from(tpow.bits() - 1) } else { return Err("All power in the net is gone. You network might be disconnected, or the net is dead!".to_owned()); }; let mut out = ts.weight() + (&log2_p << 8); let e_weight = ((log2_p * BigUint::from(ts.blocks().len())) * BigUint::from(W_RATIO_NUM)) << 8; let value = e_weight / (BigUint::from(BLOCKS_PER_EPOCH) * BigUint::from(W_RATIO_DEN)); out += &value; Ok(out) } } fn get_heaviest_tipset<DB>(db: &DB) -> Result<Option<Tipset>, Error> where DB: BlockStore, { match db.read(HEAD_KEY)? 
{ Some(bz) => { let keys: Vec<Cid> = from_slice(&bz)?; Ok(Some(tipset_from_keys(db, &TipsetKeys::new(keys))?)) } None => Ok(None), } } /// Returns Tipset from key-value store from provided cids fn tipset_from_keys<DB>(db: &DB, tsk: &TipsetKeys) -> Result<Tipset, Error> where DB: BlockStore, { let mut block_headers = Vec::new(); for c in tsk.cids() { let raw_header = db.read(c.key())?; if let Some(x) = raw_header { // decode raw header into BlockHeader let bh = BlockHeader::unmarshal_cbor(&x)?; block_headers.push(bh); } else { return Err(Error::NotFound("Key for header")); } } // construct new Tipset to return let ts = Tipset::new(block_headers)?; Ok(ts) } #[cfg(test)] mod tests { use super::*; use address::Address; use cid::multihash::Identity; #[test] fn genesis_test() { let db = db::MemoryDB::default(); let mut cs = ChainStore::new(Arc::new(db)); let gen_block = BlockHeader::builder() .epoch(1) .weight((2 as u32).into()) .messages(Cid::new_from_cbor(&[], Identity)) .message_receipts(Cid::new_from_cbor(&[], Identity)) .state_root(Cid::new_from_cbor(&[], Identity)) .miner_address(Address::new_id(0)) .build_and_validate() .unwrap(); assert_eq!(cs.genesis().unwrap(), None); cs.set_genesis(gen_block.clone()).unwrap(); assert_eq!(cs.genesis().unwrap(), Some(gen_block)); } }
//! Asynchronous engine for running THavalon games

use std::collections::{HashMap, HashSet};

use serde::{Deserialize, Serialize};
use thiserror::Error;

use super::role::{PriorityTarget, RoleDetails, Team};
use super::{Card, MissionNumber};

// Game-related messages

/// Something the player tries to do
#[derive(Debug, Clone, Eq, PartialEq, Deserialize)]
pub enum Action {
    /// Submit a full mission proposal with the given set of players.
    Propose {
        players: HashSet<String>,
    },
    /// Add a single player to the in-progress proposal.
    SelectPlayer {
        player: String,
    },
    /// Remove a single player from the in-progress proposal.
    UnselectPlayer {
        player: String,
    },
    /// Vote on the current proposal (`true` = upvote).
    Vote {
        upvote: bool,
    },
    /// Play a mission card while on a mission.
    Play {
        card: Card,
    },
    /// Obscure the vote results (see `VoteCounts::Obscured`).
    Obscure,
    /// Use the Questing Beast ability.
    QuestingBeast,
    /// Declare one's role (e.g. Arthur or Agravaine — see the
    /// corresponding `Message` variants below).
    Declare,
    /// Attempt an assassination of `players` as `target`.
    Assassinate {
        players: HashSet<String>,
        target: PriorityTarget,
    },
    /// Skip ahead to the assassination phase.
    MoveToAssassination,
}

/// A message from the game to a player
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
#[serde(tag = "messageType", content = "data")]
#[serde(rename_all = "camelCase")]
pub enum Message {
    /// Error message, usually when a player does something wrong
    Error(String),

    /// The proposal order of the players in the game. Sent at the start of the game.
    ProposalOrder(Vec<String>),

    /// Sends the player their role and information
    RoleInformation { details: RoleDetails },

    /// Announces that a new player is proposing
    NextProposal {
        /// The player who will be proposing
        proposer: String,
        /// The mission this proposal is for
        mission: MissionNumber,
        /// The number of proposals made so far, excluding mission 1 and sent proposals
        proposals_made: usize,
        /// The maximum number of unsent proposals before force activates
        max_proposals: usize,
        /// The number of players to include on the proposal
        mission_size: usize,
    },

    /// The current proposal was updated
    ProposalUpdated { players: HashSet<String> },

    /// Announces that a player made a proposal
    ProposalMade {
        /// The player who made the proposal
        proposer: String,
        /// The mission they're proposing for
        mission: MissionNumber,
        /// The players on the mission
        players: HashSet<String>,
    },

    /// Announces that players should submit votes for the latest proposal.
    CommenceVoting,

    /// Announces that a player has submitted a valid vote.
    VoteReceived,

    /// Announces the results of a vote
    VotingResults { sent: bool, counts: VoteCounts },

    /// Announces that a mission is going
    MissionGoing {
        mission: MissionNumber,
        players: HashSet<String>,
    },

    /// Announces the results of a mission going
    MissionResults {
        mission: MissionNumber,
        successes: usize,
        fails: usize,
        reverses: usize,
        questing_beasts: usize,
        passed: bool,
    },

    /// Agravaine declared, so the given mission now failed.
    AgravaineDeclaration {
        mission: MissionNumber,
        player: String,
    },

    /// Assassination has begun. This can either be because 3 missions passed or because the assassin moved to assassinate.
    BeginAssassination { assassin: String },

    /// The results of an assassination attempt.
    AssassinationResult {
        /// The players that were assassinated (usually just 1)
        players: HashSet<String>,
        /// What the players were assassinated as
        target: PriorityTarget,
        /// Whether or not the assassination was correct
        correct: bool,
    },

    /// Sent to Arthur to indicate that they can declare
    ArthurCanDeclare,

    /// Sent to Arthur to indicate that they cannot declare
    ArthurCannotDeclare,

    /// Announces that Arthur has declared
    // TODO: generic Declaration message with the player and role instead?
    ArthurDeclaration { player: String },

    /// Sent when the game is over to announce who won.
    GameOver {
        winning_team: Team,
        roles: HashMap<String, RoleDetails>,
    },

    /// Message that a client should surface to the end user.
    Toast {
        severity: ToastSeverity,
        message: String,
    },
}

/// Severity of a toast notification
// NOTE: variants are SCREAMING_CASE on purpose — they serialize verbatim
// (no #[serde(rename_all)]), so the client receives "INFO"/"WARN"/"URGENT".
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub enum ToastSeverity {
    INFO,
    WARN,
    URGENT,
}

/// How players voted on a proposal
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
#[serde(tag = "voteType")]
pub enum VoteCounts {
    /// Public mission votes, where it is known who up- or downvoted.
    Public {
        upvotes: HashSet<String>,
        downvotes: HashSet<String>,
    },
    /// Obscured mission votes, where it is not known who up- or downvoted.
    Obscured { upvotes: u32, downvotes: u32 },
}

/// Errors the game engine can report back through the serialized protocol.
#[derive(Error, Debug, Serialize)]
pub enum GameError {
    #[error("Could not communicate with player")]
    PlayerDisconnected,

    #[error("Internal interaction error")]
    #[serde(serialize_with = "serialize_internal_error")]
    Internal(#[from] Box<dyn std::error::Error + Send + 'static>),
}

// Serializes a boxed internal error as its Display string, since trait
// objects cannot derive Serialize.
#[allow(clippy::borrowed_box)] // We need &Box<T> instead of &T here to match what serde expects and to add the Send + 'static constraints
fn serialize_internal_error<S: serde::Serializer>(
    error: &Box<dyn std::error::Error + Send + 'static>,
    ser: S,
) -> Result<S::Ok, S::Error> {
    let error_message = error.to_string();
    ser.serialize_str(&error_message)
}
// Voice commands for a serenity bot, defined via serenity's `command!`
// macro. Each command resolves the guild from the invoking channel (group
// chats and DMs have no guild, so they are rejected) and then drives the
// shard's voice manager.

use serenity::client::CACHE;
use serenity::model::*;
use serenity::voice;
use serenity::Result as SerenityResult;

// Deafens the bot in the caller's guild voice connection.
command!(deafen(ctx, msg) {
    let guild_id = match CACHE.read().unwrap().guild_channel(msg.channel_id) {
        Some(channel) => channel.read().unwrap().guild_id,
        None => {
            check_msg(msg.channel_id.say("Groups and DMs not supported"));

            return Ok(());
        },
    };

    let mut shard = ctx.shard.lock();

    let handler = match shard.manager.get(guild_id) {
        Some(handler) => handler,
        None => {
            check_msg(msg.reply("Not in a voice channel"));

            return Ok(());
        },
    };

    if handler.self_deaf {
        check_msg(msg.channel_id.say("Already deafened"));
    } else {
        handler.deafen(true);

        check_msg(msg.channel_id.say("Deafened"));
    }
});

// Joins the voice channel whose numeric ID is given as the first argument.
command!(join(ctx, msg, args) {
    let connect_to = match args.get(0) {
        Some(arg) => match arg.parse::<u64>() {
            Ok(id) => ChannelId(id),
            Err(_why) => {
                check_msg(msg.reply("Invalid voice channel ID given"));

                return Ok(());
            },
        },
        None => {
            check_msg(msg.reply("Requires a voice channel ID be given"));

            return Ok(());
        },
    };

    let guild_id = match CACHE.read().unwrap().guild_channel(msg.channel_id) {
        Some(channel) => channel.read().unwrap().guild_id,
        None => {
            check_msg(msg.channel_id.say("Groups and DMs not supported"));

            return Ok(());
        },
    };

    let mut shard = ctx.shard.lock();
    shard.manager.join(guild_id, connect_to);

    check_msg(msg.channel_id.say(&format!("Joined {}", connect_to.mention())));
});

// Disconnects from the guild's voice channel, if connected.
command!(leave(ctx, msg) {
    let guild_id = match CACHE.read().unwrap().guild_channel(msg.channel_id) {
        Some(channel) => channel.read().unwrap().guild_id,
        None => {
            check_msg(msg.channel_id.say("Groups and DMs not supported"));

            return Ok(());
        },
    };

    let mut shard = ctx.shard.lock();
    // Only check for presence here; `remove` does the actual disconnect.
    let has_handler = shard.manager.get(guild_id).is_some();

    if has_handler {
        shard.manager.remove(guild_id);

        check_msg(msg.channel_id.say("Left voice channel"));
    } else {
        check_msg(msg.reply("Not in a voice channel"));
    }
});

// Self-mutes the bot in the guild's voice connection.
command!(mute(ctx, msg) {
    let guild_id = match CACHE.read().unwrap().guild_channel(msg.channel_id) {
        Some(channel) => channel.read().unwrap().guild_id,
        None => {
            check_msg(msg.channel_id.say("Groups and DMs not supported"));

            return Ok(());
        },
    };

    let mut shard = ctx.shard.lock();

    let handler = match shard.manager.get(guild_id) {
        Some(handler) => handler,
        None => {
            check_msg(msg.reply("Not in a voice channel"));

            return Ok(());
        },
    };

    if handler.self_mute {
        check_msg(msg.channel_id.say("Already muted"));
    } else {
        handler.mute(true);

        check_msg(msg.channel_id.say("Now muted"));
    }
});

// Streams audio from a URL (via ytdl) into the current voice connection.
command!(play(ctx, msg, args) {
    let url = match args.get(0) {
        Some(url) => url,
        None => {
            check_msg(msg.channel_id.say("Must provide a URL to a video or audio"));

            return Ok(());
        },
    };

    // Cheap sanity check; ytdl itself validates the URL properly.
    if !url.starts_with("http") {
        check_msg(msg.channel_id.say("Must provide a valid URL"));

        return Ok(());
    }

    let guild_id = match CACHE.read().unwrap().guild_channel(msg.channel_id) {
        Some(channel) => channel.read().unwrap().guild_id,
        None => {
            check_msg(msg.channel_id.say("Error finding channel info"));

            return Ok(());
        },
    };

    if let Some(handler) = ctx.shard.lock().manager.get(guild_id) {
        let source = match voice::ytdl(url) {
            Ok(source) => source,
            Err(why) => {
                println!("Err starting source: {:?}", why);

                check_msg(msg.channel_id.say("Error sourcing ffmpeg"));

                return Ok(());
            },
        };

        handler.play(source);

        check_msg(msg.channel_id.say("Playing song"));
    } else {
        check_msg(msg.channel_id.say("Not in a voice channel to play in"));
    }
});

// Removes the bot's self-deafen state.
command!(undeafen(ctx, msg) {
    let guild_id = match CACHE.read().unwrap().guild_channel(msg.channel_id) {
        Some(channel) => channel.read().unwrap().guild_id,
        None => {
            check_msg(msg.channel_id.say("Error finding channel info"));

            return Ok(());
        },
    };

    if let Some(handler) = ctx.shard.lock().manager.get(guild_id) {
        handler.deafen(false);

        check_msg(msg.channel_id.say("Undeafened"));
    } else {
        check_msg(msg.channel_id.say("Not in a voice channel to undeafen in"));
    }
});

// Removes the bot's self-mute state.
command!(unmute(ctx, msg) {
    let guild_id = match CACHE.read().unwrap().guild_channel(msg.channel_id) {
        Some(channel) => channel.read().unwrap().guild_id,
        None => {
            check_msg(msg.channel_id.say("Error finding channel info"));

            return Ok(());
        },
    };

    if let Some(handler) = ctx.shard.lock().manager.get(guild_id) {
        handler.mute(false);

        check_msg(msg.channel_id.say("Unmuted"));
    } else {
        check_msg(msg.channel_id.say("Not in a voice channel to undeafen in"));
    }
});

/// Checks that a message successfully sent; if not, then logs why to stdout.
fn check_msg(result: SerenityResult<Message>) {
    if let Err(why) = result {
        println!("Error sending message: {:?}", why);
    }
}
use std::process;

use syslog::{BasicLogger, Facility, Formatter3164};

/// Initializes the process-wide logger.
///
/// When `syslog` is true, logs go to the local syslog daemon (facility
/// `LOG_USER`, tagged with `name` and this process's pid); otherwise a
/// simple stderr logger is used. In both cases the maximum log level is
/// set to `level`.
///
/// Returns `Err(String)` describing the failure instead of panicking —
/// the previous version `unwrap()`ed internally, which defeated the
/// `Result` return type.
pub fn setup(name: &str, level: log::Level, syslog: bool) -> Result<(), String> {
    if syslog {
        let formatter = Formatter3164 {
            facility: Facility::LOG_USER,
            hostname: None,
            process: name.to_string(),
            // syslog expects a signed pid even though std returns u32.
            pid: process::id() as i32,
        };
        let logger = syslog::unix(formatter)
            .map_err(|err| format!("create syslog logger error: {}", err))?;
        // set_boxed_logger fails if a global logger was already installed.
        log::set_boxed_logger(Box::new(BasicLogger::new(logger)))
            .map(|()| log::set_max_level(level.to_level_filter()))
            .map_err(|err| format!("set syslog logger error: {}", err))?;
    } else {
        simple_logger::init_with_level(level)
            .map_err(|err| format!("init simple logger error: {}", err))?;
    }
    Ok(())
}
// A stub drand client that performs no network I/O.
//
// NOTE(review): this block has several compile-breaking defects flagged
// inline below; they are left untouched here because the `Client`/`Info`
// trait definitions are not visible, so the intended fix cannot be
// confirmed from this file alone.
#[derive(Clone)]
struct EmptyClient {
    public_key: (),
    period: time::Duration,
    genesis_time: time::SystemTime,
    hash: Vec<u8>,
}

impl From<EmptyClient> for ClientInfo {
    fn from(val: EmptyClient) -> Self {
        ClientInfo {
            public_key: val.public_key,
            period: val.period,
            genesis_time: val.genesis_time,
            hash: val.hash,
        }
    }
}

impl Client for EmptyClient {
    type I = EmptyClient;
    type R = ClientRound;

    fn new<I: Info>(info: I) -> EmptyClient {
        EmptyClient {
            public_key: info.to_public_key(),
            period: info.to_period(),
            genesis_time: info.to_genesis_time(),
            hash: info.to_info(),
        }
    }
    // NOTE(review): `EmptyClient` has no `info` field — this does not
    // compile. Presumably this should reconstruct an info value from the
    // struct's own fields; confirm against the `Client` trait.
    fn to_info(&self) -> Result<Info> {
        self.info.clone()
    }
    fn round_at(&self, t: time::SystemTime) -> Result<u128> {
        // NOTE(review): `SystemTime::elapsed` takes no arguments —
        // `t.duration_since(self.genesis_time)` appears to be the intent.
        // `self.info.*` should be `self.*` (no `info` field), `as_secs`
        // is a method (`as_secs()`), and `period` is a `Duration`, so the
        // modulo/division likely want `self.period.as_secs()`. TODO confirm.
        let next_round = match t.elapsed(self.info.genesis_time) {
            Ok(dur) => {
                let dur = dur.as_secs;
                // gives us the number of periods since genesis
                // we add +1 since we want the next round
                // we also add +1 because round 1 starts at genesis time.
                match dur % self.info.period {
                    0 => (dur / self.info.period) + 1,
                    _ => (dur / self.info.period) + 1 + 1,
                }
            }
            // Before genesis: the next round is the first one.
            Err(_) => 1,
        };
        Ok(next_round)
    }
    // Always fails: an empty client cannot fetch any round.
    // NOTE(review): the `round` parameter is unused (compiler warning);
    // consider renaming to `_round`.
    fn get_round(&self, round: u128) -> Result<u128> {
        Err(Error::EmptyClient)
    }
    // Yields an empty stream of rounds.
    fn watch_rounds(&self) -> Result<Box<dyn Iterator<Item = Result<Self::R>>>> {
        Ok(Box::new(vec![].into_iter()))
    }
}

impl Info for EmptyClient {
    fn to_public_key(&self) -> () {
        self.public_key.clone()
    }
    fn to_period(&self) -> time::Duration {
        self.period.clone()
    }
    fn to_genesis_time(&self) -> time::SystemTime {
        self.genesis_time.clone()
    }
    fn as_hash(&self) -> &[u8] {
        self.hash.as_slice()
    }
}
//! Asynchronous Binance REST client built on futures 0.1 / tokio-core.
//! Methods return boxed futures; nothing executes until the future is
//! driven by a reactor (or `.wait()`ed).

use model::*;
use utils::*;
use std::fmt::Write;
use std::vec::Vec;
use std::str;
use reqwest::header::Headers;
use reqwest::unstable::async::Client as AsyncReqClient;
use reqwest::Method;
use tokio_core::reactor::Handle;
use futures::future::Future;
use futures::{Stream, IntoFuture};
use {API_V1, WAPI_V3, API_V3};

pub struct AsyncBnbClient {
    // HMAC secret used to sign private endpoints; optional for public-only use.
    secret_key : Option<Vec<u8>>,
    // API key sent via the X-MBX-APIKEY header; optional for public-only use.
    api_key : Option<String>,
    req_client : AsyncReqClient,
    // Cached exchange metadata; must be populated via `init_exchange_info`
    // before `post_order` can be used.
    exchange_info : Option<ExchangeInfo>,
}

impl AsyncBnbClient {
    pub fn new(handle: &Handle, secret_key: Option<&str>, api_key: Option<&str>)
        -> AsyncBnbClient {
        AsyncBnbClient {
            secret_key : secret_key.map(Into::into),
            api_key : api_key.map(Into::into),
            req_client : AsyncReqClient::new(handle),
            exchange_info : None,
        }
    }

    // Blocks the current thread until exchange info is fetched and cached.
    pub fn init_exchange_info(&mut self) -> Result<(), BnbError> {
        self.exchange_info = Some(self.fetch_exchange_info().wait()?);
        Ok(())
    }

    // Request with API key and signed with API secret
    fn req_signed(&self, base_url: &str, params: &str, method: Method)
        -> Box<Future<Item = String, Error = BnbError>>{
        let sec_key = match self.secret_key.as_ref() {
            Some(sec_key) => sec_key,
            None => return Box::new(Err(BnbError::Other("Missing secret key".into())).into_future()),
        };
        // The HMAC signature covers the query string and is appended to it.
        let sign = sign_string(sec_key, params);
        let url = format!("{}?{}&signature={}", base_url, params, sign);
        self.req_with_key(&url, method)
    }

    // Request with API key but no signing
    fn req_with_key(&self, url: &str, method: Method)
        -> Box<Future<Item = String, Error = BnbError>> {
        let api_key = match self.api_key.as_ref() {
            Some(api_key) => api_key.as_str(),
            None => return Box::new(Err(BnbError::Other("Missing API key".into())).into_future()),
        };
        let mut h = Headers::new();
        h.set_raw("X-MBX-APIKEY", api_key);
        // Collect the chunked response body into a single UTF-8 string.
        let fut = self.req_client.request(method, url)
            .headers(h)
            .send()
            .map(|resp| resp.into_body().concat2())
            .flatten()
            .map(|bytes| String::from_utf8(bytes.to_vec()).unwrap())
            .map_err(|e| BnbError::RequestError(e.status().map(|s| s.as_u16())));
        Box::new(fut)
    }

    // Public request
    fn req(&self, url: &str, method: Method)
        -> Box<Future<Item = String, Error = BnbError>> {
        let fut = self.req_client.request(method, url)
            .send()
            .map(|resp| resp.into_body().concat2())
            .flatten()
            .map(|bytes| String::from_utf8(bytes.to_vec()).unwrap())
            .map_err(|e| BnbError::RequestError(e.status().map(|s| s.as_u16())));
        Box::new(fut)
    }

    /* Public requests */
    pub fn fetch_exchange_info(&self)
        -> Box<Future<Item = ExchangeInfo, Error = BnbError>> {
        let url = API_V1.to_string() + "/exchangeInfo";
        let fut = self.req(&url, Method::Get)
            .map(|raw_resp| deserialize::<ExchangeInfo>(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_ticker24h(&self)
        -> Box<Future< Item = Vec<Ticker24h>, Error = BnbError>> {
        let url = API_V1.to_string() + "/ticker/24hr";
        let fut = self.req(&url, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    // Measures round-trip latency to the server in milliseconds.
    pub fn ping(&self) -> Box<Future<Item = i64, Error = BnbError>> {
        let url = API_V1.to_string() + "/ping";
        let tic = timestamp_now_ms();
        let fut = self.req(&url, Method::Get)
            .map(|raw_resp| deserialize_unitlike(&raw_resp))
            .flatten()
            .map(move |_| timestamp_now_ms() - tic);
        Box::new(fut)
    }

    pub fn fetch_server_time(&self) -> Box<Future<Item = i64, Error = BnbError>> {
        let url = API_V1.to_string() + "/time";
        let fut = self.req(&url, Method::Get)
            .map(|raw_resp| deserialize::<ServerTime>(&raw_resp))
            .flatten()
            .map(|st| st.server_time);
        Box::new(fut)
    }

    pub fn fetch_order_book(&self, symbol: &str, depth: i64)
        -> Box<Future<Item = OrderBook, Error = BnbError>> {
        let mut url = API_V1.to_string() + "/depth";
        write!(url, "?symbol={}&limit={}", symbol.to_uppercase(), depth).ok();
        let fut = self.req(&url, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_candles(&self, symbol: &str, interval: &str, limit: i32,
                         start_time: Option<i64>, end_time: Option<i64>)
        -> Box<Future<Item = Vec<Candle>, Error = BnbError>> {
        let mut url = API_V1.to_string() + "/klines";
        let params = opt_params_to_string![
            ("symbol", Some(symbol)),
            ("interval", Some(interval)),
            ("limit", Some(limit)),
            ("startTime", start_time),
            ("endTime", end_time)
        ];
        write!(url, "?{}", params).ok();
        let fut = self.req(&url, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    /* Requests with API key */
    pub fn fetch_listen_key(&self) -> Box<Future<Item = String, Error = BnbError>> {
        let url = API_V1.to_string() + "/userDataStream";
        let fut = self.req_with_key(&url, Method::Post)
            .map(|raw_resp| deserialize::<ListenKey>(&raw_resp))
            .flatten()
            .map(|lk| lk.listen_key);
        Box::new(fut)
    }

    pub fn keepalive_listen_key(&self, listen_key: &str)
        -> Box<Future<Item = (), Error = BnbError>> {
        let mut url = API_V1.to_string() + "/userDataStream";
        write!(url, "?listenKey={}", listen_key).ok();
        let fut = self.req_with_key(&url, Method::Put)
            .map(|raw_resp| deserialize_unitlike(&raw_resp))
            .flatten()
            .map(|_| ());
        Box::new(fut)
    }

    pub fn close_listen_key(&self, listen_key: &str)
        -> Box<Future<Item = (), Error = BnbError>> {
        let mut url = API_V1.to_string() + "/userDataStream";
        write!(url, "?listenKey={}", listen_key).ok();
        let fut = self.req_with_key(&url, Method::Delete)
            .map(|raw_resp| deserialize_unitlike(&raw_resp))
            .flatten()
            .map(|_| ());
        Box::new(fut)
    }

    /* Signed requests */
    // Requires `init_exchange_info` to have been called first, since the
    // order parameters are formatted against the cached symbol filters.
    pub fn post_order(&self, order: Order)
        -> Box<Future<Item = OrderResponseResult, Error = BnbError>> {
        let ex_info = match self.exchange_info.as_ref() {
            Some(ex_info) => ex_info,
            None => return Box::new(Err(BnbError::Other("Missing exchange info".into())).into_future()),
        };
        let sym_info = match ex_info.symbols.get(&order.symbol) {
            Some(sym_info) => sym_info,
            None => return Box::new(Err(BnbError::Other(format!("Invalid symbol: {}", &order.symbol))).into_future()),
        };
        let url = API_V3.to_string() + "/order";
        let params = order.to_url_params(sym_info);
        let fut = self.req_signed(&url, &params, Method::Post)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_account_info(&self)
        -> Box<Future<Item = AccountInfo, Error = BnbError>> {
        let url = API_V3.to_string() + "/account";
        let params = format!("timestamp={}", timestamp_now_ms());
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_trades(&self, symbol: &str, limit: i32, from_id: Option<i32>)
        -> Box<Future<Item = Vec<Trade>, Error = BnbError>> {
        let url = API_V3.to_string() + "/myTrades";
        let params = opt_params_to_string![
            ("symbol", Some(symbol)),
            ("limit", Some(limit)),
            ("fromId", from_id),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_all_orders(&self, symbol: &str, limit: i32, order_id: Option<i32>)
        -> Box<Future<Item = Vec<HistoricalOrder>, Error = BnbError>> {
        let url = API_V3.to_string() + "/allOrders";
        let params = opt_params_to_string![
            ("symbol", Some(symbol)),
            ("limit", Some(limit)),
            ("orderId", order_id),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_open_orders(&self, symbol: Option<&str>)
        -> Box<Future<Item = Vec<HistoricalOrder>, Error = BnbError>> {
        let url = API_V3.to_string() + "/openOrders";
        let params = opt_params_to_string![
            ("symbol", symbol),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    // Identifies the order by exchange id or by client order id.
    pub fn cancel_order(&self, symbol: &str, order_id: Option<i64>, client_order_id: Option<&str>)
        -> Box<Future<Item = CanceledOrder, Error = BnbError>> {
        let url = API_V3.to_string() + "/order";
        let params = opt_params_to_string![
            ("symbol", Some(symbol)),
            ("orderId", order_id),
            ("origClientOrderId", client_order_id),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        let fut = self.req_signed(&url, &params, Method::Delete)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_order(&self, symbol: &str, order_id: Option<i64>, client_order_id: Option<&str>)
        -> Box<Future<Item = HistoricalOrder, Error = BnbError>> {
        let url = API_V3.to_string() + "/order";
        let params = opt_params_to_string![
            ("symbol", Some(symbol)),
            ("orderId", order_id),
            ("origClientOrderId", client_order_id),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn withdraw(&self, asset: &str, address: &str, address_tag: Option<&str>,
                    amount: f64, name: Option<&str>)
        -> Box<Future<Item = WithdrawResponse, Error = BnbError>> {
        let url = WAPI_V3.to_string() + "/withdraw.html";
        let params = opt_params_to_string![
            ("asset", Some(asset)),
            ("address", Some(address)),
            ("addressTag", address_tag),
            ("amount", Some(amount)),
            ("name", name),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        // The wapi endpoint wraps the payload in a one-element array.
        let fut = self.req_signed(&url, &params, Method::Post)
            .map(|raw_resp| wapi_deserialize::<[WithdrawResponse; 1]>(&raw_resp))
            .flatten()
            .map(|vec| vec[0].clone());
        Box::new(fut)
    }

    pub fn fetch_deposit_history(&self, asset: Option<&str>, status: Option<i32>,
                                 start_time: Option<i64>, end_time: Option<i64>)
        -> Box<Future<Item = DepositHistory, Error = BnbError>> {
        let url = WAPI_V3.to_string() + "/depositHistory.html";
        let params = opt_params_to_string![
            ("asset", asset),
            ("status", status),
            ("startTime", start_time),
            ("endTime", end_time),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| wapi_deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_withdraw_history(&self, asset: Option<&str>, status: Option<i32>,
                                  start_time: Option<i64>, end_time: Option<i64>)
        -> Box<Future<Item = WithdrawHistory, Error = BnbError>> {
        let url = WAPI_V3.to_string() + "/withdrawHistory.html";
        let params = opt_params_to_string![
            ("asset", asset),
            ("status", status),
            ("startTime", start_time),
            ("endTime", end_time),
            ("timestamp", Some(timestamp_now_ms()))
        ];
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| wapi_deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_deposit_address(&self, asset: &str)
        -> Box<Future<Item = DepositAddressResponse, Error = BnbError>> {
        let url = WAPI_V3.to_string() + "/depositAddress.html";
        let params = format!("asset={}&timestamp={}", asset, timestamp_now_ms());
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| wapi_deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }

    pub fn fetch_account_status(&self)
        -> Box<Future<Item = AccountStatus, Error = BnbError>> {
        let url = WAPI_V3.to_string() + "/accountStatus.html";
        let params = format!("timestamp={}", timestamp_now_ms());
        let fut = self.req_signed(&url, &params, Method::Get)
            .map(|raw_resp| wapi_deserialize(&raw_resp))
            .flatten();
        Box::new(fut)
    }
}
// Auto-generated register accessor (svd2rust style): a read-only 32-bit
// hardware counter register. Do not edit by hand — regenerate from the SVD.
#[doc = "Register `TX_MULTIPLE_COLLISION_GOOD_PACKETS` reader"]
pub type R = crate::R<TX_MULTIPLE_COLLISION_GOOD_PACKETS_SPEC>;
#[doc = "Field `TXMULTCOLG` reader - Tx Multiple Collision Good Packets"]
pub type TXMULTCOLG_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - Tx Multiple Collision Good Packets"]
    #[inline(always)]
    pub fn txmultcolg(&self) -> TXMULTCOLG_R {
        // The field spans the full 32-bit register, so no masking is needed.
        TXMULTCOLG_R::new(self.bits)
    }
}
#[doc = "Tx multiple collision good packets register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tx_multiple_collision_good_packets::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TX_MULTIPLE_COLLISION_GOOD_PACKETS_SPEC;
impl crate::RegisterSpec for TX_MULTIPLE_COLLISION_GOOD_PACKETS_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`tx_multiple_collision_good_packets::R`](R) reader structure"]
impl crate::Readable for TX_MULTIPLE_COLLISION_GOOD_PACKETS_SPEC {}
#[doc = "`reset()` method sets TX_MULTIPLE_COLLISION_GOOD_PACKETS to value 0"]
impl crate::Resettable for TX_MULTIPLE_COLLISION_GOOD_PACKETS_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// Byte-level tokens used when scanning HTML/XML-like markup.

/// First byte of any markup tag.
pub const OPENING_TAG: u8 = b'<';
/// Prefix of a closing tag, e.g. `</div>`.
pub const END_OF_TAG: &[u8] = b"</";
/// Suffix of a self-closing tag, e.g. `<br/>`.
pub const SELF_CLOSING: &[u8] = b"/>";
/// Marker following `<!` in a comment (`<!--`) / preceding `>` (`-->`).
pub const COMMENT: &[u8] = b"--";
/// The `id` attribute name.
pub const ID_ATTR: &[u8] = b"id";
/// The `class` attribute name.
pub const CLASS_ATTR: &[u8] = b"class";
/// HTML void elements: tags that never take a closing tag per the HTML
/// standard, so the parser must not expect `</...>` for them.
pub const VOID_TAGS: &[&[u8]] = &[
    b"area", b"base", b"br", b"col", b"embed", b"hr", b"img", b"input", b"keygen", b"link",
    b"meta", b"param", b"source", b"track", b"wbr",
];
// Users / registration / email-updating / avatar HTTP API for the actix-web
// service. NOTE(review): reflowed from a whitespace-mangled paste; code tokens
// are unchanged, only formatting and comments were added.
use crate::{
    api::{
        extractors::{
            auth::Auth,
            multer::Multer,
            config::{
                default_json_config, default_path_config, default_query_config,
                avatar_multer_config,
            },
        },
        errors::{ApiError, ApiResult, respond},
        app_state::{AppConfig, AppDatabase, AppSubscriber, AppSmtp},
        fields::{
            Username, Password, Email, RoleName, Nickname, Id, Any24, Any6,
            Cursor as CursorField,
        },
    },
    queries::{
        errors::Error as QueryError,
        users::{ User, UserRegistrationPublic, UserEmailUpdatingPublic, },
    },
    websocket::push_messages::{UserCreated, UserUpdated, TokenRevoked, double_option},
    internal_server_error,
};
use actix_web::{ web::{self, block}, error::BlockingError, };
use actix_web_validator::{ValidatedJson, ValidatedPath, ValidatedQuery};
use chrono::{DateTime, Utc};
use image::{ GenericImageView, error::ImageError, };
use log::error;
use rand::{Rng, thread_rng, distributions::Alphanumeric};
use serde::{Serialize, Deserialize};
use std::{ convert::Infallible, iter, path::{Path, PathBuf}, cmp::Ordering, };
use validator::Validate;
use validator_derive::Validate;
use cashier_query::generator::{QueryConfig, FieldConfig, escape_unquoted};
use lazy_static::lazy_static;
use crate::api::fields::PaginationSize;
use crate::queries::users::{UserCursor, UserAll, UserRegisterInfo};
use crate::api::cursor::process_query;
use futures::FutureExt;
use crate::api::extractors::config::default_confirm_rate_limit;

// Payload for POST /users (admin-created account).
#[derive(Debug, Validate, Deserialize)]
struct CreateUserRequest {
    #[validate] username: Username,
    #[validate] password: Password,
    // roles must be a subset of creator's roles
    #[validate] roles: Vec<RoleName>,
    #[validate] email: Option<Email>,
    #[validate] nickname: Option<Nickname>,
}

#[derive(Debug, Serialize)]
struct CreateUserResponse {
    id: i32,
    created_at: DateTime<Utc>,
}

// POST /users — create a user; the caller may only grant roles it holds itself.
async fn create_user(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    data: ValidatedJson<CreateUserRequest>,
    auth: Auth,
) -> ApiResult<CreateUserResponse> {
    auth.try_permission("user", "create")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    let mut roles = data.roles.iter()
        .map(|x| x.clone().into())
        .collect::<Vec<_>>();
    // NOTE(review): dedup() only collapses *adjacent* duplicates and the list
    // is not sorted first — confirm non-adjacent duplicates are handled downstream.
    roles.dedup();
    // Reject any requested role the creator does not hold itself.
    let extra_roles = database
        .user_check_extra_roles(uid, &roles[..])
        .await
        .map_err(|e| internal_server_error!(e))?;
    if !extra_roles.is_empty() {
        return Err(ApiError::AttemptToElevateRole { roles: extra_roles });
    }
    let email = data.email.as_ref().map(|x| x.clone().into());
    let nickname = data.nickname.as_ref().map(|x| x.clone().into());
    let user = database
        .user_insert_one(&data.username[..], &data.password[..], &roles[..], &email, &nickname)
        .await
        .map_err(|err| match err {
            QueryError::DuplicatedUser { field } => ApiError::DuplicatedUser { field },
            e => internal_server_error!(e),
        })?;
    // Broadcast the creation over the websocket push channel.
    subscriber.send(UserCreated {
        id: user.id,
        username: String::from(data.username.clone()),
        roles,
        email,
        created_at: user.created_at,
    }, &auth)
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(CreateUserResponse { id: user.id, created_at: user.created_at, })
}

#[derive(Debug, Validate, Deserialize)]
struct UidPath {
    #[validate] uid: Id,
}

#[derive(Debug, Serialize)]
struct UploadAvatarResponse {
    avatar: String,
    avatar128: Option<String>,
}

// Resolve `path` inside the avatar folder under the media root.
fn join_avatar_file<P1: AsRef<Path>, P2: AsRef<Path>>(root: P1, path: P2) -> PathBuf {
    Path::new(root.as_ref())
        .join(crate::constants::AVATAR_FOLDER)
        .join(path.as_ref())
}

// Public URL of an avatar file under the media URL prefix.
fn join_avatar_url(url: &str, path: &str) -> String {
    String::from(url) + "/" + crate::constants::AVATAR_FOLDER + "/" + path
}

// Best-effort delete; a failure is only logged, never propagated.
fn remove_avatar_file<P1: AsRef<Path>, P2: AsRef<Path>>(root: P1, path: P2) {
    if let Err(e) = std::fs::remove_file(join_avatar_file(root, path)) {
        error!("failed to remove file {}", e);
    }
}

// Shared avatar-upload pipeline: decode, center-crop to a square, save a PNG
// (plus a 128px thumbnail when larger), persist filenames, clean up on failure,
// delete the previously stored files, and push a UserUpdated event.
async fn upload_avatar_impl(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    uid: i32,
    data: Multer,
    auth: &Auth,
) -> ApiResult<UploadAvatarResponse> {
    // Fetch old avatars
    let old_avatars = database
        .user_fetch_avatars(uid)
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            e => internal_server_error!(e),
        })?;
    // Crop and resize new avatars; image work runs on the blocking thread pool.
    let root = config.config.media.root.clone();
    let (avatar, avatar128) = block(move || {
        let content = data.get_single("avatar").extra().content().unwrap();
        let avatar = image::load_from_memory(content)?;
        let (width, height) = avatar.dimensions();
        let size = std::cmp::min(width, height);
        // Center-crop the longer axis so the result is size x size.
        let cropped_avatar = match width.cmp(&height) {
            Ordering::Less => avatar.crop_imm(0, (height - size) / 2, size, size),
            Ordering::Greater => avatar.crop_imm((width - size) / 2, 0, size, size),
            Ordering::Equal => avatar,
        };
        // Random alphanumeric basename to avoid collisions / caching issues.
        let mut rng = thread_rng();
        let filename = iter::repeat(())
            .map(|_| rng.sample(Alphanumeric))
            .take(crate::constants::AVATAR_FILENAME_LENGTH)
            .collect::<String>();
        let origin_filename = format!("{}.{}x{}.png", filename, size, size);
        cropped_avatar.save_with_format(join_avatar_file(&root, &origin_filename), image::ImageFormat::Png)?;
        // Only generate a thumbnail when the source is bigger than 128px.
        let thumbnail_filename = if size <= 128 { None } else {
            let thumbnail = cropped_avatar.resize(128, 128,image::imageops::FilterType::Triangle);
            let thumbnail_filename = filename + ".thumb.128x128.png";
            thumbnail.save_with_format(join_avatar_file(&root, &thumbnail_filename), image::ImageFormat::Png)
                .map_err(|e| {
                    // Thumbnail failed: remove the full-size file we just wrote.
                    remove_avatar_file(&root, &origin_filename);
                    e
                })?;
            Some(thumbnail_filename)
        };
        Ok((origin_filename, thumbnail_filename))
    })
        .await
        .map_err(|err| match err {
            BlockingError::Error(ImageError::Decoding(_)) => ApiError::AvatarError {
                error: "cannot decode the uploaded avatar".into(),
            },
            e => internal_server_error!(e),
        })?;
    // Save new avatars to database
    let updated_at = match database
        .user_update_avatars(uid, &Some(avatar.clone()), &avatar128)
        .await {
        Ok(v) => v,
        Err(e) => {
            // DB write failed: delete the files written above before bailing out.
            let avatar = avatar.clone();
            let avatar128 = avatar128.clone();
            let root = config.config.media.root.clone();
            block(move || {
                remove_avatar_file(&root, &avatar);
                if let Some(avatar128) = avatar128 {
                    remove_avatar_file(&root, &avatar128);
                }
                Ok::<(), Infallible>(())
            })
                .await
                .map_err(|e| internal_server_error!(e))?;
            return Err(match e {
                QueryError::UserNotFound => ApiError::UserNotFound,
                e => internal_server_error!(e),
            });
        }
    };
    let root = &config.config.media.root;
    // Remove old avatars
    if let Some(old_avatar) = old_avatars.avatar.as_ref() {
        remove_avatar_file(root, old_avatar);
    }
    if let Some(old_avatar128) = old_avatars.avatar128.as_ref() {
        remove_avatar_file(root, old_avatar128);
    }
    // Convert stored filenames to public URLs for the response and push event.
    let url = &config.config.media.url;
    let avatar = join_avatar_url(url, &avatar);
    let avatar128 = avatar128.map(|x| join_avatar_url(url, &x));
    subscriber.send(UserUpdated {
        id: uid,
        username: None,
        email: None,
        password: None,
        nickname: None,
        avatar: Some(Some(avatar.clone())),
        avatar128: Some(avatar128.clone()),
        blocked: None,
        updated_at,
    }, auth)
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(UploadAvatarResponse { avatar, avatar128, })
}

// POST /users/me/avatar
async fn upload_avatar_for_me(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    data: Multer,
    auth: Auth,
) -> ApiResult<UploadAvatarResponse> {
    auth.try_permission("user-avatar", "update-self")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    upload_avatar_impl(config, database, subscriber, uid, data, &auth).await
}

// POST /users/{uid}/avatar (admin variant)
async fn upload_avatar(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    uid_path: ValidatedPath<UidPath>,
    data: Multer,
    auth: Auth,
) -> ApiResult<UploadAvatarResponse> {
    auth.try_permission("user-avatar", "update")?;
    let uid = uid_path.uid.clone().into();
    upload_avatar_impl(config, database, subscriber, uid, data, &auth).await
}

// Shared avatar deletion: null out DB columns, then delete files, then push.
async fn delete_avatar_impl(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    uid: i32,
    auth: &Auth,
) -> ApiResult<()> {
    // Fetch old avatars
    let old_avatars = database
        .user_fetch_avatars(uid)
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            e => internal_server_error!(e),
        })?;
    // Save new avatars to database
    let updated_at = database
        .user_update_avatars(uid, &None, &None)
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            e => internal_server_error!(e),
        })?;
    let root = &config.config.media.root;
    // Remove old avatars
    if let Some(old_avatar) = old_avatars.avatar.as_ref() {
        remove_avatar_file(root, old_avatar);
    }
    if let Some(old_avatar128) = old_avatars.avatar128.as_ref() {
        remove_avatar_file(root, old_avatar128);
    }
    subscriber.send(UserUpdated {
        id: uid,
        username: None,
        email: None,
        password: None,
        nickname: None,
        avatar: Some(None),
        avatar128: Some(None),
        blocked: None,
        updated_at,
    }, &auth)
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(())
}

// DELETE /users/me/avatar
async fn delete_avatar_for_me(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user-avatar", "delete-self")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    delete_avatar_impl(config, database, subscriber, uid, &auth).await
}

// DELETE /users/{uid}/avatar (admin variant)
async fn delete_avatar(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    uid_path: ValidatedPath<UidPath>,
    auth: Auth,
) -> ApiResult<()> {
    // NOTE(review): the action here is "update" while the self variant checks
    // "delete-self" — looks like a copy-paste; confirm the intended action.
    auth.try_permission("user-avatar", "update")?;
    let uid = uid_path.uid.clone().into();
    delete_avatar_impl(config, database, subscriber, uid, &auth).await
}

#[derive(Serialize, Debug)]
struct ReadUserResponse {
    user: User,
}

// Load a full user record and rewrite avatar filenames into public URLs.
async fn read_user_impl(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    uid: i32,
) -> ApiResult<ReadUserResponse> {
    let mut user = User::All(database
        .user_find_one(uid)
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            e => internal_server_error!(e),
        })?);
    let media_url = &config.config.media.url;
    user.map_avatars(|x| join_avatar_url(media_url, x));
    respond(ReadUserResponse { user })
}

// GET /users/me
async fn read_user_for_me(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    auth: Auth,
) -> ApiResult<ReadUserResponse> {
    auth.try_permission("user", "read-self")?;
    let uid = auth.claims.ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    read_user_impl(config, database, uid).await
}

// GET /users/{uid}
async fn read_user(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    uid_path: ValidatedPath<UidPath>,
    auth: Auth,
) -> ApiResult<ReadUserResponse> {
    auth.try_permission("user", "read")?;
    let uid = uid_path.uid.clone().into();
    read_user_impl(config, database, uid).await
}

// GET /public-users/{uid} — redacted, publicly readable projection.
async fn read_user_public(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    uid_path: ValidatedPath<UidPath>,
    auth: Auth,
) -> ApiResult<ReadUserResponse> {
    auth.try_permission("user-public", "read")?;
    let uid = uid_path.uid.clone().into();
    let mut user = User::Public(database
        .user_find_one_public(uid)
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            e => internal_server_error!(e),
        })?);
    let media_url = &config.config.media.url;
    user.map_avatars(|x| join_avatar_url(media_url, x));
    respond(ReadUserResponse { user })
}

#[derive(Debug, Validate, Deserialize)]
struct RegisterUserRequest {
    #[validate] username: Username,
    #[validate] email: Email,
    #[validate] password: Password,
}

#[derive(Debug, Serialize)]
struct RegisterUserResponse {
    id: String,
    created_at: DateTime<Utc>,
    expires_at: DateTime<Utc>,
}

// POST /registrations — self sign-up; a confirmation email is sent out.
async fn register_user(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    smtp: web::Data<AppSmtp>,
    request: ValidatedJson<RegisterUserRequest>,
    auth: Auth,
) -> ApiResult<RegisterUserResponse> {
    auth.try_permission("registration", "create")?;
    let result = database
        .user_register(smtp, &config.config.smtp.sender, &config.config.site, UserRegisterInfo {
            username: &request.username[..],
            email: &request.email[..],
            password: &request.password[..],
        })
        .await
        .map_err(|err| match err {
            QueryError::DuplicatedUser { field } => ApiError::DuplicatedUser { field },
            e => internal_server_error!(e),
        })?;
    respond(RegisterUserResponse {
        id: result.id,
        created_at: result.created_at,
        expires_at: result.expires_at,
    })
}

#[derive(Debug, Validate, Deserialize)]
struct RegIdPath {
    #[validate] reg_id: Any24,
}

#[derive(Debug, Validate, Deserialize)]
struct ConfirmRegistrationRequest {
    #[validate] code: Any6,
}

// Shared confirmation; `code` is None when an operator confirms on the user's behalf.
async fn confirm_registration_impl(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    reg_id: &str,
    code: &Option<String>,
    auth: &Auth,
) -> ApiResult<()> {
    let result = database
        .user_confirm_registration(reg_id, code)
        .await
        .map_err(|err| match err {
            QueryError::UserRegistrationNotFound => ApiError::UserRegistration { reason: "NotFound".into() },
            QueryError::UserRegistrationExpired => ApiError::UserRegistration { reason: "Expired".into() },
            QueryError::UserRegistrationWrongCode => ApiError::UserRegistration { reason: "WrongCode".into() },
            QueryError::DuplicatedUser { field } => ApiError::DuplicatedUser { field },
            e => internal_server_error!(e),
        })?;
    subscriber.send(UserCreated {
        id: result.id,
        username: result.username,
        roles: result.roles,
        email: result.email,
        created_at: result.created_at,
    }, auth)
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(())
}

// POST /registrations/{reg_id}/confirm (rate limited at the scope level)
async fn confirm_registration(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    request: ValidatedJson<ConfirmRegistrationRequest>,
    path: ValidatedPath<RegIdPath>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("registration", "confirm")?;
    confirm_registration_impl(database, subscriber, &path.reg_id[..], &Some(request.code.clone().into()), &auth).await
}

// POST /registrations-others/{reg_id}/confirm — operator confirmation, no code.
async fn confirm_registration_for_others(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    path: ValidatedPath<RegIdPath>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("registration", "confirm-others")?;
    confirm_registration_impl(database, subscriber, &path.reg_id[..], &None, &auth).await
}

#[derive(Debug, Validate, Deserialize)]
struct CheckUsernameExistenceRequest {
    #[validate] username: Username,
}

#[derive(Debug, Validate, Deserialize)]
struct CheckEmailExistenceRequest {
    #[validate] email: Email,
}

#[derive(Debug, Serialize)]
struct CheckExistenceResponse {
    exists: bool,
}

// GET /users/check-username-existence
async fn check_username_existence(
    database: web::Data<AppDatabase>,
    request: ValidatedQuery<CheckUsernameExistenceRequest>,
    auth: Auth,
) -> ApiResult<CheckExistenceResponse> {
    auth.try_permission("user-username", "check-existence")?;
    let exists = database
        .user_check_username_existence(&request.username[..])
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(CheckExistenceResponse { exists })
}

// GET /users/check-email-existence
async fn check_email_existence(
    database: web::Data<AppDatabase>,
    request: ValidatedQuery<CheckEmailExistenceRequest>,
    auth: Auth,
) -> ApiResult<CheckExistenceResponse> {
    // NOTE(review): the subject is "user-username" here as well — probably meant
    // "user-email"; verify against the permission table.
    auth.try_permission("user-username", "check-existence")?;
    let exists = database
        .user_check_email_existence(&request.email[..])
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(CheckExistenceResponse { exists })
}

// Registration state, tagged by "status" in the JSON body.
#[derive(Debug, Serialize)]
#[serde(tag = "status")]
enum QueryRegistrationResponse {
    NotFound,
    Expired,
    Processing(UserRegistrationPublic),
    Passed(UserRegistrationPublic),
    Rejected(UserRegistrationPublic),
}

// GET /registrations/{reg_id}
async fn query_registration(
    database: web::Data<AppDatabase>,
    path: ValidatedPath<RegIdPath>,
    auth: Auth,
) -> ApiResult<QueryRegistrationResponse> {
    auth.try_permission("registration", "read")?;
    let result = match database
        .user_query_registration(&path.reg_id[..])
        .await {
        // completed: Some(true)=passed, Some(false)=rejected, None=still processing.
        Ok(value) => match value.completed {
            Some(true) => QueryRegistrationResponse::Passed(value),
            Some(false) => QueryRegistrationResponse::Rejected(value),
            None => QueryRegistrationResponse::Processing(value),
        },
        Err(QueryError::UserRegistrationExpired) => QueryRegistrationResponse::Expired,
        Err(QueryError::UserRegistrationNotFound) => QueryRegistrationResponse::NotFound,
        Err(e) => return Err(internal_server_error!(e)),
    };
    respond(result)
}

// POST /registrations/{reg_id}/resend — re-send the confirmation email.
async fn resend_registration_email(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    smtp: web::Data<AppSmtp>,
    path: ValidatedPath<RegIdPath>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("registration", "resend")?;
    database
        .user_resend_registration_email(
            smtp, &config.config.smtp.sender, &config.config.site, &path.reg_id[..])
        .await
        .map_err(|err| match err {
            QueryError::UserRegistrationNotFound => ApiError::UserRegistration { reason: "NotFound".into() },
            QueryError::UserRegistrationExpired => ApiError::UserRegistration { reason: "Expired".into() },
            e => internal_server_error!(e),
        })?;
    respond(())
}

// PATCH body for operators. `Option<Option<T>>` (via double_option) distinguishes
// "field absent" (outer None) from "explicitly set to null" (inner None).
#[derive(Debug, Validate, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdateUserRequest {
    #[validate]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub username: Option<Username>,
    #[validate]
    #[serde(deserialize_with = "double_option")]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub email: Option<Option<Email>>,
    #[validate]
    #[serde(deserialize_with = "double_option")]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub nickname: Option<Option<Nickname>>,
    #[serde(deserialize_with = "double_option")]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub blocked: Option<Option<bool>>,
}

// PATCH body for a user editing itself (no email/blocked access).
#[derive(Debug, Validate, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdateSelfRequest {
    #[validate]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub username: Option<Username>,
    #[validate]
    #[serde(deserialize_with = "double_option")]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub nickname: Option<Option<Nickname>>,
}

// Normalized update arguments shared by the self/admin handlers.
struct UpdateUserInfo {
    uid: i32,
    username: Option<Username>,
    email: Option<Option<Email>>,
    nickname: Option<Option<Nickname>>,
    blocked: Option<Option<bool>>,
}

async fn update_user_impl(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    auth: &Auth,
    info: UpdateUserInfo,
) -> ApiResult<()> {
    let UpdateUserInfo { uid, username, email, nickname, blocked } = info;
    let username = username.map(|x| x.into());
    let email = email.map(|x| x.map(|x| x.into()));
    let nickname = nickname.map(|x| x.map(|x| x.into()));
    let updated_at = database
        // info.uid stays readable after the partial move above because i32 is Copy.
        .user_update(info.uid, &username, &email, &nickname, &blocked)
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            QueryError::DuplicatedUser { field } => ApiError::DuplicatedUser { field },
            e => internal_server_error!(e),
        })?;
    subscriber.send(UserUpdated {
        id: uid,
        username,
        email,
        password: None,
        nickname,
        avatar: None,
        avatar128: None,
        blocked,
        updated_at,
    }, auth)
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(())
}

// PATCH /users/me
async fn update_user_for_me(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    request: ValidatedJson<UpdateSelfRequest>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user", "update-self")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    update_user_impl(database, subscriber, &auth, UpdateUserInfo {
        uid,
        username: request.username.clone(),
        email: None,
        nickname: request.nickname.clone(),
        blocked: None,
    }).await
}

// PATCH /users/{uid} (admin variant — may also change email/blocked)
async fn update_user(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    uid_path: ValidatedPath<UidPath>,
    request: ValidatedJson<UpdateUserRequest>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user", "update")?;
    let uid = uid_path.uid.clone().into();
    update_user_impl(database, subscriber, &auth, UpdateUserInfo {
        uid,
        username: request.username.clone(),
        email: request.email.clone(),
        nickname: request.nickname.clone(),
        blocked: request.blocked,
    }).await
}

#[derive(Debug, Validate, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdateEmailRequest {
    #[validate] pub email: Email,
}

#[derive(Debug, Serialize)]
struct UpdateEmailResponse {
    id: String,
    created_at: DateTime<Utc>,
    expires_at: DateTime<Utc>,
}

// POST /email-updating — start an email change; a confirmation code is mailed.
async fn update_user_email(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    smtp: web::Data<AppSmtp>,
    request: ValidatedJson<UpdateEmailRequest>,
    auth: Auth,
) -> ApiResult<UpdateEmailResponse> {
    auth.try_permission("user-email-updating", "create")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    let result = database
        .user_update_email(
            smtp, &config.config.smtp.sender, &config.config.site, uid, &request.email[..],
        )
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            QueryError::DuplicatedUser { field } => ApiError::DuplicatedUser { field },
            e => internal_server_error!(e),
        })?;
    respond(UpdateEmailResponse {
        id: result.id,
        created_at: result.created_at,
        expires_at: result.expires_at,
    })
}

#[derive(Debug, Validate, Deserialize)]
struct UpdateIdPath {
    #[validate] update_id: Any24,
}

#[derive(Debug, Serialize)]
#[serde(tag = "status")]
enum QueryEmailUpdatingResponse {
    NotFound,
    Expired,
    Processing(UserEmailUpdatingPublic),
    Passed(UserEmailUpdatingPublic),
    Rejected(UserEmailUpdatingPublic),
}

// `uid` is Some for self-service reads; ownership is enforced by the query layer
// (UserNotMatch maps to PermissionDenied).
async fn query_email_updating_impl(
    database: web::Data<AppDatabase>,
    updated_id: &str,
    uid: Option<i32>
) -> ApiResult<QueryEmailUpdatingResponse> {
    let result = match database
        .user_query_email_updating(updated_id, uid)
        .await {
        Ok(value) => {
            match value.completed {
                Some(true) => QueryEmailUpdatingResponse::Passed(value),
                Some(false) => QueryEmailUpdatingResponse::Rejected(value),
                None => QueryEmailUpdatingResponse::Processing(value),
            }
        },
        Err(QueryError::UserNotMatch) => return Err(ApiError::PermissionDenied {
            subject: "user-email-updating".into(),
            action: "read".into(),
        }),
        Err(QueryError::UserEmailUpdatingExpired) => QueryEmailUpdatingResponse::Expired,
        Err(QueryError::UserEmailUpdatingNotFound) => QueryEmailUpdatingResponse::NotFound,
        Err(e) => return Err(internal_server_error!(e)),
    };
    respond(result)
}

// GET /email-updating/{update_id}
async fn query_email_updating(
    database: web::Data<AppDatabase>,
    path: ValidatedPath<UpdateIdPath>,
    auth: Auth,
) -> ApiResult<QueryEmailUpdatingResponse> {
    auth.try_permission("user-email-updating", "read")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    query_email_updating_impl(database, &path.update_id[..], Some(uid)).await
}

// GET /email-updating-others/{update_id} — no ownership restriction.
async fn query_email_updating_for_others(
    database: web::Data<AppDatabase>,
    path: ValidatedPath<UpdateIdPath>,
    auth: Auth,
) -> ApiResult<QueryEmailUpdatingResponse> {
    auth.try_permission("user-email-updating", "read-others")?;
    query_email_updating_impl(database, &path.update_id[..], None).await
}

#[derive(Debug, Validate, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ConfirmEmailUpdatingRequest {
    #[validate] code: Any6,
}

// Shared confirm logic; code/uid are None for the operator variant.
async fn confirm_email_updating_impl(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    auth: &Auth,
    update_id: &str,
    code: &Option<String>,
    uid: &Option<i32>
) -> ApiResult<()> {
    let result = database
        .user_confirm_email_updating(update_id, code, uid)
        .await
        .map_err(|err| match err {
            QueryError::UserEmailUpdatingNotFound => ApiError::UserEmailUpdating { reason: "NotFound".into() },
            QueryError::UserEmailUpdatingExpired => ApiError::UserEmailUpdating { reason: "Expired".into() },
            QueryError::UserEmailUpdatingWrongCode => ApiError::UserEmailUpdating { reason: "WrongCode".into() },
            QueryError::DuplicatedUser { field } => ApiError::DuplicatedUser { field },
            QueryError::UserNotFound => ApiError::UserNotFound,
            QueryError::UserNotMatch => ApiError::PermissionDenied {
                subject: "user-email-updating".into(),
                action: "confirm".into(),
            },
            e => internal_server_error!(e),
        })?;
    subscriber.send(UserUpdated {
        id: result.id,
        username: None,
        email: Some(Some(result.email)),
        password: None,
        nickname: None,
        avatar: None,
        avatar128: None,
        blocked: None,
        updated_at: result.updated_at,
    }, auth)
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(())
}

// POST /email-updating/{update_id}/confirm (rate limited at the scope level)
async fn confirm_email_updating(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    request: ValidatedJson<ConfirmEmailUpdatingRequest>,
    path: ValidatedPath<UpdateIdPath>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user-email-updating", "confirm")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    confirm_email_updating_impl(database, subscriber, &auth, &path.update_id[..], &Some(request.code.clone().into()), &Some(uid)).await
}

// POST /email-updating-others/{update_id}/confirm
async fn confirm_email_updating_for_others(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    path: ValidatedPath<UpdateIdPath>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user-email-updating", "confirm-others")?;
    confirm_email_updating_impl(database, subscriber, &auth, &path.update_id[..], &None, &None).await
}

async fn resend_email_updating_email_impl(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    smtp: web::Data<AppSmtp>,
    update_id: &str,
    uid: Option<i32>,
) -> ApiResult<()> {
    database
        .user_resend_email_updating_email(
            smtp, &config.config.smtp.sender, &config.config.site, update_id, uid)
        .await
        .map_err(|err| match err {
            QueryError::UserEmailUpdatingNotFound => ApiError::UserEmailUpdating { reason: "NotFound".into() },
            QueryError::UserEmailUpdatingExpired => ApiError::UserEmailUpdating { reason: "Expired".into() },
            QueryError::UserNotMatch => ApiError::PermissionDenied {
                subject: "user-email-updating".into(),
                action: "resend".into(),
            },
            e => internal_server_error!(e),
        })?;
    respond(())
}

// POST /email-updating/{update_id}/resend
async fn resend_email_updating_email(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    smtp: web::Data<AppSmtp>,
    path: ValidatedPath<UpdateIdPath>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user-email-updating", "resend")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    resend_email_updating_email_impl(config, database, smtp, &path.update_id[..], Some(uid)).await
}

// POST /email-updating-others/{update_id}/resend
async fn resend_email_updating_email_for_others(
    config: web::Data<AppConfig>,
    database: web::Data<AppDatabase>,
    smtp: web::Data<AppSmtp>,
    path: ValidatedPath<UpdateIdPath>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user-email-updating", "resend-others")?;
    resend_email_updating_email_impl(config, database, smtp, &path.update_id[..], None).await
}

// Change the password, revoke every outstanding token of the user, and push
// one UserUpdated plus one TokenRevoked event per revoked token.
async fn update_password_impl(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    auth: &Auth,
    uid: i32,
    password: String,
    old_password: Option<String>,
) -> ApiResult<()> {
    let updated_at = database
        .user_update_password(uid, password, old_password)
        .await
        .map_err(|err| match err {
            QueryError::UserNotFound => ApiError::UserNotFound,
            QueryError::WrongPassword => ApiError::WrongUserOrPassword,
            e => internal_server_error!(e),
        })?;
    let results = database
        .token_revoke_by_user(uid)
        .await
        .map_err(|e| internal_server_error!(e))?;
    subscriber.send_all(
        iter::once(UserUpdated {
            id: uid,
            username: None,
            email: None,
            password: Some(()),
            nickname: None,
            avatar: None,
            avatar128: None,
            blocked: None,
            updated_at,
        }.into()).chain(
            results.into_iter()
                .map(|result| TokenRevoked {
                    jti: result.id,
                    uid: result.user,
                }.into())
        )
            .collect(),
        auth
    )
        .await
        .map_err(|e| internal_server_error!(e))?;
    respond(())
}

#[derive(Debug, Validate, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdatePasswordForMeRequest {
    #[validate] password: Password,
    #[validate] old_password: Password
}

// POST /users/me/password — requires the current password.
async fn update_password_for_me(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    request: ValidatedJson<UpdatePasswordForMeRequest>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user-password", "update-self")?;
    let uid = auth.claims.as_ref().ok_or_else(|| ApiError::MissingAuthorizationHeader)?.uid;
    update_password_impl(database, subscriber, &auth, uid, request.password.clone().into(), Some(request.old_password.clone().into())).await
}

#[derive(Debug, Validate, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdatePasswordRequest {
    #[validate] password: Password,
}

// POST /users/{uid}/password — admin reset, no old password needed.
async fn update_password(
    database: web::Data<AppDatabase>,
    subscriber: web::Data<AppSubscriber>,
    uid_path: ValidatedPath<UidPath>,
    request: ValidatedJson<UpdatePasswordRequest>,
    auth: Auth,
) -> ApiResult<()> {
    auth.try_permission("user-password", "update")?;
    let uid = uid_path.uid.clone().into();
    update_password_impl(database, subscriber, &auth, uid, request.password.clone().into(), None).await
}

lazy_static! {
    // Whitelist of filterable/sortable fields for GET /users.
    static ref LIST_USER_GENERATOR: QueryConfig = QueryConfig::new()
        .field(FieldConfig::new_number_field::<i32>("id", Some("\"user\".id".into())))
        .field(FieldConfig::new_string_field("username", None))
        .field(FieldConfig::new_string_field("email", None))
        .field(FieldConfig::new_string_field("nickname", None))
        .field(FieldConfig::new("avatar")
            .partial_equal()
            .use_like())
        .field(FieldConfig::new("avatar128")
            .partial_equal()
            .use_like())
        .field(FieldConfig::new_number_field::<bool>("blocked", None))
        .field(FieldConfig::new_date_time_field("createdAt", Some("\"user\".created_at".into())))
        .field(FieldConfig::new_date_time_field("updatedAt", Some("\"user\".updated_at".into())))
        .field(FieldConfig::new("role")
            .partial_equal()
            .escape_handler(escape_unquoted::<i32>()));
}

// Cursor-pagination query parameters for GET /users.
#[derive(Debug, Validate, Deserialize)]
struct ListUserRequest {
    #[validate] before: Option<CursorField>,
    #[validate] after: Option<CursorField>,
    #[validate]
    #[serde(default)]
    size: PaginationSize,
    sort: Option<String>,
    #[serde(default)]
    desc: bool,
    #[serde(default)]
    query: String,
}

#[derive(Debug, Serialize)]
struct ListUserResponse {
    results: Vec<UserCursor>,
}

// GET /users — cursor-paginated listing with a whitelisted filter expression.
async fn list_user(
    database: web::Data<AppDatabase>,
    request: ValidatedQuery<ListUserRequest>,
    auth: Auth,
) -> ApiResult<ListUserResponse> {
    auth.try_permission("user", "list")?;
    let size = usize::from(request.size.clone());
    let users = process_query(
        &LIST_USER_GENERATOR, &request.before, &request.after, &request.sort,
        request.desc, &request.query,
        Box::new(move |condition, order_by, ordered_columns| async move {
            // Inner SELECT picks the page of user ids; the outer query joins the
            // roles back in and aggregates them per user. `condition`/`order_by`
            // come from the whitelisting query generator above.
            let statement = format!(
                "SELECT \"user\".id, username, email, nickname, avatar, avatar128, \
                blocked, \"user\".created_at, \"user\".updated_at, ARRAY_AGG(role.id) as roles \
                FROM ( \
                SELECT DISTINCT {} FROM \"user\", user_role, role \
                WHERE {} AND (NOT \"user\".deleted AND user_role.user = \"user\".id AND \
                user_role.role = role.id AND NOT role.deleted) \
                ORDER BY {} LIMIT {} \
                ) AS temp, \"user\", user_role, role \
                WHERE \"user\".id = temp.id AND NOT \"user\".deleted AND user_role.user = \"user\".id \
                AND user_role.role = role.id AND NOT role.deleted \
                GROUP BY \"user\".id \
                ORDER BY {}",
                ordered_columns, condition, order_by, size, order_by);
            Ok(database.db.read().await
                .query(&statement[..], &[])
                .await
                .map_err(|e| internal_server_error!(e))?)
        }.boxed_local())
    ).await?.iter()
        .map(UserAll::from)
        .collect::<Vec<_>>();
    let results = users.into_iter()
        .map(|user| {
            UserCursor::try_from_user(User::All(user), &request.sort)
        })
        .collect::<Result<Vec<_>, _>>()?;
    respond(ListUserResponse { results })
}

// Builds the users/registration service tree. Creates the avatar directory at
// startup (failure only logged) and wires per-scope extractor configs.
pub fn users_api(
    config: &web::Data<AppConfig>,
    database: &web::Data<AppDatabase>,
    subscriber: &web::Data<AppSubscriber>,
    smtp: &web::Data<AppSmtp>,
) -> Box<dyn FnOnce(&mut web::ServiceConfig)> {
    if let Err(e) = std::fs::create_dir_all(Path::new(&config.config.media.root)
        .join(crate::constants::AVATAR_FOLDER)) {
        error!("failed to create directory {}", e);
    }
    let config = config.clone();
    let database = database.clone();
    let subscriber = subscriber.clone();
    let smtp = smtp.clone();
    Box::new(move |cfg| {
        cfg.service(
            web::scope("registrations")
                .service(
                    web::scope("/{reg_id}/confirm")
                        .wrap(default_confirm_rate_limit(database.clone()))
                        .route("", web::post().to(confirm_registration))
                )
                .route("/{reg_id}/resend", web::post().to(resend_registration_email))
                .route("/{reg_id}", web::get().to(query_registration))
                .route("", web::post().to(register_user))
        ).service(
            web::scope("registrations-others")
                .route("/{reg_id}/confirm", web::post().to(confirm_registration_for_others))
        ).service(
            web::scope("email-updating")
                .service(
                    web::scope("/{update_id}/confirm")
                        .wrap(default_confirm_rate_limit(database.clone()))
                        .route("", web::post().to(confirm_email_updating))
                )
                .route("/{update_id}/resend", web::post().to(resend_email_updating_email))
                .route("/{update_id}", web::get().to(query_email_updating))
                .route("", web::post().to(update_user_email))
        ).service(
            web::scope("email-updating-others")
                .route("/{update_id}/confirm", web::post().to(confirm_email_updating_for_others))
                .route("/{update_id}/resend", web::post().to(resend_email_updating_email_for_others))
                .route("/{update_id}", web::get().to(query_email_updating_for_others))
        ).service(
            web::scope("users")
                .route("/check-username-existence", web::get().to(check_username_existence))
                .route("/check-email-existence", web::get().to(check_email_existence))
                .service(
                    web::scope("/me/avatar")
                        .app_data(config.clone())
                        .app_data(database.clone())
                        .app_data(subscriber.clone())
                        .app_data(smtp.clone())
                        .app_data(default_json_config())
                        .app_data(default_path_config())
                        .app_data(default_query_config())
                        .app_data(avatar_multer_config())
                        .route("", web::post().to(upload_avatar_for_me))
                        .route("", web::delete().to(delete_avatar_for_me))
                )
                .route("/me/password", web::post().to(update_password_for_me))
                .route("/me", web::get().to(read_user_for_me))
                .route("/me", web::patch().to(update_user_for_me))
                // .route("/me", web::delete().to(index))
                .service(
                    web::scope("/{uid}/avatar")
                        .app_data(config.clone())
                        .app_data(database.clone())
                        .app_data(subscriber.clone())
                        .app_data(smtp)
                        .app_data(default_json_config())
                        .app_data(default_path_config())
                        .app_data(default_query_config())
                        // NOTE(review): default_path_config() is registered twice
                        // in this scope — the second registration looks redundant.
                        .app_data(default_path_config())
                        .app_data(avatar_multer_config())
                        .route("", web::post().to(upload_avatar))
                        .route("", web::delete().to(delete_avatar))
                )
                .route("/{uid}/password", web::post().to(update_password))
                // .route("/{uid}/roles", web::post().to(index))
                .route("/{uid}", web::get().to(read_user))
                .route("/{uid}", web::patch().to(update_user))
                // .route("/{uid}", web::delete().to(index))
                .route("", web::post().to(create_user))
                .route("", web::get().to(list_user))
        ).service(
            web::scope("public-users")
                .route("/{uid}", web::get().to(read_user_public))
                // .route("", web::get().to(index))
        );
    })
}
use std::error::Error; use serde::Deserialize; #[derive(Deserialize)] pub enum AssociationType { #[serde(alias = "bind")] Bind, #[serde(alias = "connect")] Connect, } impl Default for AssociationType { fn default() -> Self { AssociationType::Bind } } #[derive(Default, Deserialize)] pub struct SocketParameters<'a> { pub address: &'a str, pub socket_type: SocketType, pub association_type: AssociationType, pub socket_id: Option<&'a str>, pub topic: Option<&'a str>, } #[derive(Deserialize)] #[allow(non_camel_case_types)] pub enum SocketType { PUB, SUB, REQ, REP, PUSH, PULL, PAIR, ROUTER, DEALER, } impl Default for SocketType { fn default() -> Self { SocketType::PAIR } } impl SocketType { pub fn default_association(&self) -> AssociationType { match self { Self::PUB => AssociationType::Bind, Self::SUB => AssociationType::Connect, Self::REQ => AssociationType::Connect, Self::REP => AssociationType::Bind, Self::PUSH => AssociationType::Connect, Self::PULL => AssociationType::Bind, Self::PAIR => AssociationType::Bind, Self::ROUTER => AssociationType::Bind, Self::DEALER => AssociationType::Bind, } } } impl std::convert::From<SocketType> for &str { fn from(s: SocketType) -> Self { (&s).into() } } impl std::convert::From<&SocketType> for &str { fn from(s: &SocketType) -> Self { match s { SocketType::PUB => "PUB", SocketType::SUB => "SUB", SocketType::REQ => "REQ", SocketType::REP => "REP", SocketType::PUSH => "PUSH", SocketType::PULL => "PULL", SocketType::PAIR => "PAIR", SocketType::ROUTER => "ROUTER", SocketType::DEALER => "DEALER", } } } impl std::convert::From<&str> for SocketType { fn from(s: &str) -> Self { match s { "PUB" => SocketType::PUB, "SUB" => SocketType::SUB, "REQ" => SocketType::REQ, "REP" => SocketType::REP, "PUSH" => SocketType::PUSH, "PULL" => SocketType::PULL, "PAIR" => SocketType::PAIR, "ROUTER" => SocketType::ROUTER, "DEALER" => SocketType::DEALER, _ => SocketType::PAIR, } } } impl std::fmt::Display for SocketType { fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str(self.into()) } } pub fn create_socket(ctx: &zmq::Context, parameters: &SocketParameters) -> Result<zmq::Socket, Box<dyn Error>> { println!("Socket type: {}", parameters.socket_type); let socket = ctx.socket(match parameters.socket_type { SocketType::PUB => zmq::PUB, SocketType::SUB => zmq::SUB, SocketType::PUSH => zmq::PUSH, SocketType::PULL => zmq::PULL, SocketType::PAIR => zmq::PAIR, SocketType::REQ => zmq::REQ, SocketType::REP => zmq::REP, SocketType::ROUTER => zmq::ROUTER, SocketType::DEALER => zmq::DEALER, })?; if let Some(id) = parameters.socket_id { socket.set_identity(id.as_bytes())?; } let _ = socket.set_subscribe(parameters.topic.unwrap_or("").as_bytes()); match parameters.association_type { AssociationType::Connect => socket.connect(parameters.address)?, AssociationType::Bind => socket.bind(parameters.address)?, }; Ok(socket) } pub fn parse(json: &str) -> SocketParameters { serde_json::from_str(json).unwrap() //.unwrap_or_else(|_| SocketParameters::default()) } #[cfg(test)] mod test { use super::*; #[test] fn parsing_socket_parameters_from_json() { let json = r#"{ "address": "tcp://localhost:5559", "socket_type": "PULL", "association_type": "bind", "socket_id": "ID1", "topic": "TOPIC1" }"#; let parsed = parse(json); assert_eq!(Some("TOPIC1"), parsed.topic); } }
/*!
The build script has two primary jobs:

1. Do code generation. Currently, this consists of turning `data/winver.json`
into an appropriate `enum`.

2. Tell Cargo to link against Clang.
*/
extern crate itertools;
extern crate serde;

use std::env;
use std::fs;
use std::io;
use std::io::prelude::*;
use std::path::{Path, PathBuf};

use itertools::Itertools;

/// `pj` as in "path join".
macro_rules! pj {
    ($p0:expr, $($ps:expr),*) => {
        {
            let mut pb = PathBuf::from($p0);
            $(pb.push($ps);)*
            pb
        }
    }
}

fn main() {
    // The build script itself is a dependency of everything it generates:
    // editing build.rs must force regeneration.
    let self_path = pj!(get_manifest_dir(), "build.rs");
    let gen_path = make_gen_dir();
    let data_path = get_data_dir();
    {
        // Regenerate $OUT_DIR/src/winver.rs only when it is older than this
        // script or the JSON data it is derived from.
        let winver_rs = pj!(&gen_path, "winver.rs");
        let winver_json = pj!(&data_path, "winver.json");
        if is_target_stale(&winver_rs, &[&self_path, &winver_json]) {
            let mut f = fs::File::create(winver_rs).ok().expect("create winver.rs");
            gen_winver_enum(&mut f, &read_file_str(winver_json));
            f.flush().unwrap();
        }
    }
    link_clang();
}

/// Creates (if needed) and returns `$OUT_DIR/src`, where generated code goes.
fn make_gen_dir() -> PathBuf {
    let gen_path = pj!(get_out_dir(), "src");
    // Ignore the error: the directory usually already exists.
    let _ = fs::create_dir(&gen_path).ok();
    gen_path
}

/// The `data/` directory next to the crate manifest.
fn get_data_dir() -> PathBuf {
    pj!(get_manifest_dir(), "data")
}

/// True when `target` is missing or older than any of `dep_paths`.
///
/// NOTE(review): only a Windows implementation is visible in this chunk;
/// presumably a non-Windows variant exists elsewhere, or the crate is
/// Windows-only — confirm before building on other platforms.
#[cfg(windows)]
fn is_target_stale<P0, P1>(target: P0, dep_paths: &[P1]) -> bool
where P0: AsRef<Path>, P1: AsRef<Path> {
    use std::os::windows::fs::MetadataExt;
    // A missing target reads as timestamp 0 (always stale); a missing
    // dependency reads as 1 so it still compares newer and forces a rebuild.
    let target_ts = fs::metadata(&target).map(|md| md.last_write_time()).unwrap_or(0);
    dep_paths.iter().any(|dp| fs::metadata(dp).map(|md| md.last_write_time()).unwrap_or(1) > target_ts)
}

/// Generates the `WinVersion` enum from the name → version-number map in
/// `winver.json`. Keys beginning with `*` are aliases (emitted as associated
/// consts) rather than primary variant names.
///
/// NOTE(review): `serde::json` is the pre-1.0 serde API — this build script
/// predates `serde_json` being a separate crate; it will not compile against
/// modern serde. Confirm the intended toolchain before touching this.
fn gen_winver_enum<W>(out: &mut W, json_str: &str) where W: io::Write {
    use std::collections::HashMap;
    use serde::json;

    println!("# gen_winver_enum(..)");

    let root: HashMap<String, String> = json::from_str(json_str).unwrap();

    // First, parse those version numbers!
    let root: HashMap<_, u32> = root.into_iter()
        .map(|(k, v)| (k, parse_int(&v)))
        .collect();

    // Make the map of "primary" names.
    let primary: HashMap<&str, u32> = root.iter()
        .filter(|&(ref k, _)| !k.starts_with("*"))
        .map(|(k, v)| (&**k, *v))
        .collect();

    // Make the reverse lookup map.
    let reverse: HashMap<u32, &str> = primary.iter().map(|(k, v)| (*v, *k)).collect();
    // Primary names must map to distinct version numbers, or the reverse
    // lookup (and thus the generated enum) would be ambiguous.
    assert_eq!(primary.len(), reverse.len());

    // Make the map of non-primary aliases (with the leading `*` stripped).
    let mut aliases: Vec<(&str, u32)> = root.iter().filter(|&(k, _)| k.starts_with("*"))
        .map(|(k, v)| (&k[1..], *v)).collect();
    aliases.sort();

    // Get a sorted list of versions.
    let mut vers: Vec<u32> = primary.values().cloned().collect();
    vers.sort();

    // Use that to get a "next" version map. The last version's "next" is a
    // synthetic guard value one past it (emitted as AFTER_LAST below).
    let next_ver_iter = vers.iter().cloned().skip(1).chain(Some(vers.last().unwrap() + 1).into_iter());
    let next_ver: HashMap<u32, u32> = vers.iter().cloned().zip(next_ver_iter).collect();

    // Generate the enum.
    write!(out, r#"
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
#[repr(u32)]
pub enum WinVersion {{
{primary_variants}
}}

impl WinVersion {{
{alias_consts}

    pub const AFTER_LAST: u32 = 0x{guard_const:08x};

    pub fn from_name(name: &str) -> Option<WinVersion> {{
        match name {{
{from_names}
            _ => None
        }}
    }}

    pub fn next_version(self) -> Option<WinVersion> {{
        match self {{
{next_versions}
            _ => None
        }}
    }}

    pub fn from_u32_round_up(v: u32) -> Option<WinVersion> {{
{from_u32_round_ups}
        None
    }}
}}
"#,
        primary_variants = vers.iter().cloned()
            .map(|v| format!("    {:<8} = 0x{:08x},", reverse[&v], v))
            .join("\n"),
        alias_consts = aliases.iter()
            .map(|&(k, v)| format!("    pub const {:<7}: WinVersion = WinVersion::{};", k, reverse[&v]))
            .join("\n"),
        guard_const = next_ver[vers.last().unwrap()],
        from_names = primary.iter().map(|(&k, &v)| (k, v)).chain(aliases.iter().map(|&(k, v)| (k, v)))
            .map(|(k, v)| format!("            \"{}\" => Some(WinVersion::{}),", k, reverse[&v]))
            .join("\n"),
        // The last version's "next" is the synthetic guard, which has no
        // variant in `reverse`, so it is filtered out here and falls through
        // to the `_ => None` arm in the generated match.
        next_versions = vers.iter().cloned()
            .filter(|&k| reverse.contains_key(&next_ver[&k]))
            .map(|k| format!("            WinVersion::{:<8} => Some(WinVersion::{}),", reverse[&k], reverse[&next_ver[&k]]))
            .join("\n"),
        from_u32_round_ups = vers.iter().cloned()
            .map(|k| format!("        if v <= 0x{:08x} {{ return Some(WinVersion::{}); }}", k, reverse[&k]))
            .join("\n"),
    ).unwrap();
}

/// Tells Cargo to link libclang from `bin/<target-triple>/`.
fn link_clang() {
    let manifest_path = get_manifest_dir();
    let bin_dir = pj!(manifest_path, "bin", get_target());
    println!("cargo:rustc-link-lib=clang");
    println!("cargo:rustc-link-search={}", bin_dir.to_str().unwrap());
}

/// Parses a decimal or `0x`-prefixed hexadecimal integer; panics on garbage.
fn parse_int(s: &str) -> u32 {
    if s.starts_with("0x") || s.starts_with("0X") {
        u32::from_str_radix(&s[2..], 16).unwrap()
    } else {
        s.parse().unwrap()
    }
}

/// Reads an entire file into a `String`; panics (with the path) on failure.
fn read_file_str<P>(path: P) -> String where P: AsRef<Path> + ::std::fmt::Debug {
    let mut s = String::new();
    let _ = fs::File::open(&path).ok().expect(&format!("open {:?}", path))
        .read_to_string(&mut s).ok().expect(&format!("read from {:?}", path));
    s
}

/// Crate root; falls back to "." when not run under Cargo.
fn get_manifest_dir() -> PathBuf {
    env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into()).into()
}

/// Cargo's output directory; required, so missing is a hard error.
fn get_out_dir() -> PathBuf {
    env::var("OUT_DIR").ok().expect("OUT_DIR *must* be set").into()
}

/// Target triple, defaulting to 32-bit Windows GNU.
fn get_target() -> PathBuf {
    env::var("TARGET").unwrap_or_else(|_| "i686-pc-windows-gnu".into()).into()
}
use libra_types::account_address::AccountAddress; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub struct Type(pub String); /// A vertex. /// /// Vertices are how you would represent nouns in the datastore. An example /// might be a user, or a movie. All vertices have a unique ID and a type. #[derive(Clone, Debug, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub struct Vertex { /// The id of the vertex. pub id: AccountAddress, /// The type of the vertex. pub t: Type, } impl Vertex { /// Creates a new vertex with an ID generated via UUIDv1. These vertex IDs /// are trivially guessable and consequently less secure, but likely index /// better depending on the datastore. This method is suggested unless you /// need vertex IDs to not be trivially guessable. /// /// # Arguments /// /// * `t` - The type of the vertex. pub fn new(id: AccountAddress, t: Type) -> Self { Self { id, t } } pub fn new_with_bi_type(id: AccountAddress) -> Self { let t = Type("BI".to_string()); Self { t, id } } } impl PartialEq for Vertex { fn eq(&self, other: &Vertex) -> bool { self.id == other.id } } impl Eq for Vertex {}
use std::time::SystemTime;

use actix::prelude::*;
use diesel::{self, prelude::*};

use crate::common::error::ServerError;
use crate::models::{
    executor::DatabaseExecutor as DbExecutor,
    paste::{NewPaste, Paste},
};

/// Actor message: insert a new paste with the given title/body.
pub struct CreatePasteMsg {
    pub title: String,
    pub body: String,
    pub created_at: SystemTime,
}

impl Message for CreatePasteMsg {
    type Result = Result<Paste, ServerError>;
}

impl Handler<CreatePasteMsg> for DbExecutor {
    type Result = Result<Paste, ServerError>;

    fn handle(&mut self, msg: CreatePasteMsg, _: &mut Self::Context) -> Self::Result {
        use crate::models::schema::pastes::dsl::*;

        // A brand-new paste starts with modified_at == created_at.
        let new_paste = NewPaste {
            title: &msg.title,
            body: &msg.body,
            created_at: &msg.created_at,
            modified_at: &msg.created_at,
        };
        // self.0 is presumably an r2d2 connection pool (see ServerError::R2d2);
        // checkout failures and query failures map to distinct error variants.
        diesel::insert_into(pastes)
            .values(&new_paste)
            .get_result(&self.0.get().map_err(ServerError::R2d2)?)
            .map_err(ServerError::Database)
    }
}

/// Actor message: overwrite title/body of the paste with the given id and
/// bump its modification time.
pub struct UpdatePasteMsg {
    pub id: i64,
    pub title: String,
    pub body: String,
    pub modified_at: SystemTime,
}

impl Message for UpdatePasteMsg {
    type Result = Result<Paste, ServerError>;
}

impl Handler<UpdatePasteMsg> for DbExecutor {
    type Result = Result<Paste, ServerError>;

    fn handle(&mut self, msg: UpdatePasteMsg, _: &mut Self::Context) -> Self::Result {
        use crate::models::schema::pastes::dsl::*;

        diesel::update(pastes.find(msg.id))
            .set((
                title.eq(msg.title),
                body.eq(msg.body),
                modified_at.eq(msg.modified_at),
            )).get_result(&self.0.get().map_err(ServerError::R2d2)?)
            .map_err(ServerError::Database)
    }
}

/// Actor message: fetch a single paste by primary key.
/// A missing row surfaces as ServerError::Database (diesel NotFound).
pub struct GetPasteByIdMsg {
    pub id: i64,
}

impl Message for GetPasteByIdMsg {
    type Result = Result<Paste, ServerError>;
}

impl Handler<GetPasteByIdMsg> for DbExecutor {
    type Result = Result<Paste, ServerError>;

    fn handle(&mut self, msg: GetPasteByIdMsg, _: &mut Self::Context) -> Self::Result {
        use crate::models::schema::pastes::dsl::*;

        pastes
            .find(msg.id)
            .get_result(&self.0.get().map_err(ServerError::R2d2)?)
            .map_err(ServerError::Database)
    }
}

/// Sortable column of a paste, used by `Orderby`.
#[derive(Debug)]
pub enum Item {
    Title,
    Body,
    CreatedAt,
    ModifiedAt,
}

/// Sort direction (`Decrease` = descending).
#[derive(Debug)]
pub enum Order {
    Ascend,
    Decrease,
}

/// Comparison operator for `TimeCondition` filters.
#[derive(Debug)]
pub enum CmpOp {
    GT,
    EQ,
    LT,
    GE,
    LE,
}

/// One ordering directive: which column, which direction.
#[derive(Debug)]
pub struct Orderby {
    pub item: Item,
    pub order: Order,
}

/// A timestamp filter: `column <op> time`.
#[derive(Debug)]
pub struct TimeCondition {
    pub op: CmpOp,
    pub time: SystemTime,
}

// Applies one comparison filter to a boxed query, dispatching on the
// runtime CmpOp. Written as a macro because each diesel filter call has a
// different static type.
macro_rules! cmp {
    ($query:expr, $column:expr, $cmp:expr, $cond:expr) => {
        match $cmp {
            CmpOp::GT => $query.filter($column.gt($cond)),
            CmpOp::EQ => $query.filter($column.eq($cond)),
            CmpOp::LT => $query.filter($column.lt($cond)),
            CmpOp::GE => $query.filter($column.ge($cond)),
            CmpOp::LE => $query.filter($column.le($cond)),
        }
    };
}

// Applies an ordering in the requested direction to a boxed query.
macro_rules! order {
    ($query:expr, $column:expr, $order:expr) => {
        match $order {
            Order::Ascend => $query.order($column.asc()),
            Order::Decrease => $query.order($column.desc()),
        }
    };
}

// Dispatches an `Orderby` to the concrete column. Relies on the
// `pastes::dsl::*` names (title, body, ...) being in scope at the call site.
macro_rules! orderby {
    ($query:expr, $column:expr, $order:expr) => {
        match $column {
            Item::Title => order!($query, title, $order),
            Item::Body => order!($query, body, $order),
            Item::CreatedAt => order!($query, created_at, $order),
            Item::ModifiedAt => order!($query, modified_at, $order),
        }
    };
}

/// Actor message: list pastes with optional prefix filters, time filters,
/// ordering, and pagination. All fields are optional; `Default` gives
/// limit 20 / offset 0.
pub struct GetPasteListMsg {
    pub title_pat: Option<String>,
    pub body_pat: Option<String>,
    pub created_at: Option<TimeCondition>,
    pub modified_at: Option<TimeCondition>,
    pub orderby_list: Option<Vec<Orderby>>,
    pub limit: Option<i64>,
    pub offset: Option<i64>,
}

impl Default for GetPasteListMsg {
    fn default() -> Self {
        GetPasteListMsg {
            title_pat: None,
            body_pat: None,
            created_at: None,
            modified_at: None,
            orderby_list: None,
            limit: Some(20),
            offset: Some(0),
        }
    }
}

impl Message for GetPasteListMsg {
    type Result = Result<Vec<Paste>, ServerError>;
}

impl Handler<GetPasteListMsg> for DbExecutor {
    type Result = Result<Vec<Paste>, ServerError>;

    fn handle(&mut self, msg: GetPasteListMsg, _: &mut Self::Context) -> Self::Result {
        use crate::models::schema::pastes::dsl::*;

        // into_boxed() erases the concrete query type so filters/orderings
        // can be attached conditionally at runtime.
        let mut query = pastes.into_boxed();
        // Case-insensitive prefix match on title/body ("pat%").
        if let Some(title_pat) = msg.title_pat {
            query = query.filter(title.ilike(title_pat.to_owned() + "%"));
        }
        if let Some(body_pat) = msg.body_pat {
            query = query.filter(body.ilike(body_pat.to_owned() + "%"));
        }
        if let Some(cond) = msg.created_at {
            query = cmp!(query, created_at, cond.op, cond.time);
        }
        if let Some(cond) = msg.modified_at {
            query = cmp!(query, modified_at, cond.op, cond.time);
        }
        if let Some(orderby_list) = msg.orderby_list {
            // NOTE(review): diesel's `.order(...)` *replaces* any previously
            // applied ordering, so when orderby_list has several entries only
            // the last one takes effect. `then_order_by` was probably
            // intended for the second and later entries — confirm against
            // diesel's QueryDsl docs before changing.
            for orderby in orderby_list {
                query = orderby!(query, orderby.item, orderby.order);
            }
        }
        if let Some(limit) = msg.limit {
            query = query.limit(limit);
        }
        if let Some(offset) = msg.offset {
            query = query.offset(offset);
        }
        query
            .load::<Paste>(&self.0.get().map_err(ServerError::R2d2)?)
            .map_err(ServerError::Database)
    }
}

/// Actor message: delete a paste by id; resolves to the number of rows
/// deleted (0 when the id did not exist).
pub struct DelPasteByIdMsg {
    pub id: i64,
}

impl Message for DelPasteByIdMsg {
    type Result = Result<usize, ServerError>;
}

impl Handler<DelPasteByIdMsg> for DbExecutor {
    type Result = Result<usize, ServerError>;

    fn handle(&mut self, msg: DelPasteByIdMsg, _: &mut Self::Context) -> Self::Result {
        use crate::models::schema::pastes::dsl::*;

        diesel::delete(pastes)
            .filter(id.eq(msg.id))
            .execute(&self.0.get().map_err(ServerError::R2d2)?)
            .map_err(ServerError::Database)
    }
}
use serde::{Deserialize, Serialize};

/// A clickable link asset: destination URL plus optional fallback and
/// third-party click trackers.
/// NOTE(review): the field names and wire names (`urlfb`, `trkr`, `ext`)
/// appear to follow the OpenRTB Native Ads `Link` object — confirm against
/// the spec before extending.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct LinkAsset {
    /// Landing URL of the clickable link.
    url: String,
    /// Fallback URL to use when `url` is unsupported; wire name "urlfb".
    /// Optional: absent in JSON deserializes to None.
    #[serde(rename = "urlfb")]
    fallback_url: Option<String>,
    /// Third-party tracker URLs fired on click; wire name "trkr".
    // NOTE(review): not Option and no #[serde(default)], so JSON missing
    // "trkr" fails to deserialize — verify that is intended.
    #[serde(rename = "trkr")]
    third_party_tracker_url: Vec<String>,
    /// Placeholder for exchange-specific extensions.
    ext: Option<LinkAssetExt>,
}

/// Currently-empty extension object for `LinkAsset`.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct LinkAssetExt {}
extern crate cgmath;
extern crate image;
extern crate rand;

use cgmath::{dot, prelude::*, vec3, Vector3};

/// A ray from `origin` along `direction`.
/// (assumes `direction` is normalized — true for the rays built in main)
struct Ray {
    origin: Vector3<f64>,
    direction: Vector3<f64>,
}

/// Result of a successful ray/sphere hit test.
struct Intersection {
    pos: Vector3<f64>,   // world-space hit point
    distance: f64,       // ray parameter t at the hit (>= 0)
    color: Vector3<f64>, // flat color of the hit object
}

/// A sphere with a flat (unshaded) color.
struct Sphere {
    center: Vector3<f64>,
    radius: f64,
    color: Vector3<f64>,
}

impl Sphere {
    /// Geometric ray/sphere intersection: returns the nearest hit in front
    /// of the ray origin, or None when the ray misses.
    fn get_intersection(&self, ray: &Ray) -> Option<Intersection> {
        // https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-sphere-intersection
        let l = self.center - ray.origin;
        // Projection of the center onto the ray; negative means the sphere
        // center is behind the origin, so reject early.
        let tca = dot(l, ray.direction);
        if tca < 0.0 {
            return None;
        }
        // Squared perpendicular distance from the center to the ray line;
        // greater than r^2 means the ray misses entirely.
        let d2 = l.magnitude2() - tca * tca;
        let radius2 = self.radius * self.radius;
        if d2 > radius2 {
            return None;
        }
        // Half-chord length: entry/exit parameters are tca -/+ thc.
        let thc = (radius2 - d2).sqrt();
        let mut t0 = tca - thc;
        let mut t1 = tca + thc;
        if t0 > t1 {
            std::mem::swap(&mut t0, &mut t1);
        }
        // Entry point behind the origin (origin inside the sphere): fall
        // back to the exit point; if that is also behind, no visible hit.
        if t0 < 0.0 {
            t0 = t1;
            if t0 < 0.0 {
                return None;
            }
        }
        Some(Intersection {
            pos: ray.origin + t0 * ray.direction,
            distance: t0,
            color: self.color,
        })
    }
}

/// Color of the closest sphere hit by `ray`; black when nothing is hit.
fn trace(spheres: &[Sphere], ray: &Ray) -> Vector3<f64> {
    let mut closest_hit = Intersection {
        pos: Vector3::zero(),
        distance: std::f64::MAX, // sentinel: nothing hit yet
        color: Vector3::zero(),  // background color (black)
    };
    for sphere in spheres {
        if let Some(hit) = sphere.get_intersection(&ray) {
            if hit.distance < closest_hit.distance {
                closest_hit = hit;
            }
        }
    }
    closest_hit.color
}

/// Renders a fixed five-sphere scene (one huge sphere acts as the floor)
/// to a 640x480 image and writes it to out.png.
fn main() {
    let spheres = vec![
        // Floor: a giant sphere far below the camera.
        Sphere {
            center: vec3(0.0, -10004.0, -20.0),
            radius: 10000.0,
            color: vec3(0.20, 0.20, 0.20),
        },
        Sphere {
            center: vec3(0.0, 0.0, -20.0),
            radius: 4.0,
            color: vec3(1.00, 0.32, 0.36),
        },
        Sphere {
            center: vec3(5.0, -1.0, -15.0),
            radius: 2.0,
            color: vec3(0.90, 0.76, 0.46),
        },
        Sphere {
            center: vec3(5.0, 0.0, -25.0),
            radius: 3.0,
            color: vec3(0.65, 0.77, 0.97),
        },
        Sphere {
            center: vec3(-5.5, 0.0, -15.0),
            radius: 3.0,
            color: vec3(0.90, 0.90, 0.90),
        },
    ];

    let nx = 640; // image width
    let ny = 480; // image height
    let ns = 1;   // samples per pixel
    let mut img = image::RgbImage::new(nx, ny);
    let inv_width = 1.0 / f64::from(nx);
    let inv_height = 1.0 / f64::from(ny);
    let fov = 30.0;
    let aspect_ratio = f64::from(nx) / f64::from(ny);
    // tan of half the vertical field of view, in radians.
    let angle = (std::f64::consts::PI * 0.5 * fov / 180.0).tan();
    for (x, y, pixel) in img.enumerate_pixels_mut() {
        let mut pixel_color = Vector3::zero();
        for _ in 0..ns {
            // Jittered sample position within the pixel.
            // NOTE(review): the random offset in [0,1) *plus* the +0.5
            // pixel-center offset below samples [0.5, 1.5) of the pixel —
            // a half-pixel shift; presumably one of the two was meant.
            let u = f64::from(x) + rand::random::<f64>();
            let v = f64::from(y) + rand::random::<f64>();
            // Map raster coordinates to camera-space directions on the
            // image plane at z = -1; y is flipped (raster y grows down).
            let xx = (2.0 * ((u + 0.5) * inv_width) - 1.0) * angle * aspect_ratio;
            let yy = (1.0 - 2.0 * ((v + 0.5) * inv_height)) * angle;
            let ray = Ray {
                origin: Vector3::zero(),
                direction: vec3(xx, yy, -1.0).normalize(),
            };
            pixel_color += trace(&spheres, &ray);
        }
        // Scale to 8-bit then average the samples. Colors are assumed to be
        // in [0,1] (true for the flat colors above), so no clamping is done.
        pixel_color *= 255.9999;
        pixel_color /= f64::from(ns);
        *pixel = image::Rgb([
            pixel_color.x as u8,
            pixel_color.y as u8,
            pixel_color.z as u8,
        ]);
    }
    img.save("out.png").unwrap();
}
use std::collections::{HashMap, HashSet, VecDeque};
use std::error::Error;
use std::fs::read_to_string;

// (review) `itertools` was only needed for `intersperse`, replaced below by
// the equivalent `Vec::join("\n")`; dropping the import also avoids the name
// clash with the unstable `Iterator::intersperse`.

type Cell = (usize, usize);
type Maze = HashMap<Cell, char>;
type Path = Vec<Cell>;

/// Reads `maze.txt`, solves it with BFS, and prints the maze with the found
/// path marked by `$` (or "no answer" when unsolvable).
fn main() -> Result<(), Box<dyn Error>> {
    let contents = read_to_string("maze.txt")?;
    let lines = contents.lines().collect::<Vec<&str>>();
    let ans = match solve(&lines) {
        Some(path) => {
            // Mark path cells with '$', never overwriting 'S'/'G'/walls.
            let solved_char = |x, y, ch| if path.contains(&(x, y)) && ch == ' ' { '$' } else { ch };
            lines
                .iter()
                .enumerate()
                .map(|(y, row)| {
                    row.char_indices()
                        .map(|(x, ch)| solved_char(x, y, ch))
                        .collect::<String>()
                })
                .collect::<Vec<String>>()
                .join("\n")
        }
        None => "no answer".to_string(),
    };
    println!("{}", ans);
    Ok(())
}

/// Finds a shortest path from 'S' to 'G'.
/// Returns None when either marker is missing or the goal is unreachable.
fn solve(lines: &[&str]) -> Option<Path> {
    let mz = parse_maze(lines);
    let start = mz.keys().find(|cell| mz.get(cell) == Some(&'S'))?;
    let goal = mz.keys().find(|cell| mz.get(cell) == Some(&'G'))?;
    by_b(&start, &goal, &mz)
}

/// Walkable neighbours of a cell, in the fixed order right/left/down/up
/// (preserved from the original so BFS tie-breaking is unchanged).
///
/// `checked_sub` guards the left/up neighbours: the previous `x - 1` /
/// `y - 1` underflowed `usize` (panicking in debug builds) for cells on the
/// top or left edge of an unwalled maze.
fn next_steps(&(x, y): &Cell, mz: &Maze) -> Path {
    let nexts = [
        Some((x + 1, y)),
        x.checked_sub(1).map(|px| (px, y)),
        Some((x, y + 1)),
        y.checked_sub(1).map(|py| (x, py)),
    ];
    nexts
        .iter()
        .flatten()
        .filter(|&cell| mz.contains_key(cell) && mz.get(cell) != Some(&'*'))
        .cloned()
        .collect()
}

/// Builds the cell -> character map from the input lines.
fn parse_maze(lines: &[&str]) -> Maze {
    let mut mz: Maze = HashMap::new();
    for (y, line) in lines.iter().enumerate() {
        for (x, ch) in line.char_indices() {
            mz.insert((x, y), ch);
        }
    }
    mz
}

/// Breadth-first search from `start` to `goal`.
///
/// Each queued path is stored newest-cell-first (steps are inserted at
/// index 0), so `path[0]` is the frontier cell and the returned path runs
/// goal -> start. BFS guarantees it is a shortest path.
fn by_b(&start: &Cell, &goal: &Cell, mz: &Maze) -> Option<Path> {
    let mut queue = VecDeque::from(vec![vec![start]]);
    let mut moved: HashSet<Cell> = HashSet::new();
    moved.insert(start);
    while let Some(path) = queue.pop_front() {
        if path[0] == goal {
            return Some(path);
        }
        for next_step in next_steps(&path[0], mz) {
            // HashSet::insert returns false for already-visited cells.
            if moved.insert(next_step) {
                let mut elem = path.clone();
                elem.insert(0, next_step);
                queue.push_back(elem);
            }
        }
    }
    None
}
use quote::quote_spanned;

use super::{
    FlowProperties, FlowPropertyVal, OperatorCategory, OperatorConstraints, OperatorWriteOutput,
    WriteContextArgs, RANGE_0, RANGE_1,
};
use crate::graph::OperatorInstance;

/// > 0 input streams, 1 output stream
///
/// > Arguments: An iterable Rust object.
/// Takes the iterable object and delivers its elements downstream
/// one by one.
///
/// Note that all elements are emitted during the first tick.
///
/// ```hydroflow
/// source_iter(vec!["Hello", "World"])
///     -> for_each(|x| println!("{}", x));
/// ```
pub const SOURCE_ITER: OperatorConstraints = OperatorConstraints {
    name: "source_iter",
    categories: &[OperatorCategory::Source],
    // A source: no inputs, exactly one output (hard and soft ranges agree).
    hard_range_inn: RANGE_0,
    soft_range_inn: RANGE_0,
    hard_range_out: RANGE_1,
    soft_range_out: RANGE_1,
    // Exactly one argument: the iterable expression.
    num_args: 1,
    persistence_args: RANGE_0,
    type_args: RANGE_0,
    is_external_input: false,
    // No named ports on either side.
    ports_inn: None,
    ports_out: None,
    properties: FlowProperties {
        // Determinism/monotonicity depend on the user-supplied iterable.
        deterministic: FlowPropertyVal::DependsOnArgs,
        monotonic: FlowPropertyVal::DependsOnArgs,
        inconsistency_tainted: false,
    },
    input_delaytype_fn: |_| None,
    write_fn: |wc @ &WriteContextArgs {
                   op_span,
                   ident,
                   op_inst: OperatorInstance { arguments, .. },
                   ..
               },
               _| {
        let iter_ident = wc.make_ident("iter");
        // Prologue (emitted once): materialize the user expression into a
        // stored iterator. `check_iter` only constrains the argument to
        // IntoIterator so a bad argument errors at the operator's span
        // (via quote_spanned) instead of deep inside generated code.
        let write_prologue = quote_spanned! {op_span=>
            let mut #iter_ident = {
                #[inline(always)]
                fn check_iter<IntoIter: ::std::iter::IntoIterator<Item = Item>, Item>(into_iter: IntoIter) -> impl ::std::iter::Iterator<Item = Item> {
                    ::std::iter::IntoIterator::into_iter(into_iter)
                }
                check_iter(#arguments)
            };
        };
        // Per-iteration code: expose the stored iterator by reference under
        // the operator's output identifier.
        let write_iterator = quote_spanned! {op_span=>
            let #ident = #iter_ident.by_ref();
        };
        Ok(OperatorWriteOutput {
            write_prologue,
            write_iterator,
            ..Default::default()
        })
    },
};
use std::str::FromStr;
use std::num::ParseIntError;

pub mod part1;
pub mod part2;

/// The puzzle input, compiled into the binary at build time.
pub fn default_input() -> &'static str {
    include_str!("input")
}

/// Runs both parts of the puzzle.
pub fn run() {
    part1::run();
    part2::run();
}

/// Parses one policy/password `Row` per input line.
///
/// Panics on malformed lines (acceptable for trusted, fixed puzzle input).
pub fn parse_input(input: &str) -> Vec<Row> {
    input.lines().map(|l| Row::from_str(l).unwrap()).collect()
}

/// One input line of the form `<min>-<max> <letter>: <password>`.
pub struct Row {
    min: usize,
    max: usize,
    letter: char,
    password: String,
}

impl FromStr for Row {
    type Err = ParseIntError;

    /// Parses `"1-3 a: abcde"` into min=1, max=3, letter='a',
    /// password="abcde".
    ///
    /// Panics (index out of bounds) on lines with fewer than four fields;
    /// only genuinely numeric-parse failures surface as `ParseIntError`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // "1-3 a: abcde" -> ["1", "3", "a:", "abcde"]
        let split: Vec<&str> = s.trim().split(|c| c == '-' || c == ' ').collect();
        let min: usize = split[0].parse()?;
        let max: usize = split[1].parse()?;
        // Take the first *character*, not the first byte: the previous
        // `as_bytes()[0] as char` silently corrupted any non-ASCII letter.
        let letter: char = split[2].chars().next().unwrap();
        let password = String::from(split[3].trim());
        Ok(Row { min, max, letter, password })
    }
}

impl Row {
    /// Part 1 rule: `letter` occurs between `min` and `max` times
    /// (inclusive) in the password.
    fn is_valid(&self) -> bool {
        let letter_count = self.password.matches(self.letter).count();
        (self.min..=self.max).contains(&letter_count)
    }

    /// Part 2 rule: exactly one of the 1-based character positions
    /// `min` / `max` holds `letter` (xor).
    fn is_valid2(&self) -> bool {
        let chars: Vec<char> = self.password.chars().collect();
        (chars[self.min - 1] == self.letter) ^ (chars[self.max - 1] == self.letter)
    }
}
use std::collections::HashMap;

/// Builds a small map of book reviews and demonstrates a negative
/// `contains_key` lookup (mirrors the std `HashMap` documentation example).
fn main() {
    let mut book_reviews: HashMap<String, String> = HashMap::new();
    book_reviews.insert(
        "Adventures of Huckleberry Finn".to_string(),
        "My favorite book.".to_string(),
    );
    book_reviews.insert(
        "Grimms' Fairy Tales".to_string(),
        "Masterpiece.".to_string(),
    );
    book_reviews.insert(
        "Pride and Prejudice".to_string(),
        "Very enjoyable.".to_string(),
    );
    book_reviews.insert(
        "The Adventures of Sherlock Holmes".to_string(),
        "Eye lyked it alot.".to_string(),
    );

    // Fixes from review: the key and message contained mojibake
    // ("Misรฉrables" — UTF-8 "é" mangled through a wrong encoding
    // round-trip), restored to "Misérables" as in the upstream example;
    // `== false` replaced by the idiomatic `!`.
    if !book_reviews.contains_key("Les Misérables") {
        println!(
            "We've got {} reviews, but Les Misérables ain't one.",
            book_reviews.len()
        );
    }
}