text
stringlengths
8
4.13M
// Role keyword lists and default note templates (plus the terminal background
// colour) used elsewhere in the crate.
//
// BG: dark-teal background colour for ansi_term terminal output.
// FAMILY_ROLES / FORMAL_ROLES / INDIRECT_ROLES: lowercase role strings,
// presumably matched case-insensitively against role input — TODO confirm at
// the call sites. Each array's length is part of its const type, so adding or
// removing an entry is a breaking type change.
// DEFAULT_NOTE_TEMPLATES: (title, body) pairs; the "(---...---)" tokens look
// like placeholders substituted by a template engine — verify against the
// consumer before editing them.
//
// NOTE(review): FAMILY_ROLES contains duplicates ("step-mother" and
// "family member" each appear twice); deduping would shrink [&str; 119] —
// confirm no caller depends on the length before fixing.
// NOTE(review): one "Intake" template hard-codes the name "Andrew" — looks
// like leaked sample data; verify and scrub upstream.
// NOTE(review): the "\ " sequences and mid-word line breaks inside string
// literals below appear to be collapsed backslash-newline continuations from
// the original formatting; content is preserved verbatim here.
use ansi_term::Colour; pub const BG: Colour = Colour::RGB(5, 40, 40); pub const FAMILY_ROLES: [&'static str; 119] = [ "family", "parent", "nuclear family", "nuclear family member", "family member", "immediate family", "spouse", "husband", "wife", "father", "mother", "step-father", "step father", "stepfather", "step-mother", "step mother", "stepmother", "step-mother", "legal guardian", "child", "son", "daughter", "step-son", "step son", "stepson", "step-daughter", "stepdaughter", "step daughter", "sibling", "brother", "sister", "extended family", "grandparent", "grandfather", "grandmother", "grandson", "paternal grandparent", "paternal grandfather", "paternal grandmother", "maternal grandparent", "maternal grandfather", "maternal grandmother", "maternal grandpa", "paternal grandpa", "maternal grandma", "paternal grandma", "uncle", "aunt", "cousin", "nephew", "niece", "family-in-law", "family in law", "father-in-law", "father in law", "mother-in-law", "mother in law", "brother in law", "sister in law", "kin", "kinship caregiver", "family member", "partner", "adoptive mother", "adoptive father", "birth mother", "birth father", "guardian", "adopted child", "adopted son", "adopted daughter", "adoptive sister", "adoptive sibling", "step sibling", "step-sibling", "stepsibling", "adoptive brother", "adopted brother", "adopted sister", "foster sister", "foster brother", "foster mother", "foster mom", "foster dad", "foster parent", "foster father", "former foster mom", "former foster mother", "former foster father", "former foster dad", "former foster parent", "former foster brother", "former foster sister", "grandma", "grandpa", "first cousin", "second cousin", "grandchild", "adoptive grandchild", "adopted grandchild", "adopted grandson", "adoptive grandson", "adoptive granddaughter", "adopted granddaughter", "adoptive uncle", "adoptive aunt", "half brother", "half sister", "biological mother", "biological father", "biological mom", "biological dad", "adoptive first 
cousin", "adoptive second cousin", "adoptive cousin", "foster sibling", "former foster sibling", "bio mom", "bio dad", ]; pub const FORMAL_ROLES: [&'static str; 69] = [ "intensive care coordinator", "icc", "family partner", "fp", "in home therapist", "in-home therapist", "iht", "in home behavioral therapist", "in-home behavioral therapist", "ihbt", "therapeutic mentor", "tm", "ot", "occupational therapist", "psychiatrist", "outpatient therapist", "opt", "guidance counselor", "school social worker", "social worker", "dcf worker", "guardian ad litem", "asentria worker", "mentor", "therapist", "behavioral therapist", "parole officer", "primary care physician", "pcp", "therapeutic training and support", "therapeutic training & support", "tt&s", "tt and s", "dmh worker", "clinician", "therapeutic training and support mentor", "therapeutic training & support mentor", "tt&s mentor", "tt and s mentor", "teacher", "special education teacher", "school guidance counselor", "lifeset worker", "lifeset mentor", "bcba", "yapm", "young adult peer mentor", "case manager", "dds case manager", "dds case coordinator", "case coordinator", "dmh case manager", "dcf worker", "dcf social worker", "behavior monitor", "bm", "hospital case manager", "mci family partner", "mobile crisis intervention family partner", "academic support", "adjustment counselor", "school adjustment counselor", "dds service coordinator", "service coordinator", "dds intensive case manager", "intensive case manager", "dds intensive flexible family support", "intensive flexible family support", "psychiatric nurse practitioner" ]; pub const INDIRECT_ROLES: [&'static str; 25] = [ "director", "clinical director", "principal", "assistant director", "assistant clinical director", "director of special education", "clinical supervisor", "director of social and emotional learning", "assistant director of special education", "crisis support worker", "crisis clinician", "crisis response clinician", "mci worker", "mci 
clinician", "mobile crisis intervention", "emergency services clinician", "emergency services", "crisis assessment clinician", "mobile crisis intervention clinician", "mobile crisis intervention worker", "crisis support clinician", "crisis response worker", "mobile crisis clinician", "intake coordinator", "supervisor", ]; pub const DEFAULT_NOTE_TEMPLATES: [(&'static str, &'static str); 28] = [ ( "Care Plan", "\ (---u---) met with (---cpt---) (---mm---). \ All team members completed introductions as necessary. \ (---cu---). \ The team went over all elements of the agenda including the team mission, \ family vision, and ground rules. \ The team went over strengths for (---c---) and (---pb3@5@---) family related to \ the current goal. \ Team members provided updates on and discussed (---c---)'s (---yt---). \ The team discussed updates on (---cpto---) for (---c---). \ (---cu---). \ The team brainstormed action steps for (---c---)'s goal of \"(---go---).\" \ (---u---) scheduled the (---im---) for (---c---) for (---c---).\ ", ), ( "Care Plan", "\ Met with (---co---) in family home in (---cu---) for CPM. \ (---u---) passed around sign-in sheet. \ The team reviewed ground rules and team mission. \ (---u---) reviewed team strengths and family vision. \ The team brainstormed around the following goal: (---go---). \ (---u---) assigned tasks to team members. \ The team reported no major safety concerns.\ ", ), ( "Intake", "\ (---u---) met with (---co---) for an intake for (---c---). \ (---u---) and (---p---) introduced themselves and discussed \ (---pc---)'s hopes and expectations for Wraparound using \ open-ended questions to explore changes the family wants to make. \ (---cu---). \ The team elicited (---pc---)'s experience of challenges (---pb1@8@---) and \ (---c---) have faced, including (---c---)'s (---yt---). \ The team elicited and reflected the family's strengths, including (---cu---). \ (---u---) explained the Wraparound process. 
\ (---u---) and (---p---) shared information about the limitations and intensive \ nature of Wraparound services as well as the role of different team members. \ (---u---) and (---p---) discussed their roles and the structure of Wraparound \ services. \ (---u---) reviewed the (---id---) with (---g---). \ (---u---) elicited verbal consent for the (---id---) and (---g---) (---sm---) \ the (---id---). \ (---g---) (---sm---) Release of Information forms permitting (---u---) and (---p---) \ to exchange (---c---)'s Protected Health Information with (---co---). \ (---cu---). \ (---u---) scheduled the (---im---) for (---cu---).\ ", ), ( "Intake", "\ (---u---) met for intake for (---c---) with (---co---) at the family's home in (---cu---). \ ICC reviewed wraparound process and paperwork. \ Paperwork included: (---id---). \ (---pc---) discussed her major needs/concerns for (---c---). \ (---pc---) reported that Andrew struggles with (---cu---). \ (---cu---). \ (---u---) and (---p---) further discussed the Wraparound process with the family \ and how the process could be tailored to their needs. \ (---cu---). \ (---u---) and (---p---) scheduled the (---im---) with the family.\ ", ), ( "Assessment", "\ (---u---) met with (---co---) for the assessment of (---c---). \ (---pc---) shared updates on (---c---)'s recent (---yt---). \ (---u---) listened and used open-ended questions to learn more about (---c---)'s \ experience and the perspectives of (---co---) while also addressing \ assessment of (---c---)'s recent (---yt---). \ (---cu---). \ (---u---) scheduled the (---im---) for (---cu---).\ ", ), ( "Assessment", "\ (---u---) met with (---co---) to complete comprehensive and CANS assessments for (---c---) in the \ family's home in (---cu---). (---u---) gathered (---c---)'s current needs and challenges. 
\ ICC gathered current and past supports and services, family background information, educational information, \ history of abuse/trauma, medications and medical information, diagnoses, and assessed risk/safety. \ (---cu---). \ ICC further assessed (---c---)'s needs and strengths. \ (---cu---).\ ", ), ( "Agenda Prep", "\ (---u---) met with (---co---) for the Agenda Prep for (---c---)'s next Care Plan Meeting.\ The team went over all elements of the agenda including the team mission, family vision, \ and ground rules. \ The team went over recent updates on (---c---)'s (---yt---). \ The team discussed potential treatment goals for (---c---) and agreed \ on addressing (---g---) for the next Care Plan Meeting. \ (---u---) scheduled the (---im---) for (---cu---).\ ", ), ( "Debrief", "\ (---u---) and (---p---) met with (---pc---) for the debrief of (---c---)'s most \ recent Care Plan Meeting. \ The team went over and agreed on action steps to implement, including (---cu---). \ (---cu---). \ (---u---) scheduled the (---im---) for (---cu---).\ ", ), ( "Referral", "\ (---u---) sent all required documents via (---cm---) to (---co---) in order to \ complete a referral for (---c---) for (---s---).\ ", ), ( "Parent Support", "\ (---u---) (---ps---) (---pc---) (---cu---). \ ", ), ( "Parent Appearance", "\ (---co---) appeared (---ap---). \ ", ), ( "Parent Skills", "\ (---co---) demonstrated effective (---ps---). \ ", ), ( "Brainstorm Contribution", "\ (---u---) contributed to the team brainstorm ideas for (---c---)'s \ Care Plan goal of \"(---go---).\"\ ", ), ( "Collateral Outreach", "\ (---u---) reached out to (---co---) via (---cm---) and (---fcp---). \ ", ), ( "Update From Collateral", "\ (---u---) received a (---cm---) from (---co---) informing (---pu2---) that (---cu---). \ ", ), ( "Discuss Communication", "\ (---u---) called (---co---) to discuss recent communication between (---co---). 
\ ", ), ( "Invited To Meeting", "\ (---u---) sent Zoom and Outlook invitations for the upcoming (---im---) for (---c---) \ to (---co---).\ ", ), ( "Failed Contact Attempt", "\ (---u---) reached out to (---co---) via (---cm---) to (---cp---) but was unable to reach (---pb2@2@---). \ ", ), ( "Received Verbal Consent", "\ (---u---) contacted (---g---) via (---cm---) and received verbal consent \ for (---u---) and Riverside Community Care to exchange (---c---)'s \ Protected Health Information with (---co---).\ ", ), ( "Received Written Consent", "\ (---u---) received written consent via (---cm---) for (---u---) and Riverside \ Community Care to exchange (---c---)'s Protected Health Information with (---co---).\ ", ), ( "Documentation", "\ (---u---) reviewed notes and completed daily logs for (---td---). \ ", ), ( "Documentation", "\ (---u---) entered into evolv summaries of treatment events for (---c---) for today, (---td---). \ ", ), ( "Updated Document", "\ (---u---) updated the (---id---) for (---c---) with updates on (---pc3---) (---yt---). \ ", ), ( "Sent Document", "\ (---u---) sent the (---id---) for (---c---) to (---co---) via (---cm---). \ ", ), ( "Sent Cancellation", "\ (---u---) emailed (---co---) to cancel the (---im---) for (---c---). \ ", ), ( "Authorization Requested", "\ (---u---) (---cm---) (---co---) to request a new insurance authorization for (---c---). \ ", ), ( "Authorization Issued", "\ (---u---) received a phone call from (---co---) confirming that a new insurance authorization \ had been issued for (---c---) for (---cu---) with reference number (---cu---). \ (---u---) emailed (---co---) to notify (---pb2@7@---).\ ", ), ( "Categorized Emails", "\ (---u---) categorized sent emails for (---c---) into separate folders for record keeping. \ ", ), ];
use serde::Serialize;

use common::result::Result;

use crate::application::dtos::{AuthorDto, CategoryDto, CollectionDto, PublicationDto};
use crate::domain::author::AuthorRepository;
use crate::domain::category::CategoryRepository;
use crate::domain::collection::{CollectionId, CollectionRepository};
use crate::domain::publication::PublicationRepository;

/// Serializable response shape for a collection lookup.
#[derive(Serialize)]
pub struct GetByIdResponse {
    pub id: String,
    pub author: AuthorDto,
    pub name: String,
    pub synopsis: String,
    pub category: CategoryDto,
    pub tags: Vec<String>,
    pub publications: Vec<PublicationDto>,
}

/// Use case: fetch one collection by id and map it — together with its
/// author, category, and every contained publication (each hydrated with
/// its own author and category) — into a `CollectionDto`.
pub struct GetById<'a> {
    author_repo: &'a dyn AuthorRepository,
    category_repo: &'a dyn CategoryRepository,
    collection_repo: &'a dyn CollectionRepository,
    publication_repo: &'a dyn PublicationRepository,
}

impl<'a> GetById<'a> {
    /// Wires the use case to the repositories it reads from.
    pub fn new(
        author_repo: &'a dyn AuthorRepository,
        category_repo: &'a dyn CategoryRepository,
        collection_repo: &'a dyn CollectionRepository,
        publication_repo: &'a dyn PublicationRepository,
    ) -> Self {
        Self {
            author_repo,
            category_repo,
            collection_repo,
            publication_repo,
        }
    }

    /// Resolves `collection_id` to a fully-hydrated `CollectionDto`.
    ///
    /// Errors from id validation and from any repository call propagate
    /// to the caller unchanged.
    pub async fn exec(&self, collection_id: String) -> Result<CollectionDto> {
        let id = CollectionId::new(collection_id)?;
        let collection = self.collection_repo.find_by_id(&id).await?;

        // The collection's own author and category.
        let owner = self.author_repo.find_by_id(collection.author_id()).await?;
        let collection_category = self
            .category_repo
            .find_by_id(collection.header().category_id())
            .await?;

        // Hydrate each publication with its own author/category before
        // mapping to a DTO.
        let mut publication_dtos = Vec::new();
        for item in collection.items().iter() {
            let publication = self
                .publication_repo
                .find_by_id(item.publication_id())
                .await?;
            let pub_author = self.author_repo.find_by_id(publication.author_id()).await?;
            let pub_category = self
                .category_repo
                .find_by_id(publication.header().category_id())
                .await?;

            let dto = PublicationDto::from(&publication)
                .author(AuthorDto::from(&pub_author))
                .category(CategoryDto::from(&pub_category))
                .status(&publication);
            publication_dtos.push(dto);
        }

        Ok(CollectionDto::from(&collection)
            .author(AuthorDto::from(&owner))
            .category(CategoryDto::from(&collection_category))
            .publications(publication_dtos))
    }
}
#[macro_use] extern crate askama; #[macro_use] extern crate lazy_static; use std::path::Path; pub mod config; pub mod env_scan; pub mod ike_scan; pub mod opts; pub mod report; use config::Config; use report::scan_report::ScanReport; lazy_static! { pub static ref CONFIG: Config = ::opts::get_opts(); } fn verify_env_reqs() { if !Path::new(ike_scan::IKE_SCAN_BIN).exists() { panic!("Unable to find binary dependency"); } if CONFIG.verbose { println!("Environment ready"); } } fn main() { if CONFIG.verbose { println!("IP of VPN endpoint: {}", CONFIG.vpn_endpoint_ip); println!("Using source port: {}", CONFIG.source_port); } verify_env_reqs(); let mut report = ScanReport::new(); ike_scan::run(&mut report); env_scan::run(&mut report); report.write_file(); }
#![allow(non_snake_case)] // b/c serialized message types from Jira use camelCase #![allow(dead_code)] mod credentials; mod jira_api; mod jira_sqlite; mod jira_types; use credentials::get_creds; use structopt::StructOpt; // use crossterm::{ // execute, input, style, AsyncReader, Clear, ClearType, Color, Crossterm, Goto, InputEvent, // KeyEvent, PrintStyledFont, RawScreen, Result, Show, // }; static JIRA_URL: &str = "https://jira.walmart.com/rest/api/2"; #[derive(StructOpt, Debug)] #[structopt(name = "fd-jira")] struct Opt { /// Synchronize issues with changes that occured since last sync sync: bool, } fn sync_issues() { // get the first (and possibly only) batch of issues let conn = rusqlite::Connection::open("./fd-jira.db").unwrap(); let mut cur_start: usize = 0; let mut count: usize = 0; let creds = get_creds().unwrap(); loop { let query = crate::jira_api::get_changed_issues(&creds, JIRA_URL, cur_start); jira_sqlite::write_issues(&conn, &query.issues).unwrap(); count += query.issues.len(); if count < query.total { cur_start = count; } else { println!("sync complete. {} issues refreshed.", count); break; } } } fn main() { // let crossterm = Crossterm::new(); // let _raw = RawScreen::into_raw_mode(); // crossterm.cursor().hide()?; let opt = Opt::from_args(); jira_sqlite::init_database().unwrap(); if opt.sync { sync_issues(); } let creds = get_creds().unwrap(); jira_api::get_issue_snapshot(&creds, JIRA_URL, "RCTFD-4472"); }
use crate::{ compute::{intersect_tris, partition_tris, ComputeError, Vk, VkError}, geo::*, }; use pyo3::{exceptions::PyException, prelude::*}; impl From<VkError> for PyErr { fn from(err: VkError) -> Self { PyException::new_err(err.to_string()) } } impl From<ComputeError> for PyErr { fn from(err: ComputeError) -> Self { PyException::new_err(err.to_string()) } } #[pymodule] pub fn compute(_py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m, "init_vk")] fn init_vk_py(_py: Python) -> PyResult<Vk> { Ok(Vk::new()?) } #[pyfn(m, "intersect_tris")] fn intersect_tris_py(_py: Python, tris: Vec<Triangle3d>, points: Vec<Point3d>, vk: &Vk) -> PyResult<Vec<Point3d>> { Ok(intersect_tris(&tris, &points, &vk)?) } #[pyfn(m, "partition_tris")] fn partition_tris_py( _py: Python, tris: Vec<Triangle3d>, columns: Vec<Line3d>, vk: &Vk, ) -> PyResult<Vec<Vec<Triangle3d>>> { Ok(partition_tris(&tris, &columns, vk)?) } Ok(()) }
// svd2rust-style generated register accessors for APB3LPENR (peripheral clock
// enable during CSleep mode). Machine-generated layout — do not hand-edit;
// regenerate from the SVD instead. Everything below is preserved verbatim
// (including line breaks that fall inside #[doc] string literals).
#[doc = "Register `APB3LPENR` reader"] pub type R = crate::R<APB3LPENR_SPEC>; #[doc = "Register `APB3LPENR` writer"] pub type W = crate::W<APB3LPENR_SPEC>; #[doc = "Field `LTDCLPEN` reader - LTDC peripheral clock enable during CSleep mode Set and reset by software. The LTDC peripheral clocks are the kernel clock provided to ltdc_ker_ck input and the rcc_pclk3 bus interface clock."] pub type LTDCLPEN_R = crate::BitReader<LTDCLPEN_A>; #[doc = "LTDC peripheral clock enable during CSleep mode Set and reset by software. The LTDC peripheral clocks are the kernel clock provided to ltdc_ker_ck input and the rcc_pclk3 bus interface clock.\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum LTDCLPEN_A { #[doc = "0: The selected clock is disabled during csleep mode"] Disabled = 0, #[doc = "1: The selected clock is enabled during csleep mode"] Enabled = 1, } impl From<LTDCLPEN_A> for bool { #[inline(always)] fn from(variant: LTDCLPEN_A) -> Self { variant as u8 != 0 } } impl LTDCLPEN_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LTDCLPEN_A { match self.bits { false => LTDCLPEN_A::Disabled, true => LTDCLPEN_A::Enabled, } } #[doc = "The selected clock is disabled during csleep mode"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == LTDCLPEN_A::Disabled } #[doc = "The selected clock is enabled during csleep mode"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == LTDCLPEN_A::Enabled } } #[doc = "Field `LTDCLPEN` writer - LTDC peripheral clock enable during CSleep mode Set and reset by software. 
The LTDC peripheral clocks are the kernel clock provided to ltdc_ker_ck input and the rcc_pclk3 bus interface clock."] pub type LTDCLPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LTDCLPEN_A>; impl<'a, REG, const O: u8> LTDCLPEN_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "The selected clock is disabled during csleep mode"] #[inline(always)] pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(LTDCLPEN_A::Disabled) } #[doc = "The selected clock is enabled during csleep mode"] #[inline(always)] pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(LTDCLPEN_A::Enabled) } } #[doc = "Field `WWDGLPEN` reader - WWDG clock enable during CSleep mode Set and reset by software."] pub use LTDCLPEN_R as WWDGLPEN_R; #[doc = "Field `WWDGLPEN` writer - WWDG clock enable during CSleep mode Set and reset by software."] pub use LTDCLPEN_W as WWDGLPEN_W; impl R { #[doc = "Bit 3 - LTDC peripheral clock enable during CSleep mode Set and reset by software. The LTDC peripheral clocks are the kernel clock provided to ltdc_ker_ck input and the rcc_pclk3 bus interface clock."] #[inline(always)] pub fn ltdclpen(&self) -> LTDCLPEN_R { LTDCLPEN_R::new(((self.bits >> 3) & 1) != 0) } #[doc = "Bit 6 - WWDG clock enable during CSleep mode Set and reset by software."] #[inline(always)] pub fn wwdglpen(&self) -> WWDGLPEN_R { WWDGLPEN_R::new(((self.bits >> 6) & 1) != 0) } } impl W { #[doc = "Bit 3 - LTDC peripheral clock enable during CSleep mode Set and reset by software. 
The LTDC peripheral clocks are the kernel clock provided to ltdc_ker_ck input and the rcc_pclk3 bus interface clock."] #[inline(always)] #[must_use] pub fn ltdclpen(&mut self) -> LTDCLPEN_W<APB3LPENR_SPEC, 3> { LTDCLPEN_W::new(self) } #[doc = "Bit 6 - WWDG clock enable during CSleep mode Set and reset by software."] #[inline(always)] #[must_use] pub fn wwdglpen(&mut self) -> WWDGLPEN_W<APB3LPENR_SPEC, 6> { WWDGLPEN_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb3lpenr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb3lpenr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct APB3LPENR_SPEC; impl crate::RegisterSpec for APB3LPENR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`apb3lpenr::R`](R) reader structure"] impl crate::Readable for APB3LPENR_SPEC {} #[doc = "`write(|w| ..)` method takes [`apb3lpenr::W`](W) writer structure"] impl crate::Writable for APB3LPENR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets APB3LPENR to value 0x48"] impl crate::Resettable for APB3LPENR_SPEC { const RESET_VALUE: Self::Ux = 0x48; }
// This file is part of Substrate. // Copyright (C) 2017-2020 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. //! RPC Metadata use std::sync::Arc; use jsonrpc_core::futures::sync::mpsc; use jsonrpc_pubsub::{PubSubMetadata, Session}; /// RPC Metadata. /// /// Manages persistent session for transports that support it /// and may contain some additional info extracted from specific transports /// (like remote client IP address, request headers, etc) #[derive(Default, Clone)] pub struct Metadata { session: Option<Arc<Session>>, } impl jsonrpc_core::Metadata for Metadata {} impl PubSubMetadata for Metadata { fn session(&self) -> Option<Arc<Session>> { self.session.clone() } } impl Metadata { /// Create new `Metadata` with session (Pub/Sub) support. pub fn new(transport: mpsc::Sender<String>) -> Self { Metadata { session: Some(Arc::new(Session::new(transport))) } } /// Create new `Metadata` for tests. #[cfg(test)] pub fn new_test() -> (mpsc::Receiver<String>, Self) { let (tx, rx) = mpsc::channel(1); (rx, Self::new(tx)) } } impl From<mpsc::Sender<String>> for Metadata { fn from(sender: mpsc::Sender<String>) -> Self { Self::new(sender) } }
///
/// This is an import. It imports something into scope
///
/// In this case, it's a `trait` which is akin to a
/// Java interface with a few differences.
///
/// We import traits like `rand::Rng` and `io::Write`
/// (Which is also a trait) to be able to use their
/// methods and associated functions on objects which
/// implement them. In other words, they must be in
/// scope to be usable.
///
use rand::Rng;
use std::io::Write;

///
/// An enumeration. This lists the colours we can use
///
/// In this case, this is similar to a Java enum, in
/// that there is no attached data to each variant.
///
/// An enum is a fully qualified type, meaning we can
/// implement traits for it, which in this case include
/// `Clone` (Cloneable), `Copy` (Can copy bitwise),
/// `Debug` (Displayable), `PartialEq` (`==` operator).
/// We implement these using a shorthand for auto code
/// generation called `derive`. It's pretty common in
/// rust.
///
#[derive(Clone, Copy, Debug, PartialEq)]
enum Colour {
    Red,
    Orange,
    Blue,
    White,
    Yellow,
    Green,
}

///
/// We implement the `str::FromStr` trait to be able
/// to parse a `Colour` from user input in an idiomatic
/// way.
///
/// A major difference with Java is that implementations
/// of things and interfaces/traits on things is that
/// they are declared separate from the thing's declaration.
///
/// For example, in Java you'd say
/// ```
/// public class Foo implements MyInterface extends MyClass, MyAbstractClass { /**/ }
/// ```
/// While in rust you'd say
/// ```
/// pub struct Foo { /* My data members */ }
/// impl MyTrait for Foo {
///     // MyTrait method implementations here.
/// }
/// impl Foo {
///     // Foo methods here.
/// }
/// ```
///
impl std::str::FromStr for Colour {
    ///
    /// This is the error type we return when we get an error.
    /// This is called an associated type, it's named by the
    /// trait.
    ///
    type Err = String;

    ///
    /// We take a string (`&str`) and spit out a `Result` which
    /// is either `Ok()` or `Err()`.
    /// This is error handling in rust.
    ///
    /// When we say `Self` we say the type which we're implementing
    /// for. In this case it's `Colour`.
    ///
    fn from_str(text: &str) -> Result<Self, Self::Err> {
        ///
        /// This could be called either laziness or smartness. It imports
        /// the names of each of the colours into scope to allow us to
        /// omit `Colour::` before each name. For example, `Colour::Orange`
        /// becomes `Orange`.
        ///
        use Colour::*;
        //
        // A chain operation which essentially gets the first character
        // or, if it fails (Input is too short) will return early with
        // a message (The ? operator a few lines in means return early
        // if we get an `Err()` variant or strip the `Result` layer to
        // get the value (The `char`) back.
        //
        let first: char = text
            .chars()
            .next()
            .ok_or::<Self::Err>("Input too short!".into())?
            // Make it lowercase
            .to_ascii_lowercase();
        //
        // Rust's superpower `switch` statement.
        //
        match first {
            'r' => Ok(Red),    //
            'b' => Ok(Blue),   //
            'w' => Ok(White),  // All of these branches return a `Result`
            'y' => Ok(Yellow), // If they're okay with it.
            'g' => Ok(Green),  //
            'o' => Ok(Orange), //
            // In the case we get absolutely anything else, we return an error
            // telling us what we got instead.
            _ => Err(format!("Invalid initial character: `{}`", first)),
        }
    }
}

//
// This just enumerates the colours, for ease of use.
//
static COLOURS: &'static [Colour] = &[
    Colour::Red,
    Colour::Blue,
    Colour::White,
    Colour::Yellow,
    Colour::Green,
    Colour::Orange,
];

///
/// We make a state struct because we may want to
/// expose a gui layer, which isn't able to access
/// variables local to functions.
///
/// This has a single generic parameter `'a` which is
/// a lifetime. I can't summarize lifetimes in this
/// short project, but it should suffice to say it's
/// rust's gimmick to make it so fast and safe (As fast
/// as C++ and safe as GC languages as Java).
///
/// There's a nice entry in the rust book should you
/// want to explore more on this:
/// https://doc.rust-lang.org/book/ch10-03-lifetime-syntax.html
///
struct State<'a> {
    ///
    /// The pegs we're looking at right now.
    ///
    /// A `Vec` is the equivalent of a `List` or `ArrayList` in Java.
    ///
    pegs: Vec<Colour>,
    ///
    /// Previously chosen peg combinations, could be
    /// useful for logging events, etc. Currently unused
    /// other than to count number of attempts.
    ///
    pub previously_chosen: Vec<Vec<Colour>>,
    ///
    /// The previous games we've played (IE the previous
    /// states of `pegs`. This uses a tuple to denote the
    /// contents. Tuples are structures whose names are
    /// simply the types they contain. This one contains
    /// a list of colours, the number of tries it took
    /// (`usize` is a number type in rust, like `int` or
    /// `long` in Java), and whether it was won (`bool`)
    ///
    pub previous_games: Vec<(Vec<Colour>, usize, bool)>,
    ///
    /// Max number of pegs we can play with.
    ///
    size_pegs: usize,
    allow_duplicates: bool,
    ///
    /// We buffer the input, because it could be input
    /// over multiple lines or through a gui. Once the
    /// size reaches `size_pegs`, it will flush and try
    /// to finish a move.
    ///
    buffered_input: Vec<Colour>,
    ///
    /// Optionally describes the maximum number of moves
    /// in a game.
    ///
    max_tries: Option<usize>,
    ///
    /// A function pointer (AKA a variable that is a
    /// function) this is called when the player wins.
    ///
    win: Box<dyn Fn() + 'a>,
    ///
    /// Called when the player loses.
    ///
    lose: Box<dyn Fn() + 'a>,
    ///
    /// Terminal mode. Only outputs terminal prompts
    /// and messages if this is true.
    ///
    terminal: bool,
}

///
/// Implementing the state
///
impl<'a> State<'a> {
    ///
    /// A `new` function, akin to a constructor, will
    /// create a new `State` given some configuration
    /// parameters.
    ///
    /// Note that this may or may not return `Self` (
    /// AKA `State`) if it has invalid parameters.
    ///
    fn new(
        size: usize,
        allow_duplicates: bool,
        //
        // Equivalent of an `Integer` in Java which allows a
        // nullable `int`.
        //
        max_tries: Option<usize>,
        //
        // These functions use fancy talk in rust to say that
        // this function (new) is generic over some type which
        // implements the function traits. Kind of like this:
        // ```java
        // public static<T> void new(...) where T: MyInterface
        // ```
        //
        win: impl Fn() + 'a,
        lose: impl Fn() + 'a,
        terminal: bool,
    ) -> Option<Self> {
        //Check if there is a problem with our config
        if size > COLOURS.len() {
            if terminal {
                println!(
                    "Choose less than or equal to {} pegs to play with!",
                    COLOURS.len()
                )
            }
            // Return error state (`null` equivalent) if error
            None
        } else {
            let pegs = Self::generate_new_pegs(size, allow_duplicates);
            Some(
                // This is an inline constructor, we just specify
                // each field's value on declaration
                Self {
                    pegs,
                    previously_chosen: Vec::with_capacity(max_tries.clone().unwrap_or(0)),
                    previous_games: Vec::new(),
                    size_pegs: size,
                    allow_duplicates,
                    buffered_input: Vec::with_capacity(size),
                    max_tries,
                    win: Box::new(win),
                    lose: Box::new(lose),
                    terminal,
                },
            )
        }
    }

    ///
    /// This is an associated function which isn't run on anything. This
    /// is similar to Java's `static` methods except those have access to
    /// `static` state, which could be present in a class. These on the
    /// other hand cannot mutate anything outside of the function.
    ///
    /// From within this `impl` block, you call this as `Self::generate_new_pegs()`
    ///
    fn generate_new_pegs(size: usize, allow_duplicates: bool) -> Vec<Colour> {
        let mut rng = rand::thread_rng();
        // We use `.to_vec` so that we can remove items from it
        let mut choice_pegs = COLOURS.to_vec();
        if allow_duplicates {
            // This is equivalent to looping over `0` to `size` and
            // collecting these values into a list:
            //
            // choice_pegs[rng.gen::<usize>() % choice_pegs.len()]
            //
            (0..size)
                .map(|_| choice_pegs[rng.gen::<usize>() % choice_pegs.len()])
                .collect()
        } else {
            // This is the same idea except that instead of using indexing
            // we use removal, which will remove the item from the list and
            // return it.
            (0..size)
                .map(move |_| choice_pegs.remove(rng.gen::<usize>() % choice_pegs.len()))
                .collect()
        }
        // Because each branch of the if block ends with an implicit return
        // expression which isn't `void` (Or in rust, ()), the if statement
        // itself is now an expression which returns a value.
    }

    ///
    /// Returns either the number of correct placements followed by
    /// present colours or a string describing an error in the case
    /// it is not happy with its inputs.
    ///
    fn matching(&self, idx: Option<usize>) -> Result<(usize, usize), String> {
        let player = idx
            .map(|x| &self.previously_chosen[x])
            .unwrap_or(&self.buffered_input);
        if !self.allow_duplicates {
            let mut seen = Vec::new();
            player
                .iter()
                .enumerate()
                .try_fold((0, 0), |mut state, (idx, val)| {
                    if seen.contains(val) {
                        return Err("Cannot have duplicated when using non-duplicate mode!".into());
                    }
                    seen.push(*val);
                    if self.pegs.contains(val) {
                        if self.pegs[idx] == *val {
                            state.0 += 1;
                        } else {
                            state.1 += 1;
                        }
                    }
                    Ok(state)
                })
        } else {
            Ok(player
                .iter()
                .enumerate()
                .fold((0, 0), |mut state, (idx, val)| {
                    if self.pegs.contains(val) {
                        if self.pegs[idx] == *val {
                            state.0 += 1;
                        } else {
                            state.1 += 1;
                        }
                    }
                    state
                }))
        }
    }

    ///
    /// Pushes a colour into our buffered input, returning
    /// if a game change (Not a turn change) occurred.
    ///
    fn input_buffer(&mut self, value: Colour) -> Result<bool, String> {
        self.buffered_input.push(value);
        if self.buffered_input.len() == self.size_pegs {
            self.finish_try()
        } else {
            Ok(false)
        }
    }

    ///
    /// Parse and push a whole string as an input into the buffer.
    /// This uses `input_buffer` on every character in the string.
    ///
    fn push_string_input(&mut self, mut text: &str) -> Result<bool, (String, bool)> {
        let mut should_reset = false;
        while text.len() > 0 {
            //Intentionally ignoring the output because we can accept
            //strings longer than the max size and just keep processing
            //them to enter multiple tries at the same time.
            should_reset |= text
                .parse()
                .and_then(|x| self.input_buffer(x))
                .map_err(|x| (x, should_reset))?;
            // This line could break should we get a non-ascii character
            // but it should be fine for now
            text = &text[1..];
        }
        Ok(should_reset)
    }

    ///
    /// Decides to either win the game, or not, or keep going.
    ///
    fn finish_try(&mut self) -> Result<bool, String> {
        // `true` if we've finished a game, false if we've finished a round
        let returns;
        if self.buffered_input == self.pegs {
            // Call our function we assigned at the start if we win.
            // This currently just prints a "You win" message
            (self.win)();
            self.previous_games
                .push((self.pegs.clone(), self.previously_chosen.len(), true));
            self.reset();
            returns = true;
        } else {
            if self.max_tries.unwrap_or(std::usize::MAX) == self.previously_chosen.len() + 1 {
                (self.lose)();
                self.previous_games
                    .push((self.pegs.clone(), self.previously_chosen.len(), false));
                self.reset();
                returns = true;
            } else {
                if self.terminal {
                    let matching = self.matching(None)?;
                    println!(
                        "Good try, here are your matching pegs: {:?} are in the correct position and {:?} have the right colour",
                        matching.0, matching.1,
                    )
                }
                self.previously_chosen
                    .push(self.buffered_input.drain(..).collect());
                returns = false;
            }
        }
        Ok(returns)
    }

    // Resets per-game state (keeps `previous_games` history) and deals a
    // fresh set of pegs for the next game.
    fn reset(&mut self) {
        self.previously_chosen = Vec::new();
        self.buffered_input.clear();
        self.pegs = Self::generate_new_pegs(self.size_pegs, self.allow_duplicates);
    }
}

///
/// A mock main, meant to be copy-pasteable into other places.
///
// NOTE(review): `main()` continues past the end of this chunk — it is
// truncated below mid-statement; the remainder lives outside this view.
pub fn main() -> Result<(), Box<dyn std::error::Error>> {
    // There are three ways to write a string in rust,
    // "this way", r#"this way"#, and r"this way".
    // The first one is your standard string with escape
    // sequences like \n, etc. The second one is to ignore
    // all characters between the #"s and just take them
    // as if they were text. r"" text is a byte array literal
    // instead of a string.
    println!( r#" ~~~~ Mastermind ~~~~ Rules: A set of pegs from the following colours are selected: ┏━━━━━━┳━━━━━━┳━━━━━━┓ ┃Orange┃Yellow┃ Red ┃ ┣━━━━━━╋━━━━━━╋━━━━━━┫ ┃ Blue ┃Green ┃White ┃ ┗━━━━━━┻━━━━━━┻━━━━━━┛ The player takes guesses at the selected colours, and is given the number of pegs in a correct position (And colour) and the number of correct colours chosen in an incorrect position. "# );
    let mut input = String::new();
    print!("Would you like to allow duplicates? 
(\"true\" or \"false\"): "); std::io::stdout().flush()?; std::io::stdin().read_line(&mut input)?; let mut duplicates = input.trim().parse::<bool>(); let duplicates = loop { match duplicates { Ok(x) => break x, Err(_) => { println!("Please try again! Either `true` or `false`."); input.clear(); std::io::stdin().read_line(&mut input)?; duplicates = input.trim().parse(); } } }; input.clear(); print!("How many pegs would you like to play with? (2-6, inclusive): "); std::io::stdout().flush()?; std::io::stdin().read_line(&mut input)?; let mut pegs = input.trim().parse::<usize>(); let pegs = loop { match pegs { Ok(x @ 2..=6) => break x, _ => { println!("Please try again! Enter a valid positive integer from 2-6 inclusive."); input.clear(); std::io::stdin().read_line(&mut input)?; pegs = input.trim().parse(); } } }; // Here we use our new function above. let mut state = State::new( pegs, duplicates, Some(10), || println!("You won!"), || println!("Uh-oh, you lost"), true, ) .unwrap(); for i in 0..2 { println!("Generated new state! Game #{}", i + 1); 'a: loop { print!("Enter next colours > "); std::io::stdout().flush()?; input.clear(); std::io::stdin().read_line(&mut input)?; match state.push_string_input(input.trim()) { Ok(f) => { if f { break 'a; } } Err((text, f)) => { println!("Error encountered: {}", text); if f { break 'a; } } } } } println!("Previous games:"); for (idx, (pegs, attempts, won)) in state.previous_games.iter().enumerate() { println!( "Game #{} with pegs {:?} was {} with {} attempts", idx + 1, pegs, if *won { "won" } else { "lost" }, attempts ); } Ok(()) }
// Slack bot that turns chat messages into "annotations" and POSTs them as JSON
// to a configured HTTP endpoint. Written against pre-1.0-era crates: the
// nightly `serde_macros` compiler plugin, rustc_serialize-based toml decoding,
// and the old blocking `hyper::Client`.
#![feature(custom_derive, plugin)]
#![plugin(serde_macros)]

extern crate slack;
extern crate chrono;
extern crate hyper;
extern crate serde_json;
extern crate toml;
extern crate rustc_serialize;

use chrono::{UTC, TimeZone, Local};
use std::io::Read;
use std::fs::File;
use hyper::Client;
use std::process;

/// Slack RTM event handler carrying the bot configuration (target URL etc.).
struct EventHandler {
    config: Config,
}

impl EventHandler {
    /// Wraps the parsed `Config` for use by the RTM callbacks.
    fn new(config: Config) -> EventHandler {
        EventHandler { config: config }
    }
}

impl slack::EventHandler for EventHandler {
    /// Called for every RTM event; ignores everything except standard chat
    /// messages, parses them into a `Command`, and replies in-channel.
    fn on_event(&mut self,
                client: &mut slack::RtmClient,
                event: Result<&slack::Event, slack::Error>,
                _: &str) {
        // NOTE(review): unwrap panics the handler on a transport error —
        // the slack crate of this era delivered errors through this Result.
        let event = event.unwrap();
        // Only chat messages are interesting; drop every other event kind.
        let message = match *event {
            slack::Event::Message(ref m) => m,
            _ => return,
        };
        let txt;
        let chan;
        match *message {
            // Destructure only `text` and `channel`; every other field of a
            // standard message is explicitly ignored.
            slack::Message::Standard { ts: _, user: _, is_starred: _, pinned_to: _, reactions: _, edited: _, attachments: _, ref text, ref channel } => {
                // NOTE(review): both fields are Option — unwrap assumes slack
                // always populates them for standard messages; confirm.
                txt = text.clone().unwrap();
                chan = channel.clone().unwrap();
            }
            _ => return,
        };
        // Messages addressed to the bot start with "<@BOT_ID>"; that prefix is
        // stripped during command parsing below.
        let bot_tag = format!("<@{}>", client.get_id().unwrap());
        let cmd = parse_command(&txt, &bot_tag);
        let reply = match cmd {
            Command::Annotate(annotate) => {
                // Persist the annotation, then acknowledge in-channel.
                save(&self.config, &annotate);
                "Done! Annotation added."
            }
            Command::Help => {
                "Type your annotation in \"title. tag 1,tag 2, tag 3. time.\" or \n \"title. tag \
                 1,tag 2, tag 3.\""
            }
            Command::None => "Sorry, I don't know what you want",
        };
        // Send errors are deliberately ignored (best-effort reply).
        let _ = client.send_message(&chan, &reply);
    }
    fn on_ping(&mut self, _: &mut slack::RtmClient) {}
    fn on_close(&mut self, _: &mut slack::RtmClient) {}
    fn on_connect(&mut self, _: &mut slack::RtmClient) {}
}

/// The three things a message can mean to the bot.
#[derive(Debug)]
enum Command {
    Help,
    Annotate(Annotate),
    None,
}

/// An annotation: free text, a tag list, and a timestamp in epoch millis.
#[derive(Debug, Serialize, Deserialize)]
struct Annotate {
    what: String,
    tags: Vec<String>,
    when: i64,
}

impl Annotate {
    /// Builds an annotation from the already-split message segments.
    ///
    /// `tags` is a comma-separated list; `when` is an optional "%F %R"
    /// (YYYY-MM-DD HH:MM) local timestamp. Any missing or unparseable
    /// timestamp falls back to "now". The result is stored as UTC epoch
    /// milliseconds (hence the `* 1000`).
    fn new(what: &String, tags: &String, when: &Option<&String>) -> Annotate {
        Annotate {
            what: what.clone().trim().to_string(),
            tags: tags.split(",").map(|s| s.trim().to_string()).collect::<Vec<String>>(),
            when: when.and_then(|s| {
                    let date = Local.datetime_from_str(s, "%F %R");
                    match date {
                        Ok(date) => Some(date.with_timezone(&UTC)),
                        // Unparseable timestamp: silently substitute "now".
                        Err(_) => Some(UTC::now()),
                    }
                })
                // No timestamp segment at all: also "now".
                .or_else(|| Some(UTC::now()))
                .and_then(|d| Some(d.timestamp() * 1000))
                .unwrap(),
        }
    }
}

/// Splits a message on '.' and classifies it: "help", an annotation
/// ("title. tags. [time.]"), or nothing the bot understands.
fn parse_command(message: &String, bot_tag: &String) -> Command {
    let tokens: Vec<String> = message.split(".").map(|s| s.trim().to_string()).collect();
    // First segment with the leading "<@BOT_ID>" mention removed.
    // NOTE(review): `trim_left_matches` is the pre-rename spelling of
    // `trim_start_matches`; kept as-is for this compiler era.
    let cmd_token = match tokens.get(0) {
        Some(s) => s.trim_left_matches(bot_tag).to_string(),
        None => return Command::None,
    };
    match cmd_token.as_ref() {
        "help" => Command::Help,
        _ => {
            // An annotation needs at least "title. tags."; the third segment
            // (time) is optional and passed through as an Option.
            if tokens.len() >= 2 {
                return Command::Annotate(Annotate::new(&cmd_token,
                                                       &tokens.get(1).unwrap(),
                                                       &tokens.get(2)));
            } else {
                return Command::None;
            }
        }
    }
}

/// POSTs the annotation as JSON to the configured URL; failures are only
/// logged, never propagated (best-effort persistence).
fn save(config: &Config, annotate: &Annotate) {
    let client = Client::new();
    let body = serde_json::to_string(annotate).unwrap();
    let resp = client.post(config.url.as_str())
        .body(body.as_str())
        .send();
    match resp {
        Ok(_) => {},
        Err(err) => println!("{:?}", err),
    }
}

/// Bot configuration decoded from the `[config]` table of `config.toml`.
#[derive(RustcDecodable, Clone)]
struct Config {
    slack_key: String,
    url: String,
}

fn main() {
    // Read config.toml; on any I/O error print it and exit(1).
    let mut config = String::new();
    let _ = File::open("config.toml")
        .and_then(|mut f| f.read_to_string(&mut config))
        .map_err(|e| {
            println!("{}", e);
            process::exit(1);
        });
    let mut parser = toml::Parser::new(&config);
    let parsed = parser.parse().unwrap();
    let config_parsed = parsed.get("config").unwrap();
    let config = toml::decode::<Config>(config_parsed.clone()).unwrap();
    let mut event_handler = EventHandler::new(config.clone());
    // Connect to Slack RTM and block forever dispatching events.
    let mut cli = slack::RtmClient::new(&config.slack_key.clone());
    let result = cli.login_and_run(&mut event_handler);
    match result {
        Ok(_) => {}
        Err(err) => panic!("Error: {}", err),
    }
}
/// A user record used to demonstrate struct features: field-init shorthand,
/// struct-update syntax, and derived `Debug` formatting.
#[derive(Debug)]
struct User {
    active: bool,
    username: String,
    email: String,
    sign_in_count: u64,
}

/// Builds a `User` from its credentials.
///
/// New users start `active` with a `sign_in_count` of 1. Because the
/// parameter names match the field names, the field-init shorthand applies.
fn build_user(email: String, username: String) -> User {
    User {
        email, // Field Init Shorthand
        username,
        active: true,
        sign_in_count: 1,
    }
}

// Tuple structs: named types whose fields are accessed by position (.0, .1, …).
// `Color` and `Point` have identical layouts but remain distinct types.
struct Color(i32, i32, i32);
struct Point(i32, i32, i32);

fn main() {
    let mut user1 = User {
        email: String::from("test@example.com"),
        username: String::from("r3lik"),
        active: true,
        sign_in_count: 1,
    };

    // access struct values with dot notation; `user1` is mut, so fields can
    // be reassigned in place
    user1.email = String::from("r3lik@g.com");

    // we can create a User with pre-populated values
    let user2 = build_user(
        String::from("test@yahoo.com"),
        String::from("r3lik2"),
    );

    // we can create an instance from another instance with some updated
    // values; `..user1` moves the remaining (non-Copy) fields out of `user1`
    let user3 = User {
        email: String::from("test2@yahoo.com"),
        ..user1
    };
    println!("{:?}", user3);
    //println!("{:?}", user1); // would not compile: the String in the username field was moved into user3

    let black = Color(1, 2, 3);
    let origin = Point(1, 2, 3);
    println!("accessing tuple values {} {} {}", black.0, black.1, black.2)
}
// Integration test: round-trips `abin::Str` values through serde_cbor while
// asserting exact heap-allocation counts via an instrumented global allocator.
#![cfg(feature = "serde")]

use std::alloc::System;

use serde::{Deserialize, Serialize};
use stats_alloc::{StatsAlloc, INSTRUMENTED_SYSTEM};

use abin::{NewStr, Str, StrFactory};
use utils::*;

// Route all heap activity through the instrumented system allocator so the
// `mem_scoped` assertions below can count (de/re)allocations exactly.
#[global_allocator]
static GLOBAL: &StatsAlloc<System> = &INSTRUMENTED_SYSTEM;

// `mem_scoped` and the `Ma*` assertion combinators come from this local
// test-support module.
pub mod utils;

/// Demonstrates how to use serde using strings;
#[test]
fn serialize_deserialize() {
    deserialize_serialize_small();
    deserialize_serialize_large();
}

/// De-serialization / serialization with small strings that can be stack-allocated
/// (no allocation).
fn deserialize_serialize_small() {
    let short = "short";
    // note: no allocation here
    let entity = mem_scoped(
        &GLOBAL,
        &MaAnd(&[
            &MaExactNumberOfAllocations(0),
            &MaExactNumberOfReAllocations(0),
            &MaExactNumberOfDeAllocations(0),
        ]),
        || {
            Entity {
                id: 45,
                // here we create a short string (stack-allocated)
                string_a: NewStr::from_static(short),
                // empty: so stack allocated
                string_b: NewStr::from_static(""),
            }
        },
    );

    // Encode to CBOR; this buffer may allocate — only the closure regions
    // above/below are asserted allocation-free.
    let as_vec = serde_cbor::to_vec(&entity).unwrap();

    // note: no allocation here
    mem_scoped(
        &GLOBAL,
        &MaAnd(&[
            &MaExactNumberOfAllocations(0),
            &MaExactNumberOfReAllocations(0),
            &MaExactNumberOfDeAllocations(0),
        ]),
        || {
            let restored: Entity = serde_cbor::from_slice(as_vec.as_slice()).unwrap();
            // must be equal
            assert_eq!(entity, restored);
        },
    );
}

/// De-serialization / serialization with large binaries (allocation required).
fn deserialize_serialize_large() {
    // note: no allocation here
    let entity = mem_scoped(
        &GLOBAL,
        &MaAnd(&[
            &MaExactNumberOfAllocations(0),
            &MaExactNumberOfReAllocations(0),
            &MaExactNumberOfDeAllocations(0),
        ]),
        || Entity {
            id: 45,
            // `from_static` borrows 'static data, so even these long strings
            // cost no allocation at construction time.
            string_a: NewStr::from_static(
                "This is somewhat longer; this will not fit \
                 on stack - longer - even longer.",
            ),
            string_b: NewStr::from_static(
                "Longer and longer and longer and longer and \
                 even longer... again, even longer. Longer and longer.",
            ),
        },
    );

    let as_vec = serde_cbor::to_vec(&entity).unwrap();

    // note: 2 allocations (& de-allocations) here.
    // (one heap-backed Str per long field on decode — the strings no longer
    // fit the stack representation)
    mem_scoped(
        &GLOBAL,
        &MaAnd(&[
            &MaExactNumberOfAllocations(2),
            &MaExactNumberOfReAllocations(0),
            &MaExactNumberOfDeAllocations(2),
        ]),
        || {
            let restored: Entity = serde_cbor::from_slice(as_vec.as_slice()).unwrap();
            // must be equal
            assert_eq!(entity, restored);
        },
    );
}

/// Round-trip payload: an id plus two `abin::Str` fields.
#[derive(Deserialize, Serialize, Eq, PartialEq, Clone, Debug)]
pub struct Entity {
    pub id: u64,
    pub string_a: Str,
    pub string_b: Str,
}
//! A [Sudoku puzzle](https://en.wikipedia.org/wiki/Sudoku) is a
//! `n^2` × `n^2` array with sub-arrays of size `n` × `n`. Each row, column, and
//! sub-array contains the values `1` through `n` with no repeats.

use super::{latin_square, ExactCover};
use std::collections::HashSet;

/// An instance of a Sudoku puzzle.
#[derive(Debug)]
pub struct Sudoku {
    /// The list of possible values and positions that are valid for this Sudoku
    /// puzzle.
    pub possibilities: Vec<Possibility>,
    /// The list of constraints that must be satisfied for this Sudoku puzzle.
    pub constraints: Vec<Constraint>,
}

impl Sudoku {
    /// Create a new Sudoku puzzle.
    ///
    /// The puzzle has size `n^2` × `n^2` (where `n = box_side_length`) and the
    /// given list of filled values.
    pub fn new(
        box_side_length: usize,
        filled_values: impl IntoIterator<Item = latin_square::Possibility>,
    ) -> Self {
        let side_length = box_side_length * box_side_length;
        let filled_values: Vec<_> = filled_values.into_iter().collect();
        // A Sudoku is a Latin square plus the per-box constraints; delegate
        // the row/column bookkeeping to the Latin-square constructor.
        let latin = latin_square::LatinSquare::new(side_length, filled_values.iter().copied());

        // Constraints already satisfied by the pre-filled cells: they must be
        // removed from the open-constraint list below.
        let satisfied: HashSet<_> = filled_values
            .iter()
            .copied()
            .map(|latin_poss| Possibility::from_latin(latin_poss, box_side_length))
            .flat_map(Possibility::satisfied_constraints)
            .collect();

        // Lift the remaining Latin-square possibilities into Sudoku
        // possibilities (adds the `square` index).
        let possibilities = latin
            .possibilities
            .into_iter()
            .map(|latin_poss| Possibility::from_latin(latin_poss, box_side_length))
            .collect();

        // Open constraints = remaining Latin constraints + all box/value
        // constraints, minus those satisfied by the filled cells.
        let constraints = latin
            .constraints
            .into_iter()
            .map(Constraint::from)
            .chain(Constraint::all_square_number(box_side_length))
            .filter(|cons| !satisfied.contains(cons))
            .collect();

        Self {
            possibilities,
            constraints,
        }
    }
}

impl ExactCover for Sudoku {
    type Constraint = Constraint;
    type Possibility = Possibility;

    fn satisfies(&self, poss: &Self::Possibility, cons: &Self::Constraint) -> bool {
        use Constraint::*;

        match cons {
            // Latin-square constraints are checked through the converted
            // latin_square::Possibility.
            Latin(latin_cons) => {
                <Possibility as Into<latin_square::Possibility>>::into(*poss).satisfies(latin_cons)
            }
            SquareNumber { square, value } => poss.square == *square && poss.value == *value,
        }
    }

    // Every Sudoku constraint is mandatory; none are optional.
    fn is_optional(&self, _cons: &Self::Constraint) -> bool {
        false
    }

    fn possibilities(&self) -> &[Self::Possibility] {
        &self.possibilities
    }

    fn constraints(&self) -> &[Self::Constraint] {
        &self.constraints
    }
}

/// A position and value for a box inside of a Sudoku puzzle.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Possibility {
    /// The row position of the box.
    ///
    /// The values ranges from 0 to `n - 1`, where `n` is the length of the
    /// Sudoku board.
    pub row: usize,
    /// The column position of the box.
    ///
    /// The values ranges from 0 to `n - 1`, where `n` is the length of the
    /// Sudoku board.
    pub column: usize,
    /// The index of the subgrid.
    ///
    /// The values ranges from 0 to `n - 1`, where `n` is the length of the
    /// Sudoku board. This field is redundant in identifying where the box is
    /// inside of the Sudoku board, however it is necessary to speed up checking
    /// which `Constraint`s are satisfied by this `Possibility`.
    pub square: usize,
    /// The value present inside of the box.
    ///
    /// The values ranges from 1 to `n`, where `n` is the length of the
    /// Sudoku board.
    pub value: usize,
}

impl Possibility {
    /// Convert a `latin_square::Possibility` to a `sudoku::Possibility`.
    pub fn from_latin(latin: latin_square::Possibility, box_side_length: usize) -> Self {
        let side_length = box_side_length * box_side_length;
        let index = latin.row * side_length + latin.column;
        // `index % side_length` is the column, so the first term is the box
        // column (column / box_side_length); `index / (side_length *
        // box_side_length)` is row / box_side_length, i.e. the box row. The
        // square index is box_column + box_side_length * box_row.
        let square = ((index % side_length) / box_side_length)
            + box_side_length * (index / (side_length * box_side_length));

        Possibility {
            row: latin.row,
            column: latin.column,
            value: latin.value,
            square,
        }
    }

    /// Return an iterator over the `Constraint`s that are satisfied by this
    /// `Possibility`.
    pub fn satisfied_constraints(self) -> impl Iterator<Item = Constraint> {
        // Placing (row, column, square) = value covers exactly these four
        // exact-cover columns.
        [
            Constraint::Latin(latin_square::Constraint::RowNumber {
                row: self.row,
                value: self.value,
            }),
            Constraint::Latin(latin_square::Constraint::ColumnNumber {
                column: self.column,
                value: self.value,
            }),
            Constraint::Latin(latin_square::Constraint::RowColumn {
                row: self.row,
                column: self.column,
            }),
            Constraint::SquareNumber {
                square: self.square,
                value: self.value,
            },
        ]
        .into_iter()
    }
}

impl From<Possibility> for latin_square::Possibility {
    // Dropping the redundant `square` field recovers the Latin-square view.
    fn from(src: Possibility) -> Self {
        latin_square::Possibility {
            row: src.row,
            column: src.column,
            value: src.value,
        }
    }
}

/// A condition which must be satisfied in order to solve a Sudoku puzzle.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum Constraint {
    /// A constraint which is also shared by a Latin Square puzzle.
    Latin(latin_square::Constraint),
    /// A condition that each square (or sub-grid) should only have a single
    /// instance of a numeric value.
    SquareNumber {
        /// The square index.
        square: usize,
        /// The unique numeric value
        value: usize,
    },
}

impl Constraint {
    /// Every (square, value) pair for a board with the given box size.
    // NOTE(review): presumably `two_combination_iter([side_length,
    // side_length + 1], [0, 1])` yields square in 0..side_length and value in
    // 1..=side_length — confirm against crate::util.
    fn all_square_number(box_side_length: usize) -> impl Iterator<Item = Constraint> {
        let side_length = box_side_length * box_side_length;
        crate::util::two_combination_iter([side_length, side_length + 1], [0, 1])
            .map(|[square, value]| Constraint::SquareNumber { square, value })
    }
}

impl From<latin_square::Constraint> for Constraint {
    fn from(src: latin_square::Constraint) -> Self {
        Constraint::Latin(src)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Shorthand constructors to keep the expected-value tables readable.
    fn p(row: usize, column: usize, square: usize, value: usize) -> Possibility {
        Possibility {
            row,
            column,
            square,
            value,
        }
    }

    fn c_row(row: usize, value: usize) -> Constraint {
        Constraint::Latin(latin_square::Constraint::RowNumber { row, value })
    }

    fn c_col(column: usize, value: usize) -> Constraint {
        Constraint::Latin(latin_square::Constraint::ColumnNumber { column, value })
    }

    fn c_row_col(row: usize, column: usize) -> Constraint {
        Constraint::Latin(latin_square::Constraint::RowColumn { row, column })
    }

    fn c_square(square: usize, value: usize) -> Constraint {
        Constraint::SquareNumber { square, value }
    }

    // 4×4 board with only the central 2×2 block left open: the generated
    // possibilities/constraints must cover exactly those four cells.
    #[test]
    fn check_generated_possibilities_constraints() {
        let mut sudoku = Sudoku::new(
            2,
            vec![
                // top row
                latin_square::tests::p(0, 0, 1),
                latin_square::tests::p(0, 1, 2),
                latin_square::tests::p(0, 2, 3),
                latin_square::tests::p(0, 3, 4),
                // middle bits
                latin_square::tests::p(1, 0, 3),
                latin_square::tests::p(2, 0, 2),
                latin_square::tests::p(1, 3, 2),
                latin_square::tests::p(2, 3, 3),
                // bottom row
                latin_square::tests::p(3, 0, 4),
                latin_square::tests::p(3, 1, 3),
                latin_square::tests::p(3, 2, 2),
                latin_square::tests::p(3, 3, 1),
            ],
        );

        sudoku.possibilities.sort();

        assert_eq!(
            sudoku.possibilities,
            vec![
                p(1, 1, 0, 1),
                p(1, 1, 0, 2),
                p(1, 1, 0, 3),
                p(1, 1, 0, 4),
                p(1, 2, 1, 1),
                p(1, 2, 1, 2),
                p(1, 2, 1, 3),
                p(1, 2, 1, 4),
                p(2, 1, 2, 1),
                p(2, 1, 2, 2),
                p(2, 1, 2, 3),
                p(2, 1, 2, 4),
                p(2, 2, 3, 1),
                p(2, 2, 3, 2),
                p(2, 2, 3, 3),
                p(2, 2, 3, 4),
            ]
        );

        sudoku.constraints.sort();

        assert_eq!(
            sudoku.constraints,
            vec![
                c_row(1, 1),
                c_row(1, 4),
                c_row(2, 1),
                c_row(2, 4),
                c_col(1, 1),
                c_col(1, 4),
                c_col(2, 1),
                c_col(2, 4),
                c_row_col(1, 1),
                c_row_col(1, 2),
                c_row_col(2, 1),
                c_row_col(2, 2),
                c_square(0, 4),
                c_square(1, 1),
                c_square(2, 1),
                c_square(3, 4),
            ]
        );
    }

    // Same board as above; the solver must find the unique completion of the
    // central block.
    #[test]
    fn solve_small_sudoku() {
        let sudoku = Sudoku::new(
            2,
            vec![
                // top row
                latin_square::tests::p(0, 0, 1),
                latin_square::tests::p(0, 1, 2),
                latin_square::tests::p(0, 2, 3),
                latin_square::tests::p(0, 3, 4),
                // middle bits
                latin_square::tests::p(1, 0, 3),
                latin_square::tests::p(2, 0, 2),
                latin_square::tests::p(1, 3, 2),
                latin_square::tests::p(2, 3, 3),
                // bottom row
                latin_square::tests::p(3, 0, 4),
                latin_square::tests::p(3, 1, 3),
                latin_square::tests::p(3, 2, 2),
                latin_square::tests::p(3, 3, 1),
            ],
        );

        let mut solver = sudoku.solver();
        let solutions = solver.all_solutions();

        assert_eq!(solutions.len(), 1);
        assert_eq!(
            solutions[0],
            vec![
                &p(1, 1, 0, 4),
                &p(1, 2, 1, 1),
                &p(2, 1, 2, 1),
                &p(2, 2, 3, 4)
            ]
        );
    }
}
// Advent of Code 2019 day 14: ORE -> FUEL reaction chains with surplus
// banking between repeated FUEL productions.
use crate::util::Part;
use std::collections::HashMap;

/// Entry point used by the puzzle runner: dispatches to part 1 or part 2 and
/// stringifies the numeric answer.
pub fn solve(input:String, part:Part) -> String {

    let result = match part {
        Part::Part1 => part1(input.as_str()),
        Part::Part2 => part2(input.as_str())
    };

    format!("{}",result)
}

/// Part 1: ORE needed to produce exactly one FUEL.
fn part1(input:&str) -> u64 {
    count_or_for_one_fuel(parse_input(input))
}

/// Part 2: placeholder — not implemented yet, returns a constant.
fn part2(_input:&str) -> u64 {
    2
}

/// A quantity of a named material, e.g. "7 A".
#[derive(Debug,Clone)]
struct Component {
    qty:u64,
    material:String,
}

/// One reaction line: `input` components are consumed to yield `output`.
#[derive(Debug,Clone)]
struct Reaction {
    output:Component,
    input:Vec<Component>,
}

/// Parses one line of the form "3 A, 4 B => 1 AB".
fn parse_reaction(line:&str) -> Reaction {
    let parts:Vec<&str> = line.split("=>").map(|s| s.trim()).collect();
    let output_pair:Vec<&str> = parts[1].split(' ').map(|s| s.trim()).collect();
    let output : Component = Component{qty:output_pair[0].parse().unwrap(), material:output_pair[1].to_string()};
    // Left-hand side: comma-separated "qty NAME" pairs.
    let material:Vec<&str> = parts[0].split(",").map(|s| s.trim()).collect();
    let materials : Vec<Component> = material.iter().map(|&s| {
        let l : Vec<&str>= s.split(' ').collect();
        Component{material:String::from(l[1]), qty:l[0].parse().unwrap()}
    }).collect();

    Reaction{output:output, input:materials}
}

/// Parses the whole puzzle input, one reaction per line.
fn parse_input(input:&str) -> Vec<Reaction> {
    let reactions:Vec<Reaction> = input.lines().map( |s| parse_reaction(s)).collect();
    reactions
}

// Ceiling division. NOTE(review): unused within this file — it duplicates the
// middle branch of `get_required_multiple`; candidate for removal.
fn int_div_round_up(denominator:u64, divisor:u64) -> u64 {
    if denominator % divisor > 0 {
        return (denominator / divisor) + 1;
    }
    denominator / divisor
}

/// How many times a reaction producing `multiple` units per run must run to
/// cover `req_qty` units (i.e. ceil(req_qty / multiple), minimum 1).
fn get_required_multiple(req_qty:u64, multiple:u64) -> u64 {
    if multiple == 1 {
        return req_qty;
    } else if multiple < req_qty {
        // Ex. required is 9 but multiple of 7
        let rest = req_qty % multiple;
        if rest > 0 {
            return 1+(req_qty / multiple);
        } else {
            return req_qty / multiple;
        }
    } else {
        // Ex. Required is 5 but multiple is 9
        1
    }
}

// NOTE(review): the `count_or_…` names look like a typo for `count_ore_…`;
// kept because callers/tests use these names.
fn count_or_for_one_fuel(reactions:Vec<Reaction>) -> u64 {
    count_ore_per_fuel(&reactions, &mut HashMap::new())
}

/// Part-2 helper: how much FUEL can `total_ore` produce, reusing surplus
/// between FUEL batches. Finds the surplus cycle, multiplies it out, then
/// finishes the remainder one FUEL at a time.
fn count_or_for_n_fuel(reactions:Vec<Reaction>, total_ore:u64) -> u64 {
    let mut surplus_map:HashMap<String, u64> = HashMap::new();
    let mut ore_acc = 0;
    let mut fuel_acc:u64 = 0;
    // Produce FUEL until the surplus bank drains back to empty (a full cycle)
    // or the ore budget is exceeded.
    while ore_acc < total_ore {
        ore_acc += count_ore_per_fuel(&reactions, &mut surplus_map);
        fuel_acc += 1;
        if surplus_map.is_empty() {
            println!("=> at fuel {} and ore {}, surplus is empty.", fuel_acc, ore_acc);
            break;
        }
    }
    // Scale the measured cycle as many whole times as the budget allows.
    let fuel_mult = fuel_acc;
    let ore_mult = ore_acc;
    let mult = total_ore / ore_mult;
    ore_acc = mult * ore_mult;
    fuel_acc = mult * fuel_mult;
    // Spend the remaining ore one FUEL at a time.
    while ore_acc < total_ore {
        ore_acc += count_ore_per_fuel(&reactions, &mut surplus_map);
        if ore_acc <= total_ore {
            fuel_acc += 1;
        }
    }
    fuel_acc
}

/// ORE cost of producing one FUEL, consuming/banking leftovers in
/// `surplus_map` so repeated calls model continuous production.
fn count_ore_per_fuel(reactions:&Vec<Reaction>, surplus_map:&mut HashMap<String, u64>) -> u64 {
    // Build map of materials
    let mut material_map:HashMap<String, u64> = HashMap::new();
    let mut reaction_map:HashMap<String, &Reaction> = HashMap::new();
    for reaction in reactions.iter() {
        reaction_map.insert(reaction.output.material.clone(), reaction);
    }

    // Get fuel
    let mut stack:Vec<Component> = vec![];
    // Materials that are produced directly from ORE (handled in a second pass).
    let mut ore_stack:std::collections::HashSet<String> = std::collections::HashSet::new();
    stack.push( Component{material:String::from("FUEL"), qty:1});
    while !stack.is_empty() {
        // pop material
        let material = stack.pop().unwrap();
        // Get reaction
        let &reaction = reaction_map.get(&material.material).unwrap();
        // Withdraw any banked surplus of this material first.
        let mut bank_qty = surplus_map.remove(&material.material).or(Some(0)).unwrap();
        let mut lowest_mult = 0;
        if bank_qty > 0 {
            //println!(" => Found {} rest qty for {}", bank_qty, material.material);
        }
        // Determine how may times we need to run this reaction
        while bank_qty < material.qty {
            bank_qty += reaction.output.qty;
            lowest_mult += 1;
        }
        // Rest qty?
        let rest_qty = bank_qty - material.qty;
        // Update surplus map
        if rest_qty > 0 {
            surplus_map.insert(material.material.clone(), rest_qty);
            //println!(" => Added {} rest qty for {}", rest_qty, material.material);
        }
        // Could make it on rest material
        if lowest_mult == 0 {
            continue;
        }
        // Push new materials required
        for comp in reaction.input.iter() {
            let total_qty = lowest_mult * comp.qty;
            if !comp.material.as_str().eq("ORE") {
                //println!("Reaction {:?}", reaction);
                //println!("To produce {} {} I need {} {}", material.qty, material.material, total_qty ,comp.material);
                stack.push(Component { qty: total_qty, material: comp.material.clone() });
                // Add to requirement map
                if material_map.contains_key(&comp.material) {
                    let cnt = material_map.get_mut(&comp.material).unwrap();
                    *cnt += total_qty;
                    //println!(" Updated need for {} to {}", comp.material, cnt);
                } else {
                    material_map.insert(comp.material.clone(), total_qty);
                    //println!(" Need for {} to {}", comp.material, total_qty);
                }
            } else {
                // Ore transition
                ore_stack.insert(material.material.clone());
            }
        }
    }

    // Transition from Ore to Materials
    let mut ore_qty = 0;
    for material_str in ore_stack {
        let material_qty = material_map.get(&material_str).unwrap();
        //println!("I need to produce {} of {}", material_qty, material_str);
        let &reaction = reaction_map.get(&material_str).unwrap();
        //println!("Reaction: {:?}", reaction);
        let mult = get_required_multiple(*material_qty, reaction.output.qty);
        ore_qty += mult * reaction.input.first().unwrap().qty;
        //println!("Required ore: {}", mult * reaction.input.first().unwrap().qty);
    }
    ore_qty
}

#[cfg(test)]
mod tests {
    // Note this useful idiom: importing names from outer (for mod tests) scope.
    use super::*;

    // Single-reaction parse smoke test (prints only, no assertion).
    #[test]
    fn test1() {
        println!("Test");
        let input = "3 A, 4 B => 1 AB";
        let reaction = parse_reaction(input);
        println!("{:?}",reaction);
    }

    // Multi-line parse smoke test (prints only, no assertion).
    #[test]
    fn test2() {
        println!("Test");
        let input = "9 ORE => 2 A
8 ORE => 3 B
7 ORE => 5 C
3 A, 4 B => 1 AB
5 B, 7 C => 1 BC
4 C, 1 A => 1 CA
2 AB, 3 BC, 4 CA => 1 FUEL";
        let reactions = parse_input(input);
        println!("{:?}",reactions);
    }

    // AoC sample: 165 ORE for one FUEL.
    #[test]
    fn test3() {
        println!("Test");
        let input = "9 ORE => 2 A
8 ORE => 3 B
7 ORE => 5 C
3 A, 4 B => 1 AB
5 B, 7 C => 1 BC
4 C, 1 A => 1 CA
2 AB, 3 BC, 4 CA => 1 FUEL";
        let res = count_or_for_one_fuel(parse_input(input));
        println!("{:?}",res);
        assert_eq!(165, res);
    }

    // AoC sample: 31 ORE for one FUEL.
    #[test]
    fn test4() {
        println!("Test");
        let input = "10 ORE => 10 A
1 ORE => 1 B
7 A, 1 B => 1 C
7 A, 1 C => 1 D
7 A, 1 D => 1 E
7 A, 1 E => 1 FUEL";
        let res = count_or_for_one_fuel(parse_input(input));
        println!("{:?}",res);
        assert_eq!(31, res);
    }

    // AoC sample: 13312 ORE for one FUEL.
    #[test]
    fn test5() {
        println!("Test");
        let input = "157 ORE => 5 NZVS
165 ORE => 6 DCFZ
44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL
12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ
179 ORE => 7 PSHF
177 ORE => 5 HKGWZ
7 DCFZ, 7 PSHF => 2 XJWVT
165 ORE => 2 GPVTF
3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT";
        let res = count_or_for_one_fuel(parse_input(input));
        println!("{:?}",res);
        assert_eq!(13312, res);
    }

    // Part-2 sample run (prints only, no assertion).
    #[test]
    fn test_part2_test1() {
        println!("Test");
        let input = "157 ORE => 5 NZVS
165 ORE => 6 DCFZ
44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL
12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ
179 ORE => 7 PSHF
177 ORE => 5 HKGWZ
7 DCFZ, 7 PSHF => 2 XJWVT
165 ORE => 2 GPVTF
3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT";
        let total_ore = 1000_000_000_000;
        let produced_fuel = count_or_for_n_fuel(parse_input(input), total_ore);
        println!("{} ORE gives {} fuel", total_ore, produced_fuel);
    }

    // AoC sample: 180697 ORE for one FUEL.
    #[test]
    fn test6() {
        println!("Test");
        let input = "2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG
17 NVRVD, 3 JNWZP => 8 VPVL
53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL
22 VJHF, 37 MNCFX => 5 FWMGM
139 ORE => 4 NVRVD
144 ORE => 7 JNWZP
5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC
5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV
145 ORE => 6 MNCFX
1 NVRVD => 8 CXFTF
1 VJHF, 6 MNCFX => 4 RFSQX
176 ORE => 6 VJHF";
        let res = count_or_for_one_fuel(parse_input(input));
        println!("{:?}",res);
        assert_eq!(180697, res);
    }

    // Part-2 sample run (prints only, no assertion).
    #[test]
    fn test_part2_test2() {
        println!("Test");
        let input = "2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG
17 NVRVD, 3 JNWZP => 8 VPVL
53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL
22 VJHF, 37 MNCFX => 5 FWMGM
139 ORE => 4 NVRVD
144 ORE => 7 JNWZP
5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC
5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV
145 ORE => 6 MNCFX
1 NVRVD => 8 CXFTF
1 VJHF, 6 MNCFX => 4 RFSQX
176 ORE => 6 VJHF";
        let total_ore = 1000_000_000_000;
        let produced_fuel = count_or_for_n_fuel(parse_input(input), total_ore);
        println!("{} ORE gives {} fuel", total_ore, produced_fuel);
    }

    // AoC sample: 2210736 ORE for one FUEL.
    #[test]
    fn test7() {
        println!("Test");
        let input = "171 ORE => 8 CNZTR
7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL
114 ORE => 4 BHXH
14 VRPVC => 6 BMBT
6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL
6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT
15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW
13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW
5 BMBT => 4 WPTQ
189 ORE => 9 KTJDG
1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP
12 VRPVC, 27 CNZTR => 2 XDBXC
15 KTJDG, 12 BHXH => 5 XCVML
3 BHXH, 2 VRPVC => 7 MZWV
121 ORE => 7 VRPVC
7 XCVML => 6 RJRHP
5 BHXH, 4 VRPVC => 5 LTCX";
        let res = count_or_for_one_fuel(parse_input(input));
        println!("{:?}",res);
        assert_eq!(2210736, res);
    }

    // NOTE(review): `#[test]` is intentionally commented out — this part-2
    // run was disabled.
    //#[test]
    fn part2_test3() {
        println!("Test");
        let input = "171 ORE => 8 CNZTR
7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL
114 ORE => 4 BHXH
14 VRPVC => 6 BMBT
6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL
6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT
15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW
13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW
5 BMBT => 4 WPTQ
189 ORE => 9 KTJDG
1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP
12 VRPVC, 27 CNZTR => 2 XDBXC
15 KTJDG, 12 BHXH => 5 XCVML
3 BHXH, 2 VRPVC => 7 MZWV
121 ORE => 7 VRPVC
7 XCVML => 6 RJRHP
5 BHXH, 4 VRPVC => 5 LTCX";
        let total_ore = 1000_000_000_000;
        let produced_fuel = count_or_for_n_fuel(parse_input(input), total_ore);
        println!("{} ORE gives {} fuel", total_ore, produced_fuel);
    }

    // Real puzzle input: 319014 ORE for one FUEL.
    #[test]
    fn test_part1() {
        println!("Test");
        let input = "12 JSMPL, 1 RFSHT => 8 NLTCF
6 LTSZQ, 22 KLSMX, 12 CWLGT => 2 MZXFC
4 WMVD, 3 PLBT, 1 ZKDMR => 5 CWLGT
5 SDTGC => 2 LSFKV
189 ORE => 3 TNTDN
20 CZKW => 4 BGNFD
5 XFMH => 7 SFRQ
7 NLTCF => 1 KLSMX
1 NLTCF => 4 HTDFH
2 RFPT, 5 JFXPH => 5 KRCQ
178 ORE => 7 XGLBX
1 NHQH => 3 NDMT
4 BNVTZ, 13 KXFJ, 14 QRBK, 56 SJSLP, 18 SPFP, 9 WMVD, 12 JFXPH, 1 MHXF => 1 FUEL
1 XQRX, 2 DPRVM, 1 HTDFH, 24 NLTCF, 8 SPBXP, 20 TSRNS, 2 VJDBK, 1 PXKL => 7 SPFP
6 WMVD => 3 SPBXP
1 XGLBX => 8 QXLMV
1 PLBT => 5 ZKDMR
25 VJDBK, 5 MZXFC, 3 BDGCJ => 9 BNVTZ
2 TNTDN, 1 SZNCS => 2 LMXBH
3 TNTDN => 6 RVRD
4 RFPT => 6 VHMQ
7 QXLMV, 1 LMXBH, 4 CSZP => 8 XFMH
5 SZNCS => 5 JSMPL
5 MHXF, 5 LTSZQ => 4 RFPT
5 XQMBJ, 1 BGNFD, 5 TQPGR => 3 NHQH
10 CHWS => 2 BDGCJ
19 DPRVM, 13 NHQH, 7 CZKW => 6 FWMXM
1 KLSMX, 1 PLBT, 5 XFMH => 3 SDTGC
20 LMXBH => 9 RFSHT
3 XGLBX => 1 TNPVZ
3 FBWF => 7 WMVD
1 QXLMV, 1 LMXBH => 3 ZMNV
5 JSMPL, 12 SFRQ => 8 CZKW
2 TNPVZ => 9 MHXF
2 MNVX, 1 RBMLP, 6 LSFKV => 9 VJDBK
26 SZNCS, 1 XGLBX => 6 CSZP
6 FBWF, 2 SPBXP, 4 BDGCJ => 2 TQPGR
5 LSFKV, 5 DPRVM => 9 QNFC
33 BDGCJ, 3 CWLGT => 4 XQRX
2 TQPGR, 22 LSFKV, 2 RFPT, 1 BDGCJ, 1 ZKDMR, 7 TSRNS, 6 DPRVM, 11 KRCQ => 2 QRBK
13 XQRX, 3 FWMXM, 2 CWLGT, 1 XQMBJ, 3 BGNFD, 6 HTDFH, 10 TSRNS => 5 KXFJ
1 ZKDMR => 9 CHWS
14 MNVX, 5 XFMH => 7 LTSZQ
2 NDMT, 2 QNFC, 11 ZMNV => 6 PXKL
7 SFRQ => 5 MNVX
2 WMPKD, 1 QXLMV => 9 SJSLP
14 JFXPH => 3 XQMBJ
14 SFRQ => 7 FBWF
1 WMPKD, 30 GBQGR, 4 SPBXP => 9 DPRVM
129 ORE => 4 SZNCS
5 JSMPL => 8 JFXPH
9 JFXPH, 2 VHMQ => 5 RBMLP
6 JSMPL => 7 GBQGR
25 SFRQ, 19 HRMT => 5 WMPKD
3 ZMNV => 9 PLBT
7 ZMNV, 9 RVRD, 8 SFRQ => 7 HRMT
8 RBMLP => 6 TSRNS";
        let res = count_or_for_one_fuel(parse_input(input));
        println!("{:?}",res);
        assert_eq!(319014, res);
    }
}
use amethyst::{
    ecs::*,
    ui::{UiFinder, UiText},
};

/// Debug system that writes the number of live entities into the UI text
/// element with id `"debug_entities_count"`.
pub struct EntityCountSystem {
    // Cached handle to the counter's UI entity, resolved lazily on first run.
    entity_count_ui: Option<Entity>,
}

impl EntityCountSystem {
    /// Creates the system with no UI entity resolved yet.
    pub fn new() -> Self {
        EntityCountSystem {
            entity_count_ui: None,
        }
    }
}

impl<'s> System<'s> for EntityCountSystem {
    type SystemData = (UiFinder<'s>, WriteStorage<'s, UiText>, Entities<'s>);

    fn run(&mut self, data: Self::SystemData) {
        // Delegate to an Option-returning helper so missing UI pieces can be
        // skipped with `?` instead of nested ifs; a `None` result is a no-op.
        self.update_ui(data);
    }
}

impl EntityCountSystem {
    /// Refreshes the entity-count label.
    ///
    /// Returns `None` (doing nothing) until both the "debug_entities_count"
    /// UI entity and its `UiText` component are available.
    fn update_ui(&mut self, data: <EntityCountSystem as System>::SystemData) -> Option<()> {
        let (finder, mut texts, entities) = data;

        // Resolve and cache the UI entity the first time it appears.
        if self.entity_count_ui.is_none() {
            self.entity_count_ui = finder.find("debug_entities_count");
        }
        let ui = self.entity_count_ui?;

        // Count live entities with the join iterator instead of the original
        // manual `count = count + 1` accumulation loop.
        let count = (&*entities,).join().count();

        let text_ui = texts.get_mut(ui)?;
        text_ui.text = format!("entity: {}", count);
        Some(())
    }
}
use std::vec::Vec;
use std::collections::HashMap;
use std::f32::consts::PI;

/// Plane-wave occupation data for a homogeneous electron gas (HEG).
struct Occupations {
    n_electrons: u32,          // number of occupied integer k-points
    k_pf: f32,                 // prefactor for the kinetic-energy sum
    ee_pf: f32,                // prefactor for the electron-electron sum
    occ: Vec<(i32, i32, i32)>, // the occupied k-points themselves
}

/// Enumerates all integer k-points with |k|^2 < `k2_max` and precomputes the
/// energy prefactors from the Wigner-Seitz radius `r_s`.
///
/// Fixes over the original: the scan cube used `2 * ceil(sqrt(k2_max))` per
/// axis (8x more points than needed), and it built an occupied/unoccupied
/// `HashMap` plus an `unocc` vector that were never read afterwards.
/// The resulting `occ` list (and its ordering) is unchanged.
fn build_occupations(r_s: f32, k2_max: f32) -> Occupations {
    // Every point with |k|^2 < k2_max lies inside the inclusive cube
    // [-ceil(sqrt(k2_max)), ceil(sqrt(k2_max))]^3.
    let k_limit = k2_max.sqrt().ceil() as i32;
    let mut occ: Vec<(i32, i32, i32)> = Vec::new();
    for x in -k_limit..=k_limit {
        for y in -k_limit..=k_limit {
            for z in -k_limit..=k_limit {
                if ((x * x + y * y + z * z) as f32) < k2_max {
                    occ.push((x, y, z));
                }
            }
        }
    }

    let n_electrons = occ.len() as u32;
    // Cell volume from the Wigner-Seitz radius, then the cubic box length.
    let volume = 4.0 / 3.0 * PI * (n_electrons as f32) * r_s.powi(3);
    let length: f32 = volume.powf(1.0 / 3.0);
    // Per-electron prefactors (same formulas as the original).
    let k_pf: f32 = (8.0 * PI) * length.powi(-2) / (n_electrons as f32);
    let ee_pf: f32 = -2.0 / (length * PI) / (n_electrons as f32);
    Occupations {
        n_electrons,
        k_pf,
        ee_pf,
        occ,
    }
}

/// Total energy: kinetic sum over occupied k-points plus the pairwise
/// electron-electron sum (each unordered pair counted once).
fn get_energy(occ: &Occupations) -> f32 {
    let mut ke: f32 = 0.0;
    let mut ee: f32 = 0.0;
    for (i, k_point) in occ.occ.iter().enumerate() {
        ke += (k_point.0.pow(2) + k_point.1.pow(2) + k_point.2.pow(2)) as f32;
        for j_point in &occ.occ[i + 1..] {
            let dx2 = (k_point.0 - j_point.0).pow(2);
            let dy2 = (k_point.1 - j_point.1).pow(2);
            let dz2 = (k_point.2 - j_point.2).pow(2);
            ee += ((dx2 + dy2 + dz2) as f32).powi(-1);
        }
    }
    ke * occ.k_pf + ee * occ.ee_pf
}

/// C-ABI entry point: builds the occupation set and returns the total energy.
#[no_mangle]
pub extern "C" fn get_heg_info(r_s: f32, k2_max: f32) -> f32 {
    let electrons = build_occupations(r_s, k2_max);
    get_energy(&electrons)
}
#![warn( clippy::all, clippy::nursery, clippy::pedantic, missing_copy_implementations, missing_debug_implementations, rust_2018_idioms, unused_qualifications )] #![allow( clippy::doc_markdown, clippy::enum_glob_use, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::similar_names, clippy::single_match_else, clippy::wildcard_imports, dead_code, elided_lifetimes_in_paths )] pub mod nodes; pub mod tokens; pub use crate::{nodes::*, tokens::*}; pub use walrus_lexer::Span;
/// Line-oriented reader over an embedded test-vector response file,
/// skipping blank lines and `#` comments.
struct TestFile {
    inner: Box<dyn Iterator<Item = &'static str>>,
}

impl TestFile {
    /// Wraps raw file contents, filtering out empty and comment lines.
    fn new(contents: &'static str) -> Self {
        Self {
            inner: Box::new(
                contents
                    .lines()
                    .filter(|s| !s.is_empty() && !s.starts_with('#')),
            ),
        }
    }

    /// Returns the next meaningful line, or `None` at end of file.
    fn read_line(&mut self) -> Option<&'static str> {
        self.inner.next()
    }

    /// Consumes a `name = value` line and returns the value part.
    /// Panics if the next line does not start with `name = `.
    fn consume(&mut self, name: &str) -> Option<&'static str> {
        self.read_line().map(|s| {
            let prefix = format!("{} = ", name);
            assert!(s.starts_with(&prefix), "unexpected line");
            &s[prefix.len()..]
        })
    }

    /// Consumes a `name = <hex>` line, decoding the value as bytes.
    fn consume_bytes(&mut self, name: &str) -> Option<Vec<u8>> {
        self.consume(name).map(|s| hex::decode(s).unwrap())
    }

    /// Consumes a `name = <decimal>` line, parsing the value as `u64`.
    fn consume_u64(&mut self, name: &str) -> Option<u64> {
        self.consume(name).map(|s| s.parse().unwrap())
    }
}

/// Opens `data/<prefix><suffix>.rsp`, embedded at compile time.
macro_rules! test_file {
    ($file_prefix:literal, $file_suffix:literal) => {{
        TestFile::new(include_str!(concat!(
            "data/", $file_prefix, $file_suffix, ".rsp"
        )))
    }};
}

/// Generates a known-answer test: for each `Len`/`Msg`/`MD` triple in the
/// response file, hashes `Msg` and compares against the expected digest `MD`.
macro_rules! known_answer_test {
    ($name:ident, $ty:ty, $file_prefix:literal, $file_suffix:literal) => {
        #[test]
        fn $name() {
            let mut f = test_file!($file_prefix, $file_suffix);
            // The file header states the digest length in bytes.
            assert_eq!(
                format!("[L = {}]", <$ty>::DIGEST_SIZE),
                f.read_line().unwrap()
            );
            while let Some(length) = f.consume_u64("Len") {
                let mut input = f.consume_bytes("Msg").unwrap();
                // The vector files encode the empty message as one zero byte.
                if length == 0 {
                    assert_eq!(input, &[0]);
                    input.pop();
                }
                // `Len` is given in bits.
                assert_eq!(length, (input.len() as u64) * 8);
                let digest = <$ty>::new().update(&input).finalize();
                assert_eq!(&digest[..], &f.consume_bytes("MD").unwrap()[..]);
            }
        }
    };
}

/// Generates the Monte Carlo test: starting from `Seed`, runs 1000 chained
/// hash iterations per checkpoint and compares each checkpoint digest.
macro_rules! monte_carlo {
    ($ty:ty, $file_prefix:literal) => {
        #[test]
        fn monte_carlo() {
            let mut f = test_file!($file_prefix, "Monte");
            assert_eq!(
                format!("[L = {}]", <$ty>::DIGEST_SIZE),
                f.read_line().unwrap()
            );
            let mut seed: [u8; <$ty>::DIGEST_SIZE] = [0; <$ty>::DIGEST_SIZE];
            seed.copy_from_slice(&f.consume_bytes("Seed").unwrap());
            // COUNT lines must appear in order 0, 1, 2, ...
            let mut expected_count = 0;
            while let Some(count) = f.consume_u64("COUNT") {
                assert_eq!(count, expected_count);
                expected_count += 1;
                // Three-lag digest chain: each iteration hashes the previous
                // three digests, then shifts the window.
                let mut md_0 = seed;
                let mut md_1 = seed;
                let mut md_2 = seed;
                for _ in 0..1000 {
                    let md_i = <$ty>::new()
                        .update(&md_0)
                        .update(&md_1)
                        .update(&md_2)
                        .finalize();
                    md_0 = md_1;
                    md_1 = md_2;
                    md_2 = md_i;
                }
                assert_eq!(&md_2[..], &f.consume_bytes("MD").unwrap()[..]);
                // The final digest of one checkpoint seeds the next.
                seed = md_2;
            }
        }
    };
}

/// Instantiates the full suite (short, long, Monte Carlo) for one digest type.
macro_rules! tests {
    ($mod:ident, $ty:ty, $file_prefix:literal) => {
        mod $mod {
            use super::TestFile;
            known_answer_test!(short_msg, $ty, $file_prefix, "ShortMsg");
            known_answer_test!(long_msg, $ty, $file_prefix, "LongMsg");
            monte_carlo!($ty, $file_prefix);
        }
    };
}

tests!(sha224, sha2_const::Sha224, "SHA224");
tests!(sha256, sha2_const::Sha256, "SHA256");
tests!(sha384, sha2_const::Sha384, "SHA384");
tests!(sha512, sha2_const::Sha512, "SHA512");
tests!(sha512_224, sha2_const::Sha512_224, "SHA512_224");
tests!(sha512_256, sha2_const::Sha512_256, "SHA512_256");
use std::cell::RefCell;
use wayland_server::{
    protocol::{
        wl_data_device_manager::DndAction,
        wl_data_source::{Request, WlDataSource},
    },
    NewResource,
};

/// The metadata describing a data source
#[derive(Debug, Clone)]
pub struct SourceMetadata {
    /// The MIME types supported by this source
    pub mime_types: Vec<String>,
    /// The Drag'n'Drop actions supported by this source
    pub dnd_action: DndAction,
}

/// Implements the `wl_data_source` protocol object, accumulating the offered
/// MIME types and requested DnD actions into a `RefCell<SourceMetadata>`
/// stored as the resource's user data.
pub(crate) fn implement_data_source(src: NewResource<WlDataSource>) -> WlDataSource {
    src.implement_closure(
        |req, me| {
            // The metadata cell is installed below as this resource's user
            // data, so the unwrap cannot fail for resources created here.
            let data: &RefCell<SourceMetadata> = me.as_ref().user_data().unwrap();
            let mut guard = data.borrow_mut();
            match req {
                Request::Offer { mime_type } => guard.mime_types.push(mime_type),
                Request::SetActions { dnd_actions } => {
                    // Any unknown action bits sent by the client are dropped.
                    guard.dnd_action = DndAction::from_bits_truncate(dnd_actions);
                }
                Request::Destroy => {}
                _ => unreachable!(),
            }
        },
        // No destructor callback.
        None::<fn(_)>,
        RefCell::new(SourceMetadata {
            mime_types: Vec::new(),
            dnd_action: DndAction::None,
        }),
    )
}

/// Access the metadata of a data source
///
/// Returns `Err(())` when the source carries no metadata user data
/// (i.e. it was not created through `implement_data_source`).
pub fn with_source_metadata<T, F: FnOnce(&SourceMetadata) -> T>(
    source: &WlDataSource,
    f: F,
) -> Result<T, ()> {
    match source.as_ref().user_data::<RefCell<SourceMetadata>>() {
        Some(data) => Ok(f(&data.borrow())),
        None => Err(()),
    }
}
use std::{ fs::{File, OpenOptions}, path::PathBuf, }; /// Default deps path const DEFAULT_OUT_FILE: &str = "dapp.sol.json"; /// Initializes a tracing Subscriber for logging pub fn subscriber() { tracing_subscriber::FmtSubscriber::builder() // .with_timer(tracing_subscriber::fmt::time::uptime()) .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) // don't need the target .with_target(false) .init(); } /// Default to including all files under current directory in the allowed paths pub fn default_path(path: Vec<String>) -> eyre::Result<Vec<String>> { Ok(if path.is_empty() { vec![".".to_owned()] } else { path }) } /// merge the cli-provided remappings vector with the /// new-line separated env var pub fn merge(mut remappings: Vec<String>, remappings_env: Option<String>) -> Vec<String> { // merge the cli-provided remappings vector with the // new-line separated env var if let Some(env) = remappings_env { remappings.extend_from_slice(&env.split('\n').map(|x| x.to_string()).collect::<Vec<_>>()); // deduplicate the extra remappings remappings.sort_unstable(); remappings.dedup(); } remappings } /// Opens the file at `out_path` for R/W and creates it if it doesn't exist. pub fn open_file(out_path: PathBuf) -> eyre::Result<File> { Ok(if out_path.is_file() { // get the file if it exists OpenOptions::new().write(true).open(out_path)? } else if out_path.is_dir() { // get the directory if it exists & the default file path let out_path = out_path.join(DEFAULT_OUT_FILE); // get a file handler (overwrite any contents of the existing file) OpenOptions::new().write(true).create(true).open(out_path)? 
} else { // otherwise try to create the entire path // in case it's a directory, we must mkdir it let out_path = if out_path.to_str().ok_or_else(|| eyre::eyre!("not utf-8 path"))?.ends_with('/') { std::fs::create_dir_all(&out_path)?; out_path.join(DEFAULT_OUT_FILE) } else { // if it's a file path, we must mkdir the parent let parent = out_path .parent() .ok_or_else(|| eyre::eyre!("could not get parent of {:?}", out_path))?; std::fs::create_dir_all(parent)?; out_path }; // finally we get the handler OpenOptions::new().write(true).create_new(true).open(out_path)? }) }
use crate::{
    binding::{Binder, DebruijnIndex},
    fold::{TyFoldable, TyVisitor},
    PlaceholderTy, RecordTy, RecordTyData, Ty, TyData, TyDatabase, TyKind,
};
use core::{
    hash::{Hash, Hasher},
    ops::Index,
};
use hashbrown::HashMap;
use indexmap::IndexSet;
use valis_ds::{hashed::Hashed, Untern};
use valis_hir::ids::Identifier;

// Some notes:
//
// Representing the components of a type automaton in a finite manner is
// difficult, especially when functions of the automaton will inevitably require
// iterations over an infinite domain. To aid with this problem we will consider
// that the type automaton will be "scoped" when performing specific operations.
//
// The "scope" will include all information that is specific to the types (Ty)
// under consideration. As a concrete example, the alphabet of the automaton
// includes all the items of the set of possible labels. It is unnecessary and
// impossible to represent that entire set, so the "scope" object will collect
// and store a mapping for all the labels that are used in a given set of types.
//
// One question that still arises is what is the set of types under
// consideration? When type checking a function, the set of types is all the
// types that are used within the definition of the function and its body. This
// is a property that can be determined syntactically, by walking the HIR.
//
// Also, most automata (not necessarily type or scheme automata) require that
// the set of possible states is a range without holes. Usually the best
// representation for this is a range on some sized integer. However, converting
// from a possibly infinite alphabet to a finite integer is difficult. The
// scoping comes in handy because it can tell us what the size of labels/ty
// under use are, and then construct valid representing integers from that.
// Also, we know that those integers are only valid in the scope of the TyScope
// object.
// Scoped index types: indices into the three IndexSet mappings below.
// Each is a u32-backed newtype index (max value u32::MAX).
valis_ds::typed_index!(@base_no_salsa pub ScopedIdentifier core::u32::MAX);
valis_ds::typed_index!(@base_no_salsa pub ScopedTy core::u32::MAX);
valis_ds::typed_index!(@base_no_salsa pub ScopedPlaceholder core::u32::MAX);

/// A hash-cached scope over a set of types; see the module notes above.
pub type TyScope = Hashed<TyScopeData>;

/// The scope itself: insertion-ordered sets of the labels, types and
/// placeholders in use, so each can be addressed by a dense scoped index.
#[derive(Debug, Clone, Eq)]
pub struct TyScopeData {
    pub(crate) label_mapping: IndexSet<Identifier>,
    pub(crate) ty_mapping: IndexSet<Ty>,
    pub(crate) placeholder_mapping: IndexSet<PlaceholderTy>,
}

impl PartialEq for TyScopeData {
    // Field-wise set equality. NOTE(review): IndexSet equality is
    // order-insensitive, while the Hash impl below is order-sensitive —
    // presumably scopes compared here are always built in the same order;
    // confirm against callers.
    fn eq(&self, rhs: &Self) -> bool {
        self.label_mapping.eq(&rhs.label_mapping)
            && self.ty_mapping.eq(&rhs.ty_mapping)
            && self.placeholder_mapping.eq(&rhs.placeholder_mapping)
    }
}

impl Hash for TyScopeData {
    /// Hashes every entry together with its insertion index, in order.
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.label_mapping
            .iter()
            .enumerate()
            .for_each(|(idx, ident)| {
                idx.hash(hasher);
                ident.hash(hasher);
            });
        self.ty_mapping.iter().enumerate().for_each(|(idx, ty)| {
            idx.hash(hasher);
            ty.hash(hasher);
        });
        self.placeholder_mapping
            .iter()
            .enumerate()
            .for_each(|(idx, placeholder)| {
                idx.hash(hasher);
                placeholder.hash(hasher);
            })
    }
}

impl TyScopeData {
    /// Scoped index -> label, if in range.
    #[inline]
    pub(crate) fn get_label(&self, idx: ScopedIdentifier) -> Option<&Identifier> {
        self.label_mapping.get_index(idx.as_usize())
    }

    /// Scoped index -> type, if in range.
    #[inline]
    pub(crate) fn get_ty(&self, idx: ScopedTy) -> Option<&Ty> {
        self.ty_mapping.get_index(idx.as_usize())
    }

    /// Scoped index -> placeholder, if in range.
    #[inline]
    pub(crate) fn get_placeholder(&self, idx: ScopedPlaceholder) -> Option<&PlaceholderTy> {
        self.placeholder_mapping.get_index(idx.as_usize())
    }

    /// Label -> its scoped index, if present in this scope.
    #[inline]
    pub(crate) fn get_scoped_label(&self, idx: Identifier) -> Option<ScopedIdentifier> {
        self.label_mapping
            .get_full(&idx)
            .map(|(idx, _)| ScopedIdentifier::from(idx))
    }

    /// Type -> its scoped index, if present in this scope.
    #[inline]
    pub(crate) fn get_scoped_ty(&self, idx: Ty) -> Option<ScopedTy> {
        self.ty_mapping
            .get_full(&idx)
            .map(|(idx, _)| ScopedTy::from(idx))
    }

    /// Placeholder -> its scoped index, if present in this scope.
    #[inline]
    pub(crate) fn get_scoped_placeholder(&self, idx: PlaceholderTy) -> Option<ScopedPlaceholder> {
        self.placeholder_mapping
            .get_full(&idx)
            .map(|(idx, _)| ScopedPlaceholder::from(idx))
    }

    /// Number of types tracked by this scope.
    #[inline]
    pub(crate) fn num_tys(&self) -> usize {
        self.ty_mapping.len()
    }

    /// Builds a new scope keeping only the types accepted by `f`, plus a
    /// translation function mapping old scoped indices to new ones (returns
    /// `None` for filtered-out types). Labels and placeholders are copied
    /// unchanged.
    pub(crate) fn filter_tys<F: FnMut(ScopedTy, Ty) -> bool>(
        &self,
        mut f: F,
    ) -> (TyScope, impl Fn(ScopedTy) -> Option<ScopedTy>) {
        let mut new_ty_mapping = IndexSet::new();
        let mut scope_to_scope_mapping = HashMap::new();
        let ty_it = self
            .ty_mapping
            .iter()
            .enumerate()
            .map(|(idx, ty)| (ScopedTy::from(idx), ty));
        for (scoped_ty, ty) in ty_it {
            if f(scoped_ty, *ty) {
                // insert_full returns the (possibly pre-existing) index of ty
                // in the new set; record the old -> new index translation.
                let (new_idx, _) = new_ty_mapping.insert_full(*ty);
                let new_scoped_ty = ScopedTy::from(new_idx);
                scope_to_scope_mapping.insert(scoped_ty, new_scoped_ty);
            }
        }
        let scope = TyScopeData {
            ty_mapping: new_ty_mapping,
            label_mapping: self.label_mapping.clone(),
            placeholder_mapping: self.placeholder_mapping.clone(),
        }
        .into();
        (scope, move |scoped_ty| {
            scope_to_scope_mapping.get(&scoped_ty).copied()
        })
    }
}

// Panicking indexed access; use the get_* methods for fallible lookup.
impl Index<ScopedIdentifier> for TyScopeData {
    type Output = Identifier;
    #[inline]
    fn index(&self, index: ScopedIdentifier) -> &Self::Output {
        self.get_label(index).unwrap()
    }
}

impl Index<ScopedTy> for TyScopeData {
    type Output = Ty;
    #[inline]
    fn index(&self, index: ScopedTy) -> &Self::Output {
        self.get_ty(index).unwrap()
    }
}

impl Index<ScopedPlaceholder> for TyScopeData {
    type Output = PlaceholderTy;
    #[inline]
    fn index(&self, index: ScopedPlaceholder) -> &Self::Output {
        self.get_placeholder(index).unwrap()
    }
}

/// Visitor that walks a set of types and accumulates the scope contents
/// (labels, subterm types, placeholders) for them.
#[derive(Debug, Clone)]
pub(crate) struct ScopeVisitor<'a, DB> {
    db: &'a DB,
    // Tracks how many binders we are currently inside, to distinguish
    // bound from unbound recursive placeholders in visit_ty.
    current_index: DebruijnIndex,
    label_mapping: IndexSet<Identifier>, // collects all labels
    ty_mapping: IndexSet<Ty>,            // does same thing as subterms visitor
    placeholder_mapping: IndexSet<PlaceholderTy>, // collects all placeholders
}

impl<'a, DB: TyDatabase> ScopeVisitor<'a, DB> {
    /// Creates an empty visitor starting at the innermost binder level.
    pub(crate) fn new(db: &'a DB) -> Self {
        ScopeVisitor {
            db,
            current_index: DebruijnIndex::INNERMOST,
            label_mapping: IndexSet::new(),
            ty_mapping: IndexSet::new(),
            placeholder_mapping: IndexSet::new(),
        }
    }

    /// Finalizes the walk, converting the accumulated mappings into a TyScope.
    pub(crate) fn into_scope(self) -> TyScope {
        Hashed::from(TyScopeData {
            label_mapping: self.label_mapping,
            ty_mapping: self.ty_mapping,
            placeholder_mapping: self.placeholder_mapping,
        })
    }
}

impl<'a, DB: TyDatabase> TyVisitor for ScopeVisitor<'a, DB> {
    type DB = DB;
    type Output = ();

    #[inline]
    fn db(&self) -> &Self::DB {
        self.db
    }

    fn default_output(&self) -> Self::Output {}

    fn combine_output(&self, _o1: Self::Output, _o2: Self::Output) -> Self::Output {}

    /// Entering a binder shifts the Debruijn depth in; leaving shifts it out.
    #[inline]
    fn visit_binder<T: TyFoldable>(&mut self, t: &Binder<T>) -> Self::Output {
        self.current_index = self.current_index.shifted_in(1);
        let () = t.super_visit_with(self);
        self.current_index = self.current_index.shifted_out(1);
    }

    /// Records every field label of a record type, then recurses into the
    /// field types.
    #[inline]
    fn visit_record_ty(&mut self, record_ty: RecordTy) -> Self::Output {
        let RecordTyData { labels, types } = record_ty.untern(self.tables());
        self.label_mapping.extend(labels);
        types.visit_with(self)
    }

    /// Records `ty` itself (when appropriate) and recurses into its
    /// components.
    fn visit_ty(&mut self, ty: Ty) -> Self::Output {
        use TyKind::*;
        let TyData { kind, .. } = ty.untern(self.tables());
        match kind {
            // Leaf types: record and stop.
            Boolean | Symbol | Number | Top | Bottom => {
                self.ty_mapping.insert(ty);
            },
            // Composite types: record, then recurse into the inner type.
            Record(inner_ty) => {
                self.ty_mapping.insert(ty);
                inner_ty.visit_with(self)
            },
            Function(inner_ty) => {
                self.ty_mapping.insert(ty);
                inner_ty.visit_with(self)
            },
            LatticeOp(inner_ty) => {
                self.ty_mapping.insert(ty);
                inner_ty.visit_with(self)
            },
            Recursive(inner_ty) => {
                self.ty_mapping.insert(ty);
                inner_ty.visit_with(self)
            },
            // Universal placeholders are always part of the scope.
            Placeholder(placeholder @ PlaceholderTy::Universal(_)) => {
                self.ty_mapping.insert(ty);
                self.placeholder_mapping.insert(placeholder);
            },
            Placeholder(PlaceholderTy::Recursive(idx)) => {
                self.placeholder_mapping
                    .insert(PlaceholderTy::Recursive(idx));
                // if the idx of the placeholder is greater than that accounted
                // for by the binders that have been entered to traverse to
                // this point, then the variable is either unbound or bound
                // beyond the consideration of the ty_mapping walk (unlikely).
                // Else the placeholder is bound by one of the binders seen and
                // should be omitted from the set of ty_mapping.
                if idx > self.current_index {
                    self.ty_mapping.insert(ty);
                }
            },
        };
    }
}
use crate::{ir::*, types::Type};
use std::collections::*;

/// Collects every variant type mentioned anywhere in the module's definitions.
pub fn collect_variant_types(module: &Module) -> HashSet<Type> {
    module
        .definitions()
        .iter()
        .flat_map(|definition| collect_from_definition(definition))
        .collect()
}

/// Collects variant types from a single definition's body.
fn collect_from_definition(definition: &Definition) -> HashSet<Type> {
    collect_from_expression(definition.body())
}

/// Recursively collects variant types from an expression tree.
///
/// Note: the original chained sub-results via `.drain()` on freshly-built
/// temporary sets — a mutable borrow of a temporary that empties it only to
/// drop it. `into_iter()` consumes the set directly and yields the same
/// elements. Similarly, single-element `vec![..]` allocations are replaced
/// with `std::iter::once`.
fn collect_from_expression(expression: &Expression) -> HashSet<Type> {
    match expression {
        Expression::ArithmeticOperation(operation) => collect_from_expression(operation.lhs())
            .into_iter()
            .chain(collect_from_expression(operation.rhs()))
            .collect(),
        Expression::Case(case) => collect_from_case(case),
        Expression::ComparisonOperation(operation) => collect_from_expression(operation.lhs())
            .into_iter()
            .chain(collect_from_expression(operation.rhs()))
            .collect(),
        Expression::FunctionApplication(application) => {
            collect_from_expression(application.function())
                .into_iter()
                .chain(collect_from_expression(application.argument()))
                .collect()
        }
        Expression::If(if_) => collect_from_expression(if_.condition())
            .into_iter()
            .chain(collect_from_expression(if_.then()))
            .chain(collect_from_expression(if_.else_()))
            .collect(),
        Expression::Let(let_) => collect_from_expression(let_.bound_expression())
            .into_iter()
            .chain(collect_from_expression(let_.expression()))
            .collect(),
        Expression::LetRecursive(let_) => let_
            .definitions()
            .iter()
            .flat_map(collect_from_definition)
            .chain(collect_from_expression(let_.expression()))
            .collect(),
        Expression::Record(record) => record
            .elements()
            .iter()
            .flat_map(collect_from_expression)
            .collect(),
        Expression::RecordElement(element) => collect_from_expression(element.record()),
        // A variant contributes its own type plus whatever its payload holds.
        Expression::Variant(variant) => std::iter::once(variant.type_().clone())
            .chain(collect_from_expression(variant.payload()))
            .collect(),
        Expression::Primitive(_) | Expression::ByteString(_) | Expression::Variable(_) => {
            Default::default()
        }
    }
}

/// Collects variant types from a case expression: each alternative's matched
/// type and body, plus the default alternative's body when present.
fn collect_from_case(case: &Case) -> HashSet<Type> {
    case.alternatives()
        .iter()
        .flat_map(|alternative| {
            std::iter::once(alternative.type_().clone())
                .chain(collect_from_expression(alternative.expression()))
        })
        .chain(
            case.default_alternative()
                .map(collect_from_expression)
                .unwrap_or_default(),
        )
        .collect()
}
// 实现函数 ToLowerCase(),该函数接收一个字符串参数 str,并将该字符串中的大写字母转换成小写字母,之后返回新的字符串。 // 示例 1: // 输入: "Hello" // 输出: "hello" // 示例 2: // 输入: "here" // 输出: "here" // 示例 3: // 输入: "LOVELY" // 输出: "lovely" struct Solution {} impl Solution { pub fn to_lower_case(str: String) -> String { str.to_lowercase() } } #[test] fn test_to_lower_case() { assert_eq!( Solution::to_lower_case(String::from("Hello")), String::from("hello") ); assert_eq!( Solution::to_lower_case(String::from("here")), String::from("here") ); assert_eq!( Solution::to_lower_case(String::from("LOVELY")), String::from("lovely") ); }
use crate::santa;
use crate::santa::Move;
use rand::seq::SliceRandom;
use rand::Rng;
use std::collections::VecDeque;

/// Simulated-annealing optimizer for the Santa family-scheduling problem.
/// Works on 5000 families spread over days 1..=100 (index 0 unused, hence
/// the 101-sized per-day vectors).
pub struct SA {
    families: santa::FamilyData,
    // families_per_day[d] = indices of families currently assigned to day d.
    families_per_day: Vec<Vec<usize>>,
    // All family indices 0..5000; shuffled each improvement sweep.
    all_fams: Vec<usize>,
    init_temperature: f32,
    temperature: f32,
    pub tabu: TabuList,
    maxiter: usize,
}

impl SA {
    /// Creates an optimizer with the given start temperature and iteration
    /// budget; the tabu list starts with capacity 1 (see `set_tabu`).
    pub fn new(families: santa::FamilyData, init_temperature: f32, maxiter: usize) -> Self {
        let mut s = Self {
            families,
            families_per_day: vec![Vec::new(); 101],
            all_fams: Vec::new(),
            init_temperature,
            temperature: init_temperature,
            tabu: TabuList::new(1),
            maxiter,
        };
        for i in 0..5000 {
            s.all_fams.push(i);
        }
        s
    }

    /// Main loop: repeatedly improves the current solution; on stagnation
    /// (20 rounds with no new best) resets the temperature, restarts from the
    /// best solution and perturbs it. Writes each new best to disk.
    pub fn optimize(&mut self, sol: santa::Solution) -> santa::Solution {
        self.init_families_per_day(&sol);
        let mut bestsol = sol.clone();
        let mut cursol = sol;
        let mut no_improve_cnt = 0;
        for i in 0..self.maxiter {
            self.improve(&mut cursol, 10);
            if cursol.costs < bestsol.costs {
                bestsol = cursol.clone();
                santa::write_solution(&bestsol, "./data/output/");
                no_improve_cnt = 0;
            } else {
                no_improve_cnt += 1;
                if no_improve_cnt >= 20 {
                    //println!("reset");
                    self.temperature = self.init_temperature;
                    cursol = bestsol.clone();
                    self.perturbate(&mut cursol);
                    no_improve_cnt = 0;
                }
            }
            if i % 100 == 0 {
                println!("---> c: {}", bestsol.costs);
            }
        }
        return bestsol;
    }

    /// Replaces the tabu list (e.g. with a larger capacity).
    pub fn set_tabu(&mut self, tabu: TabuList) {
        self.tabu = tabu
    }

    /// One sweep over all families in random order, trying up to
    /// `tries_per_fam` moves per family. Returns whether costs changed.
    fn improve(&mut self, sol: &mut santa::Solution, tries_per_fam: usize) -> bool {
        self.all_fams.shuffle(&mut rand::thread_rng());
        let c_before = sol.costs;
        for i in 0..5000 {
            let x = self.all_fams[i];
            for _try in 0..tries_per_fam {
                if self.find_move(sol, x as u32) {
                    break;
                }
            }
        }
        c_before != sol.costs
    }

    /// Searches for moves (Ai-Bj..) -> (Aj-Bk..) -> .. that improve the given solution.
    ///
    /// Builds a chain of up to 3 candidate families; each step reassigns the
    /// current family to one of its preferred days and picks the next
    /// candidate from that day. Applies the first feasible chain that either
    /// improves the score or passes the annealing acceptance test (the latter
    /// only for chains of length >= 3, since `ind > 1`). Tabu-visited
    /// occupancy states are rejected. Returns whether a move was applied.
    fn find_move(&mut self, sol: &mut santa::Solution, x: u32) -> bool {
        let mut m = santa::Move::new();
        m.candidates.push(x);
        let mut ind: usize = 0;
        let maxlen = 3;
        loop {
            let x = m.candidates[ind];
            let xi = sol.x[x as usize];
            m.old_days.push(xi);
            let xj = self.pick_alternative(x, xi);
            m.new_days.push(xj);
            if sol.move_feasible(&self.families, &m) {
                let delta = sol.score_move(&self.families, &m);
                if delta > 0.0 || (ind > 1 && self.accept(delta)) {
                    let new_occs = sol.new_occs(&self.families, &m);
                    if self.tabu.already_visited(&new_occs) {
                        return false;
                    }
                    sol.apply_move(&self.families, &m);
                    self.tabu.add(&sol.occupancies);
                    self.init_families_per_day(sol);
                    // Cool down whenever a non-improving move was accepted.
                    if delta <= 0.0 {
                        self.temperature *= 0.9;
                    }
                    return true;
                }
            }
            if ind + 1 >= maxlen {
                break;
            } else {
                m.candidates.push(self.pick_from_day(xj));
            }
            ind += 1;
        }
        return false;
    }

    /// Metropolis acceptance: accept a (non-positive) delta with probability
    /// exp(delta / temperature).
    fn accept(&self, mut delta: f32) -> bool {
        let r: f32 = rand::thread_rng().gen();
        delta /= self.temperature;
        let prob = delta.exp();
        if r < prob {
            //println!("delta: {}", delta);
            return true;
        } else {
            return false;
        }
    }

    /// Diversification: temporarily rescales the cost weighting by a random
    /// factor, runs a short local search under the modified objective, then
    /// restores the weight and rescores.
    fn perturbate(&mut self, sol: &mut santa::Solution) -> santa::Solution {
        let mut rng = rand::thread_rng();
        let mod_weight = rng.gen_range(0.2, 2.2);
        println!("perturbate with {}", mod_weight);
        self.families.set_weight(mod_weight);
        let mut newsol = self.localsearch(sol.clone(), 2, 10);
        self.families.set_weight(1.0);
        newsol.costs = self.families.score(&newsol);
        newsol
    }

    /// Repeated improvement sweeps until `break_after` consecutive sweeps
    /// leave the costs unchanged.
    fn localsearch(
        &mut self,
        mut sol: santa::Solution,
        break_after: usize,
        tries_per_fam: usize,
    ) -> santa::Solution {
        self.init_families_per_day(&sol);
        let mut count_unchanged = 0;
        loop {
            let changed = self.improve(&mut sol, tries_per_fam);
            count_unchanged = if changed { 0 } else { count_unchanged + 1 };
            if count_unchanged >= break_after {
                break;
            }
        }
        sol
    }

    /// Alternative perturbation: swap the days of two random families.
    /// Loops until it finds a feasible swap that does not cost more than 200
    /// overall and improves at least one cost component.
    fn perturbate2(&mut self, sol: &mut santa::Solution) {
        // perturbate such that at least one type of costs improve!
        let mut rng = rand::thread_rng();
        loop {
            let fama = rng.gen_range(0, 5000);
            let famb = rng.gen_range(0, 5000);
            let mut m = santa::Move::new();
            m.candidates.push(fama as u32);
            m.old_days.push(sol.x[fama]);
            m.new_days.push(sol.x[famb]);
            m.candidates.push(famb as u32);
            m.old_days.push(sol.x[famb]);
            m.new_days.push(sol.x[fama]);
            // return if feasible and either costs improve
            if sol.move_feasible(&self.families, &m) {
                let (costs, pcosts, acosts) = sol.score_move_split(&self.families, &m);
                if costs > -200.0 {
                    if pcosts > 0.0 || acosts > 0.0 {
                        sol.apply_move(&self.families, &m);
                        self.init_families_per_day(sol);
                        return;
                    }
                }
            }
        }
    }

    /// Picks a uniformly random family currently assigned to day `xj`.
    fn pick_from_day(&self, xj: u32) -> u32 {
        let mut rng = rand::thread_rng();
        let fnew_ind = rng.gen_range(0, self.families_per_day[xj as usize].len());
        self.families_per_day[xj as usize][fnew_ind] as u32
    }

    // pick alternative from choices or from adjacent days
    /// Picks one of family `x`'s first five preferred days, different from
    /// its current day `xi`. Loops until a different day is drawn.
    fn pick_alternative(&self, x: u32, xi: u32) -> u32 {
        let mut rng = rand::thread_rng();
        loop {
            let xji = rng.gen_range(0, 5);
            let xnew = self.families.choices[x as usize][xji];
            if xnew != xi {
                return xnew;
            }
        }
    }

    /// Rebuilds the day -> families index from the solution's assignments.
    fn init_families_per_day(&mut self, sol: &santa::Solution) {
        self.families_per_day = vec![Vec::new(); 101];
        for fam in 0..5000 {
            let day = sol.x[fam] as usize;
            self.families_per_day[day].push(fam);
        }
    }
}

/// Bounded FIFO of recently seen states used to avoid revisiting them.
/// (`TabuItem` is declared elsewhere in the crate.)
#[derive(Clone)]
pub struct TabuList {
    visited: VecDeque<TabuItem>,
    nmax: usize, // capacity: oldest entries are evicted beyond this
}

impl TabuList {
    pub fn new(nmax: usize) -> Self {
        Self {
            visited: VecDeque::new(),
            nmax,
        }
    }

    // TODO entire solutions and approximate
    /// Records a state unless it is already present; evicts the oldest entry
    /// when the list exceeds `nmax`.
    fn add(&mut self, item: TabuItem) {
        if !self.already_visited(&item) {
            self.visited.push_front(item);
            //println!("Added to tabu list");
            if self.visited.len() > self.nmax {
                self.visited.pop_back();
            }
        }
    }

    // TODO occupancies archive!!!
    /// Linear scan for a previously recorded state.
    fn already_visited(&self, item: &TabuItem) -> bool {
        // TODO binary search
        let visited = self.visited.iter().any(|x| x == item);
        if visited {
            println!("Been here before");
        }
        visited
    }
}
use crate::{tools, tools::rational::Rational};
use ffmpeg_sys_next::*;
use std::{char, collections::HashMap, ffi::CString, ptr::null_mut};

/// Thin read-only wrapper around a raw FFmpeg `AVStream` pointer.
///
/// Safety: the pointer is checked non-null in `new`, but the caller must
/// keep the owning format context alive for the lifetime of this value.
#[derive(Clone)]
pub struct Stream {
    pub stream: *mut AVStream,
}

impl Stream {
    /// Wraps the pointer, rejecting null.
    pub fn new(stream: *mut AVStream) -> Result<Stream, String> {
        if stream.is_null() {
            return Err("Null stream pointer".to_string());
        }
        Ok(Stream { stream })
    }

    /// The stream's time base as a rational.
    pub fn get_time_base(&self) -> Rational {
        unsafe {
            Rational {
                num: (*self.stream).time_base.num,
                den: (*self.stream).time_base.den,
            }
        }
    }

    /// Short codec name from the codec descriptor, if one exists.
    pub fn get_codec_name(&self) -> Option<String> {
        unsafe {
            let av_codec_id = avcodec_descriptor_get((*(*self.stream).codecpar).codec_id);
            if av_codec_id.is_null() {
                None
            } else {
                Some(tools::to_string((*av_codec_id).name))
            }
        }
    }

    /// Long codec name, with the ProRes profile suffix appended when the
    /// codec tag identifies one (see `get_codec_tag`).
    pub fn get_codec_long_name(&self) -> Option<String> {
        unsafe {
            let av_codec_id = avcodec_descriptor_get((*(*self.stream).codecpar).codec_id);
            if av_codec_id.is_null() {
                None
            } else {
                let mut long_name = tools::to_string((*av_codec_id).long_name);
                if let Some(suffix) = self.get_codec_tag() {
                    long_name.push(' ');
                    long_name.push_str(&suffix);
                }
                Some(long_name)
            }
        }
    }

    /// Decodes the FourCC codec tag (little-endian, byte by byte) and maps
    /// known Apple ProRes tags to a human-readable profile name.
    pub fn get_codec_tag(&self) -> Option<String> {
        unsafe {
            let mut codec_tag = (*(*self.stream).codecpar).codec_tag;
            let mut codec_tag_str = "".to_string();
            for _i in 0..4 {
                let character = codec_tag & 0xFF;
                if let Some(c) = char::from_u32(character) {
                    codec_tag_str.push_str(&c.to_string());
                }
                codec_tag >>= 8;
            }
            match codec_tag_str.as_str() {
                "ap4x" => Some("4444 XQ".to_string()),
                "ap4h" => Some("4444".to_string()),
                "apch" => Some("422 HQ".to_string()),
                "apcn" => Some("422".to_string()),
                "apcs" => Some("422 LT".to_string()),
                "apco" => Some("422 Proxy".to_string()),
                _ => None,
            }
        }
    }

    /// Duration in seconds (duration in stream time base * time base), or
    /// `None` if unknown.
    pub fn get_duration(&self) -> Option<f32> {
        unsafe {
            if (*self.stream).duration == AV_NOPTS_VALUE {
                None
            } else {
                Some((*self.stream).duration as f32 * self.get_time_base().to_float())
            }
        }
    }

    /// Raw duration in stream time-base units, or `None` if unknown.
    pub fn get_duration_pts(&self) -> Option<i64> {
        unsafe {
            if (*self.stream).duration == AV_NOPTS_VALUE {
                None
            } else {
                Some((*self.stream).duration)
            }
        }
    }

    /// Frame count; falls back to the PTS duration when nb_frames is 0.
    /// NOTE(review): the fallback returns time-base units, not frames —
    /// presumably callers rely on a 1:1 PTS-per-frame time base; confirm.
    pub fn get_nb_frames(&self) -> Option<i64> {
        unsafe {
            if (*self.stream).nb_frames == 0 {
                self.get_duration_pts()
            } else {
                Some((*self.stream).nb_frames)
            }
        }
    }

    /// Sample (pixel) aspect ratio: stream value first, then the codec
    /// parameters, defaulting to 1:1 when both are unset.
    pub fn get_picture_aspect_ratio(&self) -> Rational {
        unsafe {
            if (*self.stream).sample_aspect_ratio.num == 0 {
                if (*(*self.stream).codecpar).sample_aspect_ratio.num == 0 {
                    Rational { num: 1, den: 1 }
                } else {
                    Rational {
                        num: (*(*self.stream).codecpar).sample_aspect_ratio.num,
                        den: (*(*self.stream).codecpar).sample_aspect_ratio.den,
                    }
                }
            } else {
                Rational {
                    num: (*self.stream).sample_aspect_ratio.num,
                    den: (*self.stream).sample_aspect_ratio.den,
                }
            }
        }
    }

    /// Start time in seconds, or `None` if unknown.
    pub fn get_start_time(&self) -> Option<f32> {
        unsafe {
            if (*self.stream).start_time == AV_NOPTS_VALUE {
                None
            } else {
                Some((*self.stream).start_time as f32 * self.get_time_base().to_float())
            }
        }
    }

    /// Picture width in pixels.
    pub fn get_width(&self) -> i32 {
        unsafe { (*(*self.stream).codecpar).width }
    }

    /// Picture height in pixels.
    pub fn get_height(&self) -> i32 {
        unsafe { (*(*self.stream).codecpar).height }
    }

    /// Display aspect ratio = storage dimensions scaled by the sample aspect
    /// ratio (codecpar SAR preferred, then stream SAR, else 1:1), reduced.
    pub fn get_display_aspect_ratio(&self) -> Rational {
        unsafe {
            if (*(*self.stream).codecpar).sample_aspect_ratio.num == 0 {
                if (*self.stream).sample_aspect_ratio.num == 0 {
                    Rational {
                        num: (*(*self.stream).codecpar).width,
                        den: (*(*self.stream).codecpar).height,
                    }
                    .reduce()
                } else {
                    Rational {
                        num: (*(*self.stream).codecpar).width * (*self.stream).sample_aspect_ratio.num,
                        den: (*(*self.stream).codecpar).height * (*self.stream).sample_aspect_ratio.den,
                    }
                    .reduce()
                }
            } else {
                Rational {
                    num: (*(*self.stream).codecpar).width
                        * (*(*self.stream).codecpar).sample_aspect_ratio.num,
                    den: (*(*self.stream).codecpar).height
                        * (*(*self.stream).codecpar).sample_aspect_ratio.den,
                }
                .reduce()
            }
        }
    }

    /// Bit rate in bits/s, or `None` when unset (0 or NOPTS sentinel).
    pub fn get_bit_rate(&self) -> Option<i64> {
        unsafe {
            if (*(*self.stream).codecpar).bit_rate == AV_NOPTS_VALUE
                || (*(*self.stream).codecpar).bit_rate == 0
            {
                None
            } else {
                Some((*(*self.stream).codecpar).bit_rate)
            }
        }
    }

    /// The stream's real base frame rate (`r_frame_rate`).
    pub fn get_frame_rate(&self) -> Rational {
        unsafe {
            Rational {
                num: (*self.stream).r_frame_rate.num,
                den: (*self.stream).r_frame_rate.den,
            }
        }
    }

    /// Codec level, or `None` when unknown.
    pub fn get_level(&self) -> Option<i32> {
        unsafe {
            let level = (*(*self.stream).codecpar).level;
            if level == FF_LEVEL_UNKNOWN {
                None
            } else {
                Some(level)
            }
        }
    }

    /// Codec profile name, or `None` when unknown.
    pub fn get_profile(&self) -> Option<String> {
        unsafe {
            let profile = (*(*self.stream).codecpar).profile;
            if profile == FF_PROFILE_UNKNOWN {
                None
            } else {
                Some(tools::to_string(avcodec_profile_name(
                    (*(*self.stream).codecpar).codec_id,
                    profile,
                )))
            }
        }
    }

    /// "progressive"/"interlaced" from the field order, `None` if unknown.
    pub fn get_scanning_type(&self) -> Option<String> {
        unsafe {
            match (*(*self.stream).codecpar).field_order {
                AVFieldOrder::AV_FIELD_PROGRESSIVE => Some("progressive".to_string()),
                AVFieldOrder::AV_FIELD_TT
                | AVFieldOrder::AV_FIELD_BB
                | AVFieldOrder::AV_FIELD_TB
                | AVFieldOrder::AV_FIELD_BT => Some("interlaced".to_string()),
                _ => None,
            }
        }
    }

    /// Chroma subsampling label derived from the pixel format's chroma
    /// shifts; unmatched shift pairs fall back to the raw pixel format name.
    pub fn get_chroma_sub_sample(&self) -> Option<String> {
        unsafe {
            let hshift = &mut 0;
            let vshift = &mut 0;
            let pixel_format: AVPixelFormat = std::mem::transmute((*(*self.stream).codecpar).format);
            if pixel_format == AVPixelFormat::AV_PIX_FMT_NONE {
                return None;
            }
            av_pix_fmt_get_chroma_sub_sample(pixel_format, hshift, vshift);
            match (hshift, vshift) {
                (0, 0) => Some("4:4:4".to_string()),
                (1, 0) => Some("4:2:2".to_string()),
                (1, 1) => Some("4:2:0".to_string()),
                (2, 0) => Some("4:1:1".to_string()),
                (_, _) => Some(tools::to_string(av_get_pix_fmt_name(pixel_format))),
            }
        }
    }

    /// Pixel format name (e.g. "yuv420p"), or `None` when unset.
    pub fn get_pix_fmt_name(&self) -> Option<String> {
        unsafe {
            let pixel_format: AVPixelFormat = std::mem::transmute((*(*self.stream).codecpar).format);
            if pixel_format == AVPixelFormat::AV_PIX_FMT_NONE {
                return None;
            }
            let input_fmt_str = av_get_pix_fmt_name(pixel_format);
            Some(tools::to_string(input_fmt_str))
        }
    }

    /// Bits per coded sample for the codec (0 when not applicable).
    pub fn get_bits_per_sample(&self) -> i32 {
        unsafe { av_get_bits_per_sample((*(*self.stream).codecpar).codec_id) }
    }

    /// Audio sample format name. Note: reinterprets the same `format` field
    /// used above as an `AVSampleFormat` — only meaningful for audio streams.
    pub fn get_sample_fmt(&self) -> String {
        unsafe {
            let pixel_format: AVSampleFormat = std::mem::transmute((*(*self.stream).codecpar).format);
            tools::to_string(av_get_sample_fmt_name(pixel_format))
        }
    }

    /// Audio sample rate in Hz.
    pub fn get_sample_rate(&self) -> i32 {
        unsafe { (*(*self.stream).codecpar).sample_rate }
    }

    /// Audio channel count.
    pub fn get_channels(&self) -> i32 {
        unsafe { (*(*self.stream).codecpar).channels }
    }

    /// Timecode from the S12M side data (FFmpeg >= 4.4 variant).
    /// NOTE(review): this passes an `&mut i32` where the C API expects a
    /// character buffer and casts the side-data *pointer* to u32 as the
    /// timecode value; the returned string is the integer's decimal form,
    /// not an HH:MM:SS:FF timecode. Looks broken — verify against callers.
    #[cfg(any(ffmpeg_4_4, ffmpeg_5_0, ffmpeg_5_1))]
    pub fn get_timecode(&self) -> Option<String> {
        unsafe {
            let timecode_side_data = av_stream_get_side_data(
                self.stream,
                AVPacketSideDataType::AV_PKT_DATA_S12M_TIMECODE,
                null_mut(),
            );
            let timecode = &mut 0;
            if timecode_side_data.is_null() {
                return None;
            }
            av_timecode_make_mpeg_tc_string(timecode, timecode_side_data as u32);
            Some(timecode.to_string())
        }
    }

    /// Timecode from the deprecated `AVStream.codec` context (FFmpeg 4.0-4.3
    /// variant). NOTE(review): same buffer-type concern as the variant above.
    #[cfg(any(ffmpeg_4_0, ffmpeg_4_1, ffmpeg_4_2, ffmpeg_4_3))]
    pub fn get_timecode(&self) -> Option<String> {
        unsafe {
            let tc = &mut 0;
            let timecode = (*(*self.stream).codec).timecode_frame_start;
            if timecode < 0 {
                return None;
            }
            av_timecode_make_mpeg_tc_string(tc, timecode as u32);
            Some(tc.to_string())
        }
    }

    /// Bit depth of the raw samples, or `None` when unset (0).
    pub fn get_bits_per_raw_sample(&self) -> Option<i32> {
        unsafe {
            if (*(*self.stream).codecpar).bits_per_raw_sample == 0 {
                None
            } else {
                Some((*(*self.stream).codecpar).bits_per_raw_sample)
            }
        }
    }

    /// Copies all stream metadata entries into an owned map, iterating the
    /// AVDictionary with the empty-key + AV_DICT_IGNORE_SUFFIX idiom.
    pub fn get_stream_metadata(&self) -> HashMap<String, String> {
        unsafe {
            let mut tag = null_mut();
            let key = CString::new("").unwrap();
            let mut metadata = HashMap::new();
            loop {
                tag = av_dict_get(
                    (*self.stream).metadata,
                    key.as_ptr(),
                    tag,
                    AV_DICT_IGNORE_SUFFIX,
                );
                if tag.is_null() {
                    break;
                }
                let k = tools::to_string((*tag).key);
                let v = tools::to_string((*tag).value);
                metadata.insert(k.to_string(), v.to_string());
            }
            metadata
        }
    }

    /// Color range name, annotating the common "tv"/"pc" values.
    pub fn get_color_range(&self) -> Option<String> {
        unsafe {
            if (*(*self.stream).codecpar).color_range == AVColorRange::AVCOL_RANGE_UNSPECIFIED {
                None
            } else {
                let range = av_color_range_name((*(*self.stream).codecpar).color_range);
                if tools::to_string(range) == "tv" {
                    Some("tv (limited)".to_string())
                } else if tools::to_string(range) == "pc" {
                    Some("pc (full)".to_string())
                } else {
                    Some(tools::to_string(range))
                }
            }
        }
    }

    /// Color matrix (color space coefficients) name, if specified.
    pub fn get_color_matrix(&self) -> Option<String> {
        unsafe {
            if (*(*self.stream).codecpar).color_space == AVColorSpace::AVCOL_SPC_UNSPECIFIED {
                None
            } else {
                let matrix = av_color_space_name((*(*self.stream).codecpar).color_space);
                Some(tools::to_string(matrix))
            }
        }
    }

    /// Transfer characteristic (gamma curve) name, if specified.
    pub fn get_color_trc(&self) -> Option<String> {
        unsafe {
            if (*(*self.stream).codecpar).color_trc
                == AVColorTransferCharacteristic::AVCOL_TRC_UNSPECIFIED
            {
                None
            } else {
                let trc = av_color_transfer_name((*(*self.stream).codecpar).color_trc);
                Some(tools::to_string(trc))
            }
        }
    }

    /// Color primaries name, if specified.
    pub fn get_color_primaries(&self) -> Option<String> {
        unsafe {
            if (*(*self.stream).codecpar).color_primaries == AVColorPrimaries::AVCOL_PRI_UNSPECIFIED
            {
                None
            } else {
                let primaries = av_color_primaries_name((*(*self.stream).codecpar).color_primaries);
                Some(tools::to_string(primaries))
            }
        }
    }

    /// Broad color family (RGB/GRAY/XYZ/YUV/N-A) deduced from the pixel
    /// format descriptor's flags, component count and name.
    pub fn get_color_space(&self) -> Option<String> {
        unsafe {
            let pixel_format: AVPixelFormat = std::mem::transmute((*(*self.stream).codecpar).format);
            let av_pix_fmt_desc = av_pix_fmt_desc_get(pixel_format);
            if av_pix_fmt_desc.is_null() {
                None
            } else {
                let cxyz = CString::new("xyz").unwrap();
                let xyzptr = cxyz.as_ptr();
                if (*av_pix_fmt_desc).flags as i32 & AV_PIX_FMT_FLAG_PAL > 0
                    || (*av_pix_fmt_desc).flags as i32 & AV_PIX_FMT_FLAG_RGB > 0
                {
                    Some("RGB".to_string())
                } else if (*av_pix_fmt_desc).nb_components == 1 || (*av_pix_fmt_desc).nb_components == 2
                {
                    Some("GRAY".to_string())
                } else if !(*av_pix_fmt_desc).name.is_null()
                    && strncmp((*av_pix_fmt_desc).name, xyzptr, 3) == 0
                {
                    Some("XYZ".to_string())
                } else if (*av_pix_fmt_desc).nb_components == 0 {
                    Some("N/A".to_string())
                } else {
                    Some("YUV".to_string())
                }
            }
        }
    }
}
use super::landing_template::landing_template;
use stremio_core::types::addons::Manifest;
use hyper::{Response, Request, Body, StatusCode, header, Method};
use serde_json;
use now_lambda::IntoResponse;
use super::server::ServerOptions;
use super::builder::BuilderWithHandlers;
use super::builder::AddonRouter;
use futures::stream::Stream;
use futures::future::Future;

/// Convenience alias for results produced by the router.
pub type Result<T> = std::result::Result<T, RouterError>;

/// Errors the router can hit while building responses.
#[derive(Debug)]
pub enum RouterError {
    HttpError(hyper::http::Error),
    SerdeError(serde_json::Error),
}

/// Wraps a Hyper response so it can also be turned into a Now.sh
/// (serverless) response.
pub struct RouterResponse {
    response: Response<Body>
}

// implement now.sh lambda response trait
impl IntoResponse for RouterResponse {
    // convert Hyper Response to Now.sh Response
    fn into_response(self) -> Response<now_lambda::Body> {
        let (parts, body) = self.response.into_parts();
        // get original response body as bytes array
        // NOTE(review): `.wait()` blocks the current thread (futures 0.1) and
        // `.unwrap()` panics after merely logging — presumably acceptable in
        // a lambda entry point, but worth confirming.
        let bytes = body
            .concat2()
            .wait()
            // at least log error
            .map_err(|error| eprintln!("into_response error: {}", error))
            .unwrap()
            .into_bytes();
        let mut bytes_array: Vec<u8> = vec![];
        bytes_array.extend_from_slice(&*bytes);
        Response::from_parts(parts, now_lambda::Body::from(bytes_array))
    }
}

// read RouterResponse from Hyper Response
impl From<Response<Body>> for RouterResponse {
    fn from(response: Response<Body>) -> RouterResponse {
        Self { response }
    }
}

impl RouterResponse {
    /// Consumes the wrapper, yielding the plain Hyper response.
    pub fn response(self) -> Response<Body> {
        self.response
    }
    /// Consumes the wrapper, yielding a serverless (Now.sh) response.
    pub fn response_serverless(self) -> Response<now_lambda::Body> {
        self.into_response()
    }
}

/// Dispatches addon HTTP requests: manifest, landing page, resources.
pub struct Router {
    build: BuilderWithHandlers,
    options: ServerOptions
}

impl Router {
    pub fn new(build: BuilderWithHandlers, options: ServerOptions) -> Self {
        Self { build, options }
    }
    // Manifest of the first handler; presumably all handlers share one
    // manifest — TODO confirm with `BuilderWithHandlers`.
    fn get_manifest(&self) -> &Manifest {
        self.build.handlers[0].get_manifest()
    }
    /// 200 JSON response with permissive CORS and a cache header driven by
    /// `ServerOptions::cache_max_age`.
    fn json_response(&self, json: String) -> Result<Response<Body>> {
        Response::builder()
            .status(StatusCode::OK)
            .header("access-control-allow-origin", "*") // CORS
            .header("Cache-Control", format!("max-age={}, public", self.options.cache_max_age)) // cache
            .header(header::CONTENT_TYPE, "application/json")
            .body(Body::from(json))
            .map_err(RouterError::HttpError)
    }
    /// 200 HTML response (used for the landing page).
    fn html_response(&self, html: String) -> Result<Response<Body>> {
        Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "text/html")
            .body(Body::from(html))
            .map_err(RouterError::HttpError)
    }
    fn not_found(&self) -> Result<Response<Body>> {
        Response::builder()
            .status(StatusCode::NOT_FOUND)
            .body(Body::from("Not Found"))
            .map_err(RouterError::HttpError)
    }
    fn method_not_allowed(&self) -> Result<Response<Body>> {
        Response::builder()
            .status(StatusCode::METHOD_NOT_ALLOWED)
            .body(Body::from("Method not allowed"))
            .map_err(RouterError::HttpError)
    }
    /// Serializes the addon manifest as JSON.
    pub fn handle_manifest(&self) -> Result<Response<Body>> {
        let json = serde_json::to_string(self.get_manifest())
            .map_err(RouterError::SerdeError)?;
        self.json_response(json)
    }
    /// Resolves an addon resource path; 404 when no handler matches.
    pub fn handle_resource(&self, path: &str) -> Result<Response<Body>> {
        let res = match self.build.handle(path) {
            Some(res) => res,
            None => return self.not_found()
        };
        let json = serde_json::to_string(&res)
            .map_err(RouterError::SerdeError)?;
        self.json_response(json)
    }
    pub fn handle_landing(&self, template: String) -> Result<Response<Body>> {
        self.html_response(template)
    }
    /// Landing page rendered from the manifest via `landing_template`.
    pub fn handle_default_landing(&self) -> Result<Response<Body>> {
        self.handle_landing(landing_template(self.get_manifest()))
    }
    /// Entry point: GET-only dispatch over the request path; everything that
    /// is not the manifest or the root is treated as a resource path.
    pub fn route<T>(&self, request: Request<T>) -> Result<RouterResponse> {
        if request.method() != Method::GET {
            return Ok(RouterResponse::from(self.method_not_allowed()?));
        }
        let path = request.uri().path();
        Ok(RouterResponse::from(
            match path {
                "/manifest.json" => self.handle_manifest()?,
                "/" => self.handle_default_landing()?,
                _ => self.handle_resource(path)?,
            }
        ))
    }
}
// use std::io::stdin;
use diesel::prelude::*;

use console::Style;

use sl_lib::custom::str_from_stdin;
use sl_lib::models::*;
use sl_lib::*;

use std::io::Result;

use sl_lib::custom::{
    // str_from_stdin,
    file_to_string,
    // steadylearner_if_none,
    last_in_path,
    // none_if_empty_or_return_spilit_vector_string,
};

/// Interactive CLI subcommand: type [a] to print a single YouTube record by
/// id, or [m] to print the latest published records from the `ytbs` table.
///
/// Panics (via `expect`/`unwrap`) on empty input, an unavailable DB pool,
/// a missing record, or a non-numeric count — acceptable for a dev tool.
pub fn show() -> Result<()> {
    use schema::ytbs::dsl::*;

    let green = Style::new().green();
    // let yellow = Style::new().yellow();
    let cyan = Style::new().cyan();
    let bold = Style::new().bold();

    println!(
        "{}",
        bold.apply_to(
            "Type [a] to show a video contents and [m] to show multiple published video contents."
        )
    );

    // Only the first character of the answer selects the mode.
    let result = str_from_stdin()
        .chars()
        .next() // equals to .nth(0)
        .expect("string is empty");

    let connection = init_pool().get().unwrap();

    match result {
        'a' => {
            println!(
                "{}",
                cyan.apply_to("\nType database id for youtube content to show")
            );
            // `last_in_path` lets the user paste a path/URL and still get
            // the trailing id segment.
            let ytb_id = last_in_path(&str_from_stdin());

            let ytb = ytbs
                .find(&ytb_id)
                .first::<Ytb>(&*connection)
                .expect("Error loading ytbs");

            println!("YouTube id is {}", green.apply_to(ytb.id));
            println!("User id for that is {}", green.apply_to(ytb.user_id));
            println!("\n");
            println!(
                "The content for that is \n {:#?}, ",
                bold.apply_to(ytb.content)
            );
            println!("\n");
            println!("Published? [{:#?}], ", cyan.apply_to(ytb.published));

            Ok(())
        }
        'm' => {
            println!(
                "{}",
                cyan.apply_to("\nType number of published ytbs to show")
            );
            let ytb_number = str_from_stdin().parse::<i64>().expect("Invalid number");

            // Only published records, capped at the requested count.
            let results = ytbs
                .filter(published.eq(true))
                .limit(ytb_number)
                .load::<Ytb>(&*connection)
                .expect("Error loading ytbs");

            println!("\nDisplaying {} YouTube Contents", results.len());
            for ytb in results {
                println!("YouTube id is {}", green.apply_to(ytb.id));
                println!("User id for that is {}", green.apply_to(ytb.user_id));
                println!("\n");
                println!(
                    "The content for that is \n {:#?}, ",
                    bold.apply_to(ytb.content)
                );
                println!("\n");
                println!("Published? [{:#?}], ", cyan.apply_to(ytb.published));
                // println!("tags? [{:#?}]", ytb.tags);
            }
            Ok(())
        }
        _ => {
            println!("Invalid input, You should type either a or m to command line.");
            Ok(())
        }
    }
}
use crate::List::{Cons, Nil}; use std::ops::Deref; fn main() { let _list = Cons(1, Box::new(Cons(1, Box::new(Cons(3, Box::new(Nil)))))); deref_pr(); deref_mybox(); let a = MyBox::new(String::from("Rust")); // implicit deref conercion -> &MyBox<String> => &String hello(&a); //1. (*a) -> MyBox<String> => String //2. '&' and '[..]. take a string slice of the String hello(&(*a)[..]); dropping(); } enum List { Cons(i32, Box<List>), Nil, } fn deref_pr() { let x = 5; let y = &x; assert_eq!(5, x); // assert_eq!(5, y); assert_eq!(5, *y); let y = Box::new(x); // Box points to copie of value assert_eq!(5, x); assert_eq!(5, *y) } struct MyBox<T>(T); impl<T> MyBox<T> { fn new(x: T) -> MyBox<T> { MyBox(x) } } fn deref_mybox() { let x = 5; let y = MyBox::new(x); assert_eq!(5, x); // *(y.deref()) assert_eq!(5, *y); } impl<T> Deref for MyBox<T> { type Target = T; fn deref(&self) -> &Self::Target { &self.0 } } fn hello(name: &str) { println!("Hello {name}"); } struct CustomDrop { data: String, } impl Drop for CustomDrop { fn drop(&mut self) { println!("Dropping data {}", self.data); } } fn dropping() { let a = CustomDrop{ data: "first".to_string(), }; { let b = CustomDrop { data: "second".to_string(), }; } drop(a); println!("Pointers created") }
use crate::faster_vec;
use ndarray::linalg;
use ndarray::s;
use ndarray::Array;
use ndarray::Dimension;
use ndarray::Ix2;
use ndarray::LinalgScalar;
use ndarray::{ArrayView, ArrayViewMut, Axis};
use std::slice::{from_raw_parts, from_raw_parts_mut};
use std::time::Instant;

// Recursion cut-off: sub-matrices with at most this many elements are
// multiplied directly instead of being divided further.
const MULT_CHUNK: usize = 1 * 1024;

/// Benchmarks ndarray's sequential `general_mat_mul` on f32 matrices of
/// `size`x`size` (`size` rounded up to a power of two when `power2` is set).
/// Prints the timing and returns the elapsed nanoseconds.
pub fn timed_matmul_seq_f32(size: usize, name: &str, power2: bool) -> u64 {
    let mut size = size;
    if power2 {
        size = size.next_power_of_two();
    }
    let a = Array::from_shape_fn((size, size), |(i, j)| (i * size + j) as f32);
    let b = Array::from_shape_fn((size, size), |(i, j)| ((i * size) + j + 7) as f32);
    let mut dest = Array::zeros((size, size));
    let start = Instant::now();
    linalg::general_mat_mul(1.0, &a.view(), &b.view(), 1.0, &mut dest.view_mut());
    let dur = Instant::now() - start;
    let nanos = u64::from(dur.subsec_nanos()) + dur.as_secs() * 1_000_000_000u64;
    println!(
        "{}:\t{}x{} matrix: {} s",
        name,
        size,
        size,
        nanos as f32 / 1e9f32
    );
    nanos
}

/// Same benchmark as `timed_matmul_seq_f32` but with u32 elements.
pub fn timed_matmul_seq_u32(size: usize, name: &str, power2: bool) -> u64 {
    let mut size = size;
    if power2 {
        size = size.next_power_of_two();
    }
    let a = Array::from_shape_fn((size, size), |(i, j)| (i * size + j) as u32);
    let b = Array::from_shape_fn((size, size), |(i, j)| ((i * size) + j + 7) as u32);
    let mut dest = Array::zeros((size, size));
    let start = Instant::now();
    linalg::general_mat_mul(1u32, &a.view(), &b.view(), 1u32, &mut dest.view_mut());
    let dur = Instant::now() - start;
    let nanos = u64::from(dur.subsec_nanos()) + dur.as_secs() * 1_000_000_000u64;
    println!(
        "{}:\t{}x{} matrix: {} s",
        name,
        size,
        size,
        nanos as f32 / 1e9f32
    );
    nanos
}

/// Benchmarks the recursive divide-and-conquer `mult` (rayon-parallel, with
/// `general_mat_mul` at the leaves) on f32 matrices.
pub fn timed_matmul_ndarray_f32(size: usize, name: &str, power2: bool) -> u64 {
    let mut size = size;
    if power2 {
        size = size.next_power_of_two();
    }
    let a = Array::from_shape_fn((size, size), |(i, j)| (i * size + j) as f32);
    let b = Array::from_shape_fn((size, size), |(i, j)| ((i * size) + j + 7) as f32);
    let mut dest = Array::zeros((size, size));
    let start = Instant::now();
    mult(a.view(), b.view(), dest.view_mut());
    let dur = Instant::now() - start;
    let nanos = u64::from(dur.subsec_nanos()) + dur.as_secs() * 1_000_000_000u64;
    // let mut verif = Array::zeros((size,size));
    // linalg::general_mat_mul(1.0, &a, &b, 1.0, &mut verif);
    // assert_eq!(dest,verif);
    println!(
        "{}:\t{}x{} matrix: {} s",
        name,
        size,
        size,
        nanos as f32 / 1e9f32
    );
    nanos
}

/// u32 variant of `timed_matmul_ndarray_f32`.
pub fn timed_matmul_ndarray_u32(size: usize, name: &str, power2: bool) -> u64 {
    let mut size = size;
    if power2 {
        size = size.next_power_of_two();
    }
    let a = Array::from_shape_fn((size, size), |(i, j)| (i * size + j) as u32);
    let b = Array::from_shape_fn((size, size), |(i, j)| ((i * size) + j + 7) as u32);
    let mut dest = Array::zeros((size, size));
    let start = Instant::now();
    mult(a.view(), b.view(), dest.view_mut());
    let dur = Instant::now() - start;
    let nanos = u64::from(dur.subsec_nanos()) + dur.as_secs() * 1_000_000_000u64;
    // let mut verif = Array::zeros((size,size));
    // linalg::general_mat_mul(1.0, &a, &b, 1.0, &mut verif);
    // assert_eq!(dest,verif);
    println!(
        "{}:\t{}x{} matrix: {} s",
        name,
        size,
        size,
        nanos as f32 / 1e9f32
    );
    nanos
}

/// Benchmarks the SIMD leaf kernel (`faster_vec::multiply_add`) variant on
/// f32 matrices.
pub fn timed_matmul_faster_f32(size: usize, name: &str, power2: bool) -> u64 {
    let mut size = size;
    if power2 {
        size = size.next_power_of_two();
    }
    let a = Array::from_shape_fn((size, size), |(i, j)| (i * size + j) as f32);
    let b = Array::from_shape_fn((size, size), |(i, j)| ((i * size) + j + 7) as f32);
    let mut dest = Array::zeros((size, size));
    let start = Instant::now();
    mult_nd_faster(a.view(), b.view(), dest.view_mut());
    let dur = Instant::now() - start;
    let nanos = u64::from(dur.subsec_nanos()) + dur.as_secs() * 1_000_000_000u64;
    // let mut verif = Array::zeros((size,size));
    // linalg::general_mat_mul(1.0, &a, &b, 1.0, &mut verif);
    // assert_eq!(dest,verif);
    println!(
        "{}:\t{}x{} matrix: {} s",
        name,
        size,
        size,
        nanos as f32 / 1e9f32
    );
    nanos
}

/// u32 variant of `timed_matmul_faster_f32`.
pub fn timed_matmul_faster_u32(size: usize, name: &str, power2: bool) -> u64 {
    let mut size = size;
    if power2 {
        size = size.next_power_of_two();
    }
    let a = Array::from_shape_fn((size, size), |(i, j)| (i * size + j) as u32);
    let b = Array::from_shape_fn((size, size), |(i, j)| ((i * size) + j + 7) as u32);
    let mut dest = Array::zeros((size, size));
    let start = Instant::now();
    mult_nd_faster_u32(a.view(), b.view(), dest.view_mut());
    let dur = Instant::now() - start;
    let nanos = u64::from(dur.subsec_nanos()) + dur.as_secs() * 1_000_000_000u64;
    // let mut verif = Array::zeros((size,size));
    // linalg::general_mat_mul(1.0, &a, &b, 1.0, &mut verif);
    // assert_eq!(dest,verif);
    println!(
        "{}:\t{}x{} matrix: {} s",
        name,
        size,
        size,
        nanos as f32 / 1e9f32
    );
    nanos
}

/// Recursive blocked matrix multiply-accumulate: `result += a * b`.
///
/// Quadrants are labelled row-major by `divide`: (11, 12, 21, 22). The two
/// `join4` rounds compute, in parallel,
///   C11 += A11*B11 then += A12*B21,  C12 += A11*B12 then += A12*B22, etc.
/// The second round reuses the destination views returned by the first, so
/// no destination quadrant is written concurrently.
pub fn mult<'a, 'b, 'd, A>(
    a: ArrayView<'a, A, Ix2>,
    b: ArrayView<'b, A, Ix2>,
    mut result: ArrayViewMut<'d, A, Ix2>,
) -> ArrayViewMut<'d, A, Ix2>
where
    A: LinalgScalar + Send + Sync,
{
    let dima = a.shape();
    // Leaf: small enough to hand to ndarray's kernel (alpha = beta = 1, so
    // this accumulates into `result`).
    if dima[0] * dima[1] <= MULT_CHUNK {
        linalg::general_mat_mul(A::one(), &a, &b, A::one(), &mut result);
        return result;
    }
    let (rrow, rcol) = result.dim();
    let (a1, a2, a3, a4) = divide(a);
    let (b1, b2, b3, b4) = divide(b);
    let (d1, d2, d3, d4) = divide_mut(result.slice_mut(s![0..rrow;1,0..rcol;1]));
    let (d, f, g, h) = join4(
        || mult(a1, b1, d1),
        || mult(a1, b2, d2),
        || mult(a3, b1, d3),
        || mult(a3, b2, d4),
    );
    let (_r1, _r2, _r3, _r4) = join4(
        || mult(a2, b3, d),
        || mult(a2, b4, f),
        || mult(a4, b3, g),
        || mult(a4, b4, h),
    );
    result
}

/// Like `mult`, but the leaf multiplication is done by the SIMD kernel in
/// `faster_vec::multiply_add`, fed with raw slices built from the views'
/// pointers and row strides.
pub fn mult_nd_faster<'a, 'b, 'd>(
    a: ArrayView<'a, f32, Ix2>,
    b: ArrayView<'b, f32, Ix2>,
    mut result: ArrayViewMut<'d, f32, Ix2>,
) -> ArrayViewMut<'d, f32, Ix2> {
    let dima = a.shape();
    let dimb = b.shape();
    // Empty quadrants can occur when an odd dimension is split; nothing to do.
    if dima[0] == 0 || dima[1] == 0 || dimb[0] == 0 || dimb[1] == 0 {
        return result;
    }
    if dima[0] * dima[1] <= MULT_CHUNK {
        let (raw_ptr_a, len_a) = view_ptr(a);
        let stridesa = a.strides();
        let (raw_ptr_b, len_b) = view_ptr(b);
        let stridesb = b.strides();
        let raw_ptr_r = result.as_mut_ptr();
        let dimr = result.shape();
        let (row, col) = (dimr[0], dimr[1]);
        let strides = result.strides();
        // Exact span of the destination view: full strides for all rows but
        // the last, plus `col` elements of the final row.
        let len_r = (row - 1) * strides[0] as usize + col;
        // SAFETY-note (review): the raw slices alias disjoint quadrants of
        // the parent arrays across the rayon joins below; soundness rests on
        // the kernel honouring the passed strides — confirm in `faster_vec`.
        let slicea = unsafe { from_raw_parts(raw_ptr_a, len_a) };
        let sliceb = unsafe { from_raw_parts(raw_ptr_b, len_b) };
        let mut slicer = unsafe { from_raw_parts_mut(raw_ptr_r, len_r) };
        faster_vec::multiply_add(
            &mut slicer,
            &slicea,
            &sliceb,
            dima[1],
            dima[0],
            dimb[1],
            dimb[0],
            dimr[1],
            dimr[0],
            stridesa[0] as usize,
            stridesb[0] as usize,
            strides[0] as usize,
        );
        return result;
    }
    let (rrow, rcol) = result.dim();
    let (a1, a2, a3, a4) = divide(a);
    let (b1, b2, b3, b4) = divide(b);
    let (d1, d2, d3, d4) = divide_mut(result.slice_mut(s![0..rrow;1,0..rcol;1]));
    let (d, f, g, h) = join4(
        || mult_nd_faster(a1, b1, d1),
        || mult_nd_faster(a1, b2, d2),
        || mult_nd_faster(a3, b1, d3),
        || mult_nd_faster(a3, b2, d4),
    );
    let (_r1, _r2, _r3, _r4) = join4(
        || mult_nd_faster(a2, b3, d),
        || mult_nd_faster(a2, b4, f),
        || mult_nd_faster(a4, b3, g),
        || mult_nd_faster(a4, b4, h),
    );
    result
}

/// u32 twin of `mult_nd_faster`.
/// NOTE(review): unlike the f32 version, there is no empty-matrix guard
/// here — presumably callers never reach a zero-sized quadrant at this
/// element type, but confirm.
pub fn mult_nd_faster_u32<'a, 'b, 'd>(
    a: ArrayView<'a, u32, Ix2>,
    b: ArrayView<'b, u32, Ix2>,
    mut result: ArrayViewMut<'d, u32, Ix2>,
) -> ArrayViewMut<'d, u32, Ix2> {
    let dim = a.shape();
    let dimb = b.shape();
    if dim[0] * dim[1] <= MULT_CHUNK {
        let (raw_ptr_a, len_a) = view_ptr(a);
        let stridesa = a.strides();
        let (raw_ptr_b, len_b) = view_ptr(b);
        let stridesb = b.strides();
        let raw_ptr_r = result.as_mut_ptr();
        let dimr = result.shape();
        let (row, col) = (dimr[0], dimr[1]);
        let strides = result.strides();
        let len_r = (row - 1) * strides[0] as usize + col;
        let slicea = unsafe { from_raw_parts(raw_ptr_a, len_a) };
        let sliceb = unsafe { from_raw_parts(raw_ptr_b, len_b) };
        let mut slicer = unsafe { from_raw_parts_mut(raw_ptr_r, len_r) };
        faster_vec::multiply_add_u32(
            &mut slicer,
            &slicea,
            &sliceb,
            dim[1],
            dim[0],
            dimb[1],
            dimb[0],
            dimr[1],
            dimr[0],
            stridesa[0] as usize,
            stridesb[0] as usize,
            strides[0] as usize,
        );
        return result;
    }
    let (rrow, rcol) = result.dim();
    let (a1, a2, a3, a4) = divide(a);
    let (b1, b2, b3, b4) = divide(b);
    let (d1, d2, d3, d4) = divide_mut(result.slice_mut(s![0..rrow;1,0..rcol;1]));
    let (d, f, g, h) = join4(
        || mult_nd_faster_u32(a1, b1, d1),
        || mult_nd_faster_u32(a1, b2, d2),
        || mult_nd_faster_u32(a3, b1, d3),
        || mult_nd_faster_u32(a3, b2, d4),
    );
    let (_r1, _r2, _r3, _r4) = join4(
        || mult_nd_faster_u32(a2, b3, d),
        || mult_nd_faster_u32(a2, b4, f),
        || mult_nd_faster_u32(a4, b3, g),
        || mult_nd_faster_u32(a4, b4, h),
    );
    result
}

/// Splits a mutable 2-D view into four quadrants at the half-way point of
/// each axis, returned row-major: (11, 12, 21, 22).
pub fn divide_mut<'a: 'b, 'b, A>(
    a: ArrayViewMut<'a, A, Ix2>,
) -> (
    ArrayViewMut<'a, A, Ix2>,
    ArrayViewMut<'a, A, Ix2>,
    ArrayViewMut<'a, A, Ix2>,
    ArrayViewMut<'a, A, Ix2>,
) {
    let dim = a.shape();
    let (arow, acol) = (dim[0], dim[1]);
    let l_row = arow / 2;
    let l_col = acol / 2;
    let (a1, a2) = a.split_at(Axis(0), l_row);
    let (a11, a12) = a1.split_at(Axis(1), l_col);
    let (a21, a22) = a2.split_at(Axis(1), l_col);
    (a11, a12, a21, a22)
}

/// Like `divide_mut`, but picks the split point nearest to half of the
/// *next power of two* of each dimension, so recursion tends to produce
/// power-of-two-sized quadrants.
pub fn divide_power2_friendly_mut<'a, A, D>(
    a: ArrayViewMut<'a, A, D>,
) -> (
    ArrayViewMut<'a, A, D>,
    ArrayViewMut<'a, A, D>,
    ArrayViewMut<'a, A, D>,
    ArrayViewMut<'a, A, D>,
)
where
    D: Dimension,
{
    let dim = a.shape();
    let (arow, acol) = (dim[0], dim[1]);
    let mut ar2 = arow;
    if !arow.is_power_of_two() {
        ar2 = arow.next_power_of_two();
    }
    // Candidate split points: a quarter and a half of the padded size;
    // choose whichever is closer to the true midpoint.
    let l_row_bef = ar2 / 4;
    let l_row_after = ar2 / 2;
    let diff1 = arow / 2 - l_row_bef;
    let diff2 = l_row_after - arow / 2;
    let mut row_calc = l_row_after;
    if diff2 > diff1 {
        row_calc = l_row_bef;
    }
    let mut ac2 = acol;
    if !acol.is_power_of_two() {
        ac2 = acol.next_power_of_two();
    }
    let l_col_bef = ac2 / 4;
    let l_col_after = ac2 / 2;
    let diff1 = acol / 2 - l_col_bef;
    let diff2 = l_col_after - acol / 2;
    let mut col_calc = l_col_after;
    if diff2 > diff1 {
        col_calc = l_col_bef;
    }
    let (a1, a2) = a.split_at(Axis(0), row_calc);
    let (a11, a12) = a1.split_at(Axis(1), col_calc);
    let (a21, a22) = a2.split_at(Axis(1), col_calc);
    (a11, a12, a21, a22)
}

/// Immutable counterpart of `divide_mut`.
pub fn divide<'a, A, D>(
    a: ArrayView<'a, A, D>,
) -> (
    ArrayView<'a, A, D>,
    ArrayView<'a, A, D>,
    ArrayView<'a, A, D>,
    ArrayView<'a, A, D>,
)
where
    D: Dimension,
{
    let dim = a.shape();
    let (arow, acol) = (dim[0], dim[1]);
    let l_row = arow / 2;
    let l_col = acol / 2;
    let (a1, a2) = a.split_at(Axis(0), l_row);
    let (a11, a12) = a1.split_at(Axis(1), l_col);
    let (a21, a22) = a2.split_at(Axis(1), l_col);
    (a11, a12, a21, a22)
}

/// Immutable counterpart of `divide_power2_friendly_mut`.
pub fn divide_power2_friendly<'a, A, D>(
    a: ArrayView<'a, A, D>,
) -> (
    ArrayView<'a, A, D>,
    ArrayView<'a, A, D>,
    ArrayView<'a, A, D>,
    ArrayView<'a, A, D>,
)
where
    D: Dimension,
{
    let dim = a.shape();
    let (arow, acol) = (dim[0], dim[1]);
    let mut ar2 = arow;
    if !arow.is_power_of_two() {
        ar2 = arow.next_power_of_two();
    }
    let l_row_bef = ar2 / 4;
    let l_row_after = ar2 / 2;
    let diff1 = arow / 2 - l_row_bef;
    let diff2 = l_row_after - arow / 2;
    let mut row_calc = l_row_after;
    if diff2 > diff1 {
        row_calc = l_row_bef;
    }
    let mut ac2 = acol;
    if !acol.is_power_of_two() {
        ac2 = acol.next_power_of_two();
    }
    let l_col_bef = ac2 / 4;
    let l_col_after = ac2 / 2;
    let diff1 = acol / 2 - l_col_bef;
    let diff2 = l_col_after - acol / 2;
    let mut col_calc = l_col_after;
    if diff2 > diff1 {
        col_calc = l_col_bef;
    }
    let (a1, a2) = a.split_at(Axis(0), row_calc);
    let (a11, a12) = a1.split_at(Axis(1), col_calc);
    let (a21, a22) = a2.split_at(Axis(1), col_calc);
    (a11, a12, a21, a22)
}

/// Splits a view in two along `axis`, converting a flat element `index`
/// into a row (ceil-divide by `acol`) or column (modulo `acol`) position.
pub fn divide_at_id_along_axis<'a, A>(
    a: ArrayView<'a, A, Ix2>,
    index: usize,
    axis: Axis,
) -> (ArrayView<'a, A, Ix2>, ArrayView<'a, A, Ix2>) {
    let dim = a.shape();
    let (_arow, acol) = (dim[0], dim[1]);
    if axis.index() == 0 {
        let l_row = (index as f64 / acol as f64).ceil() as usize;
        let (a1, a2) = a.split_at(axis, l_row);
        (a1, a2)
    } else {
        let mut l_col = index;
        // Guard against modulo by zero (and the degenerate 1-column case).
        if (acol != 1 && acol != 0) {
            l_col = index % acol;
        }
        let (a1, a2) = a.split_at(axis, l_col);
        (a1, a2)
    }
}

/// Mutable counterpart of `divide_at_id_along_axis`.
pub fn divide_mut_at_id_along_axis<'a, A>(
    a: ArrayViewMut<'a, A, Ix2>,
    index: usize,
    axis: Axis,
) -> (ArrayViewMut<'a, A, Ix2>, ArrayViewMut<'a, A, Ix2>) {
    let dim = a.shape();
    let (_arow, acol) = (dim[0], dim[1]);
    if axis.index() == 0 {
        let l_row = (index as f64 / acol as f64).ceil() as usize;
        let (a1, a2) = a.split_at(axis, l_row);
        (a1, a2)
    } else {
        let mut l_col = index;
        if (acol != 1 && acol != 0) {
            l_col = index % acol;
        }
        let (a1, a2) = a.split_at(axis, l_col);
        (a1, a2)
    }
}

/// Runs four closures in parallel via two nested `rayon::join`s and returns
/// their results in order.
fn join4<F1, F2, F3, F4, R1, R2, R3, R4>(f1: F1, f2: F2, f3: F3, f4: F4) -> (R1, R2, R3, R4)
where
    F1: FnOnce() -> R1 + Send,
    R1: Send,
    F2: FnOnce() -> R2 + Send,
    R2: Send,
    F3: FnOnce() -> R3 + Send,
    R3: Send,
    F4: FnOnce() -> R4 + Send,
    R4: Send,
{
    let ((r1, r2), (r3, r4)) = rayon::join(|| rayon::join(f1, f2), || rayon::join(f3, f4));
    (r1, r2, r3, r4)
}

/// Raw pointer and element span of a 2-D view.
/// NOTE(review): `row * strides[0]` counts a full stride for the *last* row
/// too, while the destination-side computation uses the exact
/// `(row - 1) * stride + col`; for a sliced view whose final row is shorter
/// than its stride this over-approximates the span — confirm the kernels
/// never read past `(row - 1) * stride + col` elements.
pub fn view_ptr<A>(view: ArrayView<A, Ix2>) -> (*const A, usize)
where
    A: LinalgScalar,
{
    let dim = view.shape();
    let (row, _col) = (dim[0], dim[1]);
    let raw_ptr = view.as_ptr();
    let strides = view.strides();
    let len = row * strides[0] as usize;
    (raw_ptr, len)
}

// Verifies the recursive multiply against ndarray's reference kernel on a
// deliberately non-power-of-two (25x25) input.
#[test]
fn test_mult() {
    let a = Array::from_shape_fn((25, 25), |(i, j)| i + j);
    let b = Array::from_shape_fn((25, 25), |(i, j)| i + j);
    let (a_r, _a_c) = a.dim();
    let (_b_r, b_c) = b.dim();
    let mut result = Array::zeros((a_r, b_c));
    mult(a.view(), b.view(), result.view_mut());
    let mut verif = Array::zeros((25, 25));
    linalg::general_mat_mul(1, &a, &b, 1, &mut verif);
    assert_eq!(verif, result);
}
//! Implements `StdTimeManager`.

use std::sync::RwLock;
use std::time::{SystemTime, Duration};
use std::cmp::min;
use board::*;
use depth::*;
use value::*;
use search::*;
use ttable::Variation;
use search_node::SearchNode;
use time_manager::{TimeManager, RemainingTime};
use uci::{SetOption, OptionDescription};

/// Implements the `TimeManager` trait.
///
/// Strategy: record (depth, ln(elapsed-ms)) pairs as iterative deepening
/// progresses, fit a line through the most recent ones, and extrapolate to
/// decide which depth fits into the allotted time.
pub struct StdTimeManager {
    started_at: SystemTime,       // when thinking on this move began
    depth: Depth,                 // deepest completed search depth so far
    value: Value,                 // evaluation reported at that depth
    data_points: Vec<(f64, f64)>, // (depth, ln(elapsed_ms)) samples
    hard_limit: f64,              // absolute cap for this move, in ms
    allotted_time: f64,           // soft budget for this move, in ms
    must_play: bool,              // latched "stop searching" decision
}

impl<T> TimeManager<T> for StdTimeManager
    where T: DeepeningSearch<ReportData = Vec<Variation>>
{
    fn new(position: &T::SearchNode, time: &RemainingTime) -> StdTimeManager {
        // Get our remaining time and time increment (in milliseconds).
        let (t, inc) = if position.board().to_move == WHITE {
            (time.white_millis as f64, time.winc_millis as f64)
        } else {
            (time.black_millis as f64, time.binc_millis as f64)
        };

        // Get the number of moves until the next time control, or if
        // not available, guess the number of moves to the end of the
        // game.
        let n = time.movestogo.unwrap_or(40) as f64;
        debug_assert!(n >= 1.0);

        // Calculate the total time we have.
        let time_heap = t + inc * (n - 1.0);

        // Set a hard limit for the time we will spend on this
        // move. Thinking longer that that would be reckless.
        let hard_limit = (t / n.sqrt() + inc).min(t - 1000.0);

        StdTimeManager {
            started_at: SystemTime::now(),
            depth: 0,
            value: VALUE_UNKNOWN,
            data_points: Vec::with_capacity(32),
            hard_limit: if position.legal_moves().len() > 1 {
                hard_limit
            } else {
                // When there is only one legal move, the engine is
                // allowed to think just a fraction of a second in
                // order to find a good ponder move.
                hard_limit.min(500.0)
            },
            allotted_time: if ::get_option("Ponder") == "true" {
                // Statistically, the move we ponder will be played in
                // 50% of the cases. Therefore, in principal we should
                // add half of opponent's thinking time to our time
                // heap. In reality we do not know how opponent's time
                // will be spend, so we speculatively increase our
                // time heap by 50%.
                1.5 * time_heap / n
            } else {
                time_heap / n
            },
            must_play: false,
        }
    }

    /// Decides (and latches) whether the search must stop and a move be
    /// played now. Once `true`, it stays `true` for this move.
    #[allow(unused_variables)]
    fn must_play(&mut self, search_instance: &mut T, report: Option<&SearchReport<Vec<Variation>>>) -> bool {
        if !self.must_play {
            let mut is_finished = false;
            if let Some(r) = report {
                // Only react the first time each new depth completes.
                if r.depth > self.depth {
                    self.depth = r.depth;
                    let (target_depth, t_next) = self.target_depth(r);
                    // Pessimistic estimate for the next depth's duration:
                    // inflate by half the (log-domain) average slope.
                    let t_pessimistic = t_next * AVG_SLOPE.read().unwrap().exp().sqrt();
                    let msg = format!("TARGET_DEPTH={}", target_depth);
                    search_instance.send_message(msg.as_str());
                    is_finished = r.depth >= target_depth || t_pessimistic > self.hard_limit
                }
            }
            self.must_play = is_finished || elapsed_millis(&self.started_at) > self.hard_limit;
        }
        self.must_play
    }
}

impl SetOption for StdTimeManager {
    fn options() -> Vec<(&'static str, OptionDescription)> {
        vec![("Ponder", OptionDescription::Check { default: false })]
    }
}

impl StdTimeManager {
    /// Guesses what target depth we will be able to reach, and how
    /// much time (milliseconds) it will take for the next search
    /// depth to complete.
    fn target_depth(&mut self, report: &SearchReport<Vec<Variation>>) -> (Depth, f64) {
        let t = elapsed_millis(&self.started_at);

        // Ignore the first 1-2 depths.
        if t < 0.001 || report.searched_nodes < 100 {
            return (DEPTH_MAX, t);
        }

        // Add (x, y) to the list of data points.
        let x = report.depth as f64;
        let y = t.ln();
        self.data_points.push((x, y));

        // Do a linear extrapolation based on the last `M` data points.
        const M: usize = 5;
        let y_max = self.allotted_time.max(0.001).ln();
        let x_max;
        let t_next;
        match self.data_points.len() {
            n if n >= M => {
                let last_m = &self.data_points[n - M..];
                let (slope, intercept) = linear_regression(last_m);
                debug_assert!(slope >= 0.001);
                // Fold the first measured slope into the global average
                // (weighted 2:1 towards the existing value).
                if n == M {
                    let mut s = AVG_SLOPE.write().unwrap();
                    *s = (*s * 2.0 + slope) / 3.0;
                }
                x_max = (y_max - intercept) / slope;
                t_next = (slope * (x + 1.0) + intercept).exp();
            }
            _ => {
                // There are not enough data points yet -- use `AVG_SLOPE`.
                let s = AVG_SLOPE.read().unwrap();
                x_max = x + (y_max - y) / *s;
                t_next = t * s.exp();
            }
        };

        // Set the target depth as close as possible to `x_max`.
        let mut target_depth = x_max
            .round()
            .min(DEPTH_MAX as f64)
            .max(DEPTH_MIN as f64) as Depth;

        // Search one ply deeper if the target depth is reached, but
        // position's evaluation worsened a lot.
        //
        // TODO: `25` must be bound to pawn's value.
        if target_depth <= report.depth
            && (self.value != VALUE_UNKNOWN && report.value != VALUE_UNKNOWN)
            && (self.value as isize - report.value as isize >= 25)
        {
            target_depth = min(report.depth + 1, DEPTH_MAX);
        }
        self.value = report.value;
        (target_depth, t_next)
    }
}

lazy_static! {
    // Running average of the regression slope (log-ms per ply), shared
    // across moves; seeded with an empirical 0.7.
    static ref AVG_SLOPE: RwLock<f64> = RwLock::new(0.7);
}

/// Calculates elapsed milliseconds since a given time.
fn elapsed_millis(since: &SystemTime) -> f64 {
    let d = since.elapsed().unwrap_or(Duration::from_millis(0));
    (1000 * d.as_secs()) as f64 + (d.subsec_nanos() / 1_000_000) as f64
}

/// Calculates a regression line that approximates `points`.
fn linear_regression(points: &[(f64, f64)]) -> (f64, f64) { debug_assert!(points.len() > 1); let sum_x = points.iter().fold(0.0, |acc, &p| acc + p.0); let sum_y = points.iter().fold(0.0, |acc, &p| acc + p.1); let sum_xx = points.iter().fold(0.0, |acc, &p| acc + p.0 * p.0); let sum_xy = points.iter().fold(0.0, |acc, &p| acc + p.0 * p.1); let n = points.len() as f64; let slope = (n * sum_xy - sum_x * sum_y) / (n * sum_xx - sum_x * sum_x); let intercept = (sum_y - slope * sum_x) / n; (slope.max(0.001), intercept) } #[cfg(test)] mod tests { #[test] fn linear_regression() { use super::linear_regression; let points = vec![(21.0, 1.0), (22.0, 2.0), (23.0, 3.0), (24.0, 4.0)]; let x = 25.0; let (slope, intercept) = linear_regression(&points); let y = slope * x + intercept; assert!(4.99 < y && y < 5.01); } }
pub trait BeBytes<const N: usize> { fn be_bytes(&self) -> [u8; N]; } impl BeBytes<1> for u8 { #[inline] fn be_bytes(&self) -> [u8; 1] { self.to_be_bytes() } } impl BeBytes<2> for u16 { #[inline] fn be_bytes(&self) -> [u8; 2] { self.to_be_bytes() } } impl BeBytes<4> for u32 { #[inline] fn be_bytes(&self) -> [u8; 4] { self.to_be_bytes() } } impl BeBytes<8> for u64 { #[inline] fn be_bytes(&self) -> [u8; 8] { self.to_be_bytes() } } impl BeBytes<16> for u128 { #[inline] fn be_bytes(&self) -> [u8; 16] { self.to_be_bytes() } } impl BeBytes<1> for i8 { #[inline] fn be_bytes(&self) -> [u8; 1] { self.to_be_bytes() } } impl BeBytes<2> for i16 { #[inline] fn be_bytes(&self) -> [u8; 2] { self.to_be_bytes() } } impl BeBytes<4> for i32 { #[inline] fn be_bytes(&self) -> [u8; 4] { self.to_be_bytes() } } impl BeBytes<8> for i64 { #[inline] fn be_bytes(&self) -> [u8; 8] { self.to_be_bytes() } } impl BeBytes<16> for i128 { #[inline] fn be_bytes(&self) -> [u8; 16] { self.to_be_bytes() } }
use auto_impl::auto_impl; #[auto_impl(&, &mut)] struct Foo(usize, String); fn main() {}
mod css; mod cui; pub use self::cui::{CuiProperty, PageProperty}; use {self::css::CSS_PROPERTIES, super::Value, std::collections::HashMap}; pub type CssProperty = String; pub type CssProperties = HashMap<CssProperty, Value>; pub type CssRules = HashMap<String, CssProperties>; #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum Property { Css(String), Cui(CuiProperty), Page(PageProperty), } pub type Properties = HashMap<Property, Value>; fn is_css_property(name: &str) -> bool { CSS_PROPERTIES.contains(name) } impl Property { pub fn new(property: String) -> Self { if is_css_property(&property) { Self::Css(property) } else { match property.as_str() { "title" => Self::Page(PageProperty::Title), property => Self::Cui(match property { "text" => CuiProperty::Text, "link" => CuiProperty::Link, "tooltip" => CuiProperty::Tooltip, "image" => CuiProperty::Image, property => panic!(" property not recognized: {}", property), }), } } } }
use failure::{Fail, Fallible}; use http::header::{self, AUTHORIZATION}; use reqwest::blocking::Client; use serde::Deserialize; use slog_scope::error; use crate::components::access_token_provider::{AccessTokenProvider, AtpError}; #[derive(Debug, Clone, Deserialize, PartialEq, Eq)] pub struct Device { pub id: String, pub name: String, pub is_active: bool, } #[derive(Debug, Clone, Deserialize)] struct DevicesResponse { pub devices: Vec<Device>, } #[derive(Debug, Fail)] pub enum JukeboxError { #[fail(display = "Device not found: {}", device_name)] DeviceNotFound { device_name: String }, #[fail(display = "Failed to retrieve access token: {}", err)] TokenRetrieval { err: AtpError }, #[fail(display = "HTTP Failure: {}", err)] HttpError { err: reqwest::Error }, #[fail(display = "JSON Deserialization Failure: {}", err)] DeserializationFailed { err: serde_json::Error }, } impl From<AtpError> for JukeboxError { fn from(err: AtpError) -> Self { JukeboxError::TokenRetrieval { err } } } impl From<reqwest::Error> for JukeboxError { fn from(err: reqwest::Error) -> Self { JukeboxError::HttpError { err } } } impl From<serde_json::Error> for JukeboxError { fn from(err: serde_json::Error) -> Self { JukeboxError::DeserializationFailed { err } } } pub fn lookup_device_by_name( access_token_provider: &AccessTokenProvider, device_name: &str, ) -> Result<Device, JukeboxError> { let http_client = Client::new(); let access_token = access_token_provider.get_bearer_token()?; let rsp = http_client .get("https://api.spotify.com/v1/me/player/devices") .header(AUTHORIZATION, &access_token) .send()?; let rsp: DevicesResponse = rsp.json()?; let opt_dev = rsp .devices .into_iter() .filter(|x| x.name == device_name) .next(); match opt_dev { Some(dev) => Ok(dev), None => Err((JukeboxError::DeviceNotFound { device_name: device_name.clone().to_string(), }) .into()), } } pub async fn async_lookup_device_by_name( http_client: &reqwest::Client, access_token_provider: &AccessTokenProvider, device_name: &str, 
) -> Result<Device, JukeboxError> { let access_token = access_token_provider.get_bearer_token()?; let rsp = http_client .get("https://api.spotify.com/v1/me/player/devices") .header(AUTHORIZATION, &access_token) .send() .await? .json::<DevicesResponse>() .await?; let opt_dev = rsp .devices .into_iter() .filter(|x| x.name == device_name) .next(); match opt_dev { Some(dev) => Ok(dev), None => Err((JukeboxError::DeviceNotFound { device_name: device_name.clone().to_string(), }) .into()), } } #[derive(Deserialize, Debug, Clone)] struct CurrentlyPlayingObject { pub is_playing: bool, } pub async fn is_currently_playing( http_client: &reqwest::Client, access_token_provider: &AccessTokenProvider, device_name: &str, ) -> Fallible<bool> { let msg = "Failed to retrieve currently-playing information"; let access_token = access_token_provider.get_bearer_token()?; let device = async_lookup_device_by_name(&http_client, access_token_provider, device_name).await?; let currently_playing = http_client .get("https://api.spotify.com/v1/me/player/currently-playing") // .query(&[("device_id", &device.id)]) .body("") .header(header::CONTENT_LENGTH, 0) .header(AUTHORIZATION, access_token) .send() .await .map_err(|err| { error!("{}: Executing HTTP request failed: {}", msg, err); err }) .map(|rsp| { if !rsp.status().is_success() { error!("{}: HTTP Failure {}", msg, rsp.status()); } rsp })? .error_for_status()? .json::<CurrentlyPlayingObject>() .await?; Ok(device.is_active && currently_playing.is_playing) }
use juicydb::db::*;
use juicydb::parser::*;
use juicydb::storage_manager::*;
use std::io::{self, Write};

/// REPL entry point: reads commands from stdin, executes them against the
/// storage manager, and prints results until EOF or an explicit exit command.
fn main() {
    println!("Welcome to juicydb");
    let mut storage = StorageManager::new();
    loop {
        print!("> ");
        io::stdout().flush().expect("Failed to flush prompt");

        let mut line = String::new();
        io::stdin()
            .read_line(&mut line)
            .expect("Failed to read line");
        // `read_line` yields an empty string only at end-of-input.
        if line.is_empty() {
            return;
        }

        match Parser::new(&line).parse_command() {
            Err(err) => println!("Parse error: {}", err),
            Ok(Command::MetaCommand(MetaCommand::Exit)) => return,
            Ok(Command::MetaCommand(MetaCommand::Print)) => println!("{:#?}", storage),
            Ok(Command::Statement(stmt)) => {
                // DDL/DML go straight to storage; anything else is a query
                // whose rows are printed comma-separated, one row per line.
                let outcome = match stmt {
                    Statement::CreateTable { table, columns } => {
                        storage.create_table(table, Schema::from(columns))
                    }
                    Statement::InsertInto { table, values } => {
                        storage.insert_into(table, values)
                    }
                    query => storage.query(query).and_then(|rows| {
                        for row in rows {
                            for col in row {
                                print!("{}, ", col);
                            }
                            println!();
                        }
                        Ok(())
                    }),
                };
                if let Err(err) = outcome {
                    println!("SQL error: {}", err);
                }
            }
        }
    }
}
// This file is part of lock-free-multi-producer-single-consumer-ring-buffer. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/lock-free-multi-producer-single-consumer-ring-buffer/master/COPYRIGHT. No part of lock-free-multi-producer-single-consumer-ring-buffer, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2017 - 2019 The developers of lock-free-multi-producer-single-consumer-ring-buffer. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/lock-free-multi-producer-single-consumer-ring-buffer/master/COPYRIGHT. #[derive(Debug)] struct RingBufferProducerInner { seen_offset: VolatileRingBufferOffset, } impl RingBufferProducerInner { const Default: Self = Self { seen_offset: VolatileRingBufferOffset(UnsafeCell::new(RingBufferInnerHeader::<()>::MaximumOffset)), }; #[inline(always)] fn initialize(this: NonNull<Self>) { unsafe { write(this.as_ptr(), RingBufferProducerInner::Default) } } }
use std::collections::HashMap;
use std::io;
use std::mem;
use std::net::SocketAddr;
use std::time::Duration;

use futures::{Future, Stream, Async};
use tokio::net::{TcpListener, Incoming, TcpStream};
use tokio::clock;
use tokio::timer::Delay;

/// This stream replaces ``tokio_core::net::Incoming`` and listens many sockets
///
/// It receives a stream of lists of addresses as an input.
/// When a new value is received on the stream it adapts:
///
/// 1. Removes sockets not in the set just received (already established
///    connections aren't interfered with in any way)
/// 2. Adds sockets to the set which weren't listened on before
///
/// Instead of failing on bind error it logs the error and retries in a
/// second (you can change the delay using `BindMany::retry_interval`)
///
/// It's a good idea to pass a stream with a **Void** error, because on
/// receiving an error `BindMany` will log a message (that doesn't contain
/// the error) and will shut down. It's better to log the specific error and
/// send end-of-stream instead, but that is the user's responsibility.
///
/// Note: we track identity of the sockets by the `SocketAddr` used to bind,
/// this means `0.0.0.0` and `127.0.0.1` for example can be bound/unbound
/// independently despite the fact that `0.0.0.0` can accept connections for
/// `127.0.0.1`.
///
/// # Example
///
/// Simple example:
///
/// ```rust,ignore
/// lp.run(
///     BindMany::new(address_stream)
///     .sleep_on_error(TIME_TO_WAIT_ON_ERROR, &h2)
///     .map(move |(mut socket, _addr)| {
///         // Your future is here:
///         Proto::new(socket)
///         // Errors should not pass silently
///         // common idea is to log them
///         .map_err(|e| error!("Protocol error: {}", e))
///     })
///     .listen(MAX_SIMULTANEOUS_CONNECTIONS)
/// ).unwrap(); // stream doesn't end in this case
/// ```
///
/// Example using name resolution via abstract-ns + ns-env-config:
///
/// ```rust,ignore
/// extern crate ns_env_config;
///
/// let mut lp = Core::new().unwrap();
/// let ns = ns_env_config::init(&lp.handle()).unwrap();
/// lp.run(
///     BindMany::new(ns.resolve_auto("localhost", 8080)
///         .map(|addr| addr.addresses_at(0)))
///     .sleep_on_error(TIME_TO_WAIT_ON_ERROR, &h2)
///     .map(move |(mut socket, _addr)| {
///         // Your future is here:
///         Proto::new(socket)
///         // Errors should not pass silently
///         // common idea is to log them
///         .map_err(|e| eprintln!("Protocol error: {}", e))
///     })
///     .listen(MAX_SIMULTANEOUS_CONNECTIONS)
/// ).unwrap(); // stream doesn't end in this case
/// ```
///
pub struct BindMany<S> {
    /// Upstream stream yielding the desired address sets.
    addresses: S,
    /// Delay before re-attempting a failed bind (default 1s).
    retry_interval: Duration,
    /// Pending retry: the timer plus the addresses that failed to bind.
    retry_timer: Option<(Delay, Vec<SocketAddr>)>,
    /// Currently bound listeners, keyed by the address they were bound with.
    inputs: HashMap<SocketAddr, Incoming>,
}

impl<S> BindMany<S> {
    /// Create a new instance
    pub fn new(s: S) -> BindMany<S> {
        BindMany {
            addresses: s,
            retry_interval: Duration::new(1, 0),
            retry_timer: None,
            inputs: HashMap::new(),
        }
    }

    /// Sets the retry interval
    ///
    /// Each time binding a socket fails (including the first one on start),
    /// instead of immediately failing we log the error and sleep this
    /// interval to retry (by default 1 second).
    ///
    /// This behavior is important because if your configuration changes the
    /// number of listening sockets, and one of them is either invalid or
    /// just DNS is temporarily down, you still need to serve other addresses.
    ///
    /// This also helps if you have a failover IP which can only be listened
    /// at when the IP is attached to the host, but the server must be ready
    /// to listen to it anyway (this one might be better achieved by
    /// non-local bind though).
    pub fn retry_interval(&mut self, interval: Duration) -> &mut Self {
        self.retry_interval = interval;
        self
    }
}

impl<S> Stream for BindMany<S>
    where S: Stream,
          S::Item: IntoIterator<Item=SocketAddr>,
{
    type Item = TcpStream;
    type Error = io::Error;

    fn poll(&mut self) -> Result<Async<Option<Self::Item>>, io::Error> {
        // Phase 1: drain the address stream, rebinding to match the newest
        // address set. Listeners for addresses that persist are moved over
        // untouched so established connections are not disturbed.
        loop {
            match self.addresses.poll() {
                Ok(Async::Ready(None)) => {
                    info!("Listening stream reached end-of-stream condition");
                    return Ok(Async::Ready(None));
                }
                Ok(Async::Ready(Some(new))) => {
                    // Take the old map; anything left in `old` afterwards is
                    // an address no longer wanted and gets dropped (unbound).
                    let mut old = mem::replace(&mut self.inputs, HashMap::new());
                    let mut backlog = Vec::new();
                    for addr in new {
                        if let Some(listener) = old.remove(&addr) {
                            self.inputs.insert(addr, listener);
                        } else {
                            match TcpListener::bind(&addr) {
                                Ok(l) => {
                                    self.inputs.insert(addr, l.incoming());
                                }
                                Err(e) => {
                                    // Failed binds are queued for retry, not fatal.
                                    backlog.push(addr);
                                    error!("Error binding {:?}: {}, will retry in {:?}",
                                        addr, e, self.retry_interval);
                                }
                            }
                        }
                    }
                    if backlog.len() > 0 {
                        self.retry_timer = Some((
                            Delay::new(clock::now() + self.retry_interval),
                            backlog));
                    } else {
                        self.retry_timer = None;
                    }
                }
                Ok(Async::NotReady) => break,
                Err(_) => {
                    error!("Error in address stream");
                    return Ok(Async::Ready(None));
                }
            }
        }
        // Phase 2: if a retry timer is pending and has fired, retry the
        // backlog of failed binds; addresses that fail again re-enter the
        // backlog and the timer is re-armed.
        loop {
            if let Some((ref mut timer, ref mut backlog)) = self.retry_timer {
                match timer.poll().expect("deadline never fails") {
                    Async::Ready(()) => {
                        // Drain the backlog; failures repopulate it in place.
                        for addr in mem::replace(backlog, Vec::new()) {
                            match TcpListener::bind(&addr) {
                                Ok(l) => {
                                    self.inputs.insert(addr, l.incoming());
                                }
                                Err(e) => {
                                    backlog.push(addr);
                                    // Lower level on retry
                                    debug!("Error binding {:?}: {}, will retry in {:?}",
                                        addr, e, self.retry_interval);
                                }
                            }
                        }
                        if backlog.len() > 0 {
                            *timer = Delay::new(
                                clock::now() + self.retry_interval
                            );
                            continue; // need to poll timer
                        }
                        // fallthrough to cleaning timer
                    }
                    Async::NotReady =>
                        break,
                }
            }
            self.retry_timer = None;
            break;
        }
        // Phase 3: poll every live listener; the first accepted connection is
        // yielded. A listener yielding `Ready(None)` ends this stream too.
        for inp in self.inputs.values_mut() {
            loop {
                match inp.poll() {
                    Ok(Async::Ready(pair)) => {
                        return Ok(Async::Ready(pair));
                    }
                    Ok(Async::NotReady) => break,
                    Err(e) => return Err(e),
                }
            }
        }
        return Ok(Async::NotReady);
    }
}
extern crate multimap; use multimap::MultiMap; use std::thread; use std::sync::mpsc:: { self, Sender, Receiver, SendError, RecvError, }; use std::str::from_utf8; use std::fmt; use std::io:: { self, Read, }; use serde::Deserialize; use crate::directory:: { self, InitDirectoryError }; use crate::rule:: { parse_all, ParseError, Node, topological_sort, topological_sort_all, TopologicalSortError, }; use crate::ticket:: { Ticket, TicketFactory, }; use crate::packet:: { Packet, PacketError, }; use crate::blob:: { TargetFileInfo, FileResolution, }; use crate::work:: { WorkOption, WorkResult, WorkError, HandleNodeInfo, RuleExt, handle_rule_node, handle_source_only_node, clean_targets, }; use crate::cache:: { DownloaderCache, }; use crate::history:: { HistoryError, DownloaderHistory, }; use crate::memory:: { MemoryError }; use crate::printer::Printer; use termcolor:: { Color, }; use crate::system:: { System, SystemError }; use crate::system::util:: { read_file_to_string, ReadFileToStringError, }; /* Takes a vector of Nodes, iterates through them, and creates two multimaps, one for senders and one for receivers. 
*/
fn make_multimaps(nodes : &Vec<Node>)
-> (
    MultiMap<usize, (usize, Sender<Packet>)>,
    MultiMap<usize, Receiver<Packet>>
)
{
    let mut senders : MultiMap<usize, (usize, Sender<Packet>)> = MultiMap::new();
    let mut receivers : MultiMap<usize, Receiver<Packet>> = MultiMap::new();

    for (target_index, node) in nodes.iter().enumerate()
    {
        for (source_index, sub_index) in node.source_indices.iter()
        {
            // One channel per (source, target) edge of the dependence graph:
            // the source thread sends its ticket, the target thread receives.
            let (sender, receiver) : (Sender<Packet>, Receiver<Packet>) = mpsc::channel();
            senders.insert(*source_index, (*sub_index, sender));
            receivers.insert(target_index, receiver);
        }
    }

    (senders, receivers)
}

/*  Top-level error type for the build/clean operations. */
#[derive(Debug)]
pub enum BuildError
{
    Canceled,
    ReceiverError(RecvError),
    SenderError(SendError<Packet>),
    MemoryFileFailedToRead(MemoryError),
    RuleFileNotUTF8,
    RuleFileFailedToRead(String, io::Error),
    RuleFileFailedToOpen(String, SystemError),
    WorkErrors(Vec<WorkError>),
    RuleFileFailedToParse(ParseError),
    TopologicalSortFailed(TopologicalSortError),
    DirectoryMalfunction,
    HistoryError(HistoryError),
    DownloadUrlsError(DownloadUrlsError),
    WorkError(WorkError),
    Weird,
}

impl fmt::Display for BuildError
{
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result
    {
        match self
        {
            // Typo fixes in user-facing messages: "depdendence" -> "dependence",
            // "recieve" -> "receive".
            BuildError::Canceled =>
                write!(formatter, "Canceled by a dependence"),

            BuildError::ReceiverError(error) =>
                write!(formatter, "Failed to receive anything from source: {}", error),

            BuildError::SenderError(error) =>
                write!(formatter, "Failed to send to dependent: {}", error),

            // Message now describes the actual failure (reading the memory file).
            BuildError::MemoryFileFailedToRead(error) =>
                write!(formatter, "Failed to read memory file: {}", error),

            BuildError::RuleFileNotUTF8 =>
                write!(formatter, "Rule file not valid UTF8."),

            BuildError::RuleFileFailedToParse(error) =>
                write!(formatter, "{}", error),

            BuildError::TopologicalSortFailed(error) =>
                write!(formatter, "Dependence search failed: {}", error),

            BuildError::RuleFileFailedToRead(path, error) =>
                write!(formatter, "Rules file {} failed to read with error: {}", path, error),

            BuildError::RuleFileFailedToOpen(path, error) =>
                write!(formatter, "Rules file {} failed to open with error: {}", path, error),

            BuildError::WorkErrors(work_errors) =>
            {
                let mut error_text = String::new();
                for work_error in work_errors.iter()
                {
                    error_text.push_str(&format!("{}\n", work_error));
                }
                write!(formatter, "{}", error_text)
            },

            BuildError::DirectoryMalfunction =>
                write!(formatter, "Error while managing ruler directory."),

            BuildError::HistoryError(error) =>
                write!(formatter, "Rule history error: {}", error),

            BuildError::DownloadUrlsError(error) =>
                write!(formatter, "Error while establishing download-urls: {}", error),

            BuildError::WorkError(error) =>
                write!(formatter, "{}", error),

            BuildError::Weird =>
                write!(formatter, "Weird! How did you do that!"),
        }
    }
}

/*  Opens each rulefile in turn and reads it, returning a vector of
    (path, content) pairs, or the appropriate BuildError. */
fn read_all_rules<SystemType : System>
(
    system : &SystemType,
    rulefile_paths : Vec<String>
)
-> Result<Vec<(String, String)>, BuildError>
{
    let mut result : Vec<(String, String)> = vec![];

    // Iterating by value lets us move each owned path into the result or
    // error without the redundant `.to_string()` copies the old code made.
    for rulefile_path in rulefile_paths
    {
        match system.open(&rulefile_path)
        {
            Ok(mut file) =>
            {
                let mut rule_content = Vec::new();
                match file.read_to_end(&mut rule_content)
                {
                    Ok(_size) =>
                    {
                        match from_utf8(&rule_content)
                        {
                            Ok(rule_text) => result.push((rulefile_path, rule_text.to_string())),
                            Err(_) => return Err(BuildError::RuleFileNotUTF8),
                        }
                    },
                    Err(error) => return Err(
                        BuildError::RuleFileFailedToRead(rulefile_path, error)),
                }
            },
            Err(error) => return Err(
                BuildError::RuleFileFailedToOpen(rulefile_path, error)),
        }
    }

    Ok(result)
}

/*  This is the function that runs when you type "ruler nodes" at the
    commandline.  It opens the rulefiles, parses them, and returns the vector
    of rule Nodes. */
pub fn get_nodes
<
    SystemType : System,
>
(
    system : &SystemType,
    rulefile_paths : Vec<String>,
    goal_target_opt: Option<String>
)
-> Result<Vec<Node>, BuildError>
{
    let all_rule_text = read_all_rules(system, rulefile_paths)?;
    let rules = parse_all(all_rule_text).map_err(BuildError::RuleFileFailedToParse)?;

    // With a goal target, only that target's ancestors are returned;
    // otherwise all rules are sorted.
    match goal_target_opt
    {
        Some(goal_target) => topological_sort(rules, &goal_target),
        None => topological_sort_all(rules),
    }
    .map_err(BuildError::TopologicalSortFailed)
}

/*  Deserialized form of the optional urlfile: a list of download servers. */
#[derive(Deserialize, PartialEq, Debug)]
struct DownloadUrls
{
    urls: Vec<String>
}

impl DownloadUrls
{
    /// An empty url list, used when no urlfile is given.
    fn new() -> DownloadUrls
    {
        DownloadUrls
        {
            urls : Vec::new()
        }
    }
}

#[derive(Debug)]
pub enum DownloadUrlsError
{
    FailedToReadFile(ReadFileToStringError),
    TomlDeError(toml::de::Error),
}

impl fmt::Display for DownloadUrlsError
{
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result
    {
        match self
        {
            // Bug fix: this message previously said "Failed to create cache
            // directory", a copy-paste from an unrelated error.
            DownloadUrlsError::FailedToReadFile(error) =>
                write!(formatter, "Failed to read download-urls file: {}", error),

            DownloadUrlsError::TomlDeError(error) =>
                write!(formatter, "Download Urls file opened, but failed to parse as toml: {}", error),
        }
    }
}

/*  From the given urls file, read the config file and parse as toml to
    obtain a DownloadUrls. */
fn read_download_urls<SystemType : System>
(
    system : &SystemType,
    path_str : &str
)
-> Result<DownloadUrls, DownloadUrlsError>
{
    let content_string = read_file_to_string(system, path_str)
        .map_err(DownloadUrlsError::FailedToReadFile)?;

    toml::from_str(&content_string).map_err(DownloadUrlsError::TomlDeError)
}

/*  Takes a vector of receivers, and waits for them all to receive, so it can
    hash together all their results into one Ticket object.

    Returns an error if the receivers error or if the packet produces an
    error when it tries to get the ticket from it. */
fn wait_for_sources_ticket(receiver_vec : Vec<Receiver<Packet>>)
-> Result<Ticket, BuildError>
{
    let mut tickets = vec![];
    let mut canceled = false;

    /*  It is tempting to have this loop exit early if one source cancels,
        but that makes possible the following race:

        Suppose two sources A and B.  A cancels quickly, then this loop bails
        early, the thread exits, the receiving channel closes.  Later B tries
        to send a source ticket and fails with "sending on a closed channel" */
    for receiver in receiver_vec.iter()
    {
        match receiver.recv()
        {
            Ok(packet) =>
            {
                match packet.get_ticket()
                {
                    Ok(ticket) => tickets.push(ticket),
                    Err(PacketError::Cancel) => canceled = true,
                }
            },
            Err(error) => return Err(BuildError::ReceiverError(error)),
        }
    }

    if canceled
    {
        return Err(BuildError::Canceled);
    }

    let mut factory = TicketFactory::new();
    for ticket in tickets
    {
        factory.input_ticket(ticket);
    }
    Ok(factory.result())
}

/*  This is the function that runs when you type "ruler build" at the
    commandline.  It opens the rulefile, parses it, and then either updates
    all targets in all rules or, if goal_target_opt is Some, only the targets
    that are ancestors of goal_target_opt in the dependence graph.
*/
pub fn build
<
    SystemType : System + 'static,
    PrinterType : Printer,
>
(
    mut system : SystemType,
    directory_path : &str,
    rulefile_paths : Vec<String>,
    urlfile_path_opt : Option<String>,
    goal_target_opt: Option<String>,
    printer: &mut PrinterType,
)
-> Result<(), BuildError>
{
    // Initialize the ruler directory (memory, history and cache handles).
    let mut elements = match directory::init(&mut system, directory_path)
    {
        Ok(elements) => elements,
        Err(error) =>
        {
            return match error
            {
                InitDirectoryError::FailedToReadMemoryFile(memory_error) =>
                    Err(BuildError::MemoryFileFailedToRead(memory_error)),
                _ => Err(BuildError::DirectoryMalfunction),
            }
        }
    };

    // Optional list of download servers; empty when no urlfile was given.
    let download_urls = match urlfile_path_opt
    {
        None => DownloadUrls::new(),
        Some(path_string) =>
        {
            match read_download_urls(&system, &path_string)
            {
                Ok(download_urls) => download_urls,
                Err(error) => return Err(BuildError::DownloadUrlsError(error)),
            }
        }
    };

    let mut nodes = get_nodes(&system, rulefile_paths, goal_target_opt)?;

    // Channel endpoints wiring each node to its sources and dependents.
    let (mut senders, mut receivers) = make_multimaps(&nodes);

    let mut handles = Vec::new();
    let mut index : usize = 0;

    // Spawn one worker thread per node.  Each handle is paired with the
    // node's rule ticket so results can be written back to rule history.
    for mut node in nodes.drain(..)
    {
        let sender_vec = match senders.remove(&index)
        {
            Some(v) => v,
            None => vec![],
        };

        let receiver_vec = match receivers.remove(&index)
        {
            Some(v) => v,
            None => vec![],
        };

        // Pair each target path with its remembered history (if any).
        let mut target_infos = vec![];
        for target_path in node.targets.drain(..)
        {
            target_infos.push(
                TargetFileInfo
                {
                    history : elements.memory.take_target_history(&target_path),
                    path : target_path,
                }
            );
        }

        // Each download server exposes /files (blobs) and /rules (histories).
        let mut downloader_cache_urls = vec![];
        let mut downloader_history_urls = vec![];
        for url in &download_urls.urls
        {
            downloader_cache_urls.push(format!("{}/files", url));
            downloader_history_urls.push(format!("{}/rules", url));
        }

        let downloader_cache = DownloaderCache::new(downloader_cache_urls);
        let downloader_history = DownloaderHistory::new(downloader_history_urls);

        let system_clone = system.clone();

        handles.push(
            (
                node.rule_ticket.clone(),
                match &node.rule_ticket
                {
                    // No rule ticket: a source-only node, just hash the files
                    // and send the resulting tickets to the dependents.
                    None =>
                    {
                        thread::spawn(
                            move || -> Result<WorkResult, BuildError>
                            {
                                match handle_source_only_node(system_clone, target_infos)
                                {
                                    Ok(result) =>
                                    {
                                        for (sub_index, sender) in sender_vec
                                        {
                                            match sender.send(Packet::from_ticket(result.target_tickets[sub_index].clone()))
                                            {
                                                Ok(_) => {},
                                                Err(error) => return Err(BuildError::SenderError(error)),
                                            }
                                        }
                                        Ok(result)
                                    },
                                    Err(error) =>
                                    {
                                        // On failure, cancel all dependents so they
                                        // don't block forever waiting on us.
                                        for (_sub_index, sender) in sender_vec
                                        {
                                            match sender.send(Packet::cancel())
                                            {
                                                Ok(_) => {},
                                                Err(error) => return Err(BuildError::SenderError(error)),
                                            }
                                        }
                                        Err(BuildError::WorkError(error))
                                    },
                                }
                            }
                        )
                    },

                    // A rule node: wait for all source tickets, then execute
                    // (or recover/download) according to rule history.
                    Some(ticket) =>
                    {
                        let rule_history = match elements.history.read_rule_history(&ticket)
                        {
                            Ok(rule_history) => rule_history,
                            Err(history_error) => return Err(BuildError::HistoryError(history_error)),
                        };

                        let cache_clone = elements.cache.clone();
                        let downloader_cache_clone = downloader_cache.clone();
                        let downloader_rule_history = downloader_history.get_rule_history(&ticket);

                        thread::spawn(
                            move || -> Result<WorkResult, BuildError>
                            {
                                let mut info = HandleNodeInfo::new(system_clone);
                                info.target_infos = target_infos;

                                // Block until every source has sent its ticket
                                // (or a cancel).
                                let sources_ticket = match wait_for_sources_ticket(receiver_vec)
                                {
                                    Ok(sources_ticket) => sources_ticket,
                                    Err(error) =>
                                    {
                                        // Propagate the cancellation downstream.
                                        for (_sub_index, sender) in sender_vec
                                        {
                                            match sender.send(Packet::cancel())
                                            {
                                                Ok(_) => {},
                                                Err(error) => return Err(BuildError::SenderError(error)),
                                            }
                                        }
                                        return Err(error);
                                    }
                                };

                                match handle_rule_node(info, RuleExt
                                {
                                    sources_ticket : sources_ticket,
                                    command : node.command,
                                    rule_history : rule_history,
                                    cache : cache_clone,
                                    downloader_cache_opt : Some(downloader_cache_clone),
                                    downloader_rule_history_opt : Some(downloader_rule_history),
                                })
                                {
                                    Ok(result) =>
                                    {
                                        for (sub_index, sender) in sender_vec
                                        {
                                            match sender.send(Packet::from_ticket(result.target_tickets[sub_index].clone()))
                                            {
                                                Ok(_) => {},
                                                Err(error) => return Err(BuildError::SenderError(error)),
                                            }
                                        }
                                        Ok(result)
                                    },
                                    Err(error) =>
                                    {
                                        for (_sub_index, sender) in sender_vec
                                        {
                                            match sender.send(Packet::cancel())
                                            {
                                                Ok(_) => {},
                                                Err(error) => return Err(BuildError::SenderError(error)),
                                            }
                                        }
                                        Err(BuildError::WorkError(error))
                                    },
                                }
                            }
                        )
                    }
                }
            )
        );

        index += 1;
    }

    // Join all workers, printing per-target banners and collecting errors.
    // Canceled nodes are not errors themselves; the node that caused the
    // cancellation reports the real error.
    let mut work_errors = Vec::new();

    for (node_ticket, handle) in handles
    {
        match handle.join()
        {
            Ok(work_result_result) =>
            {
                match work_result_result
                {
                    Ok(mut work_result) =>
                    {
                        match work_result.work_option
                        {
                            WorkOption::SourceOnly =>
                            {
                            },

                            WorkOption::Resolutions(resolutions) =>
                            {
                                for (i, target_info) in work_result.target_infos.iter().enumerate()
                                {
                                    let (banner_text, banner_color) = match resolutions[i]
                                    {
                                        FileResolution::Recovered => (" Recovered", Color::Green),
                                        FileResolution::Downloaded => ("Downloaded", Color::Yellow),
                                        FileResolution::AlreadyCorrect => ("Up-to-date", Color::Cyan),
                                        FileResolution::NeedsRebuild => (" Outdated", Color::Red),
                                    };
                                    printer.print_single_banner_line(banner_text, banner_color, &target_info.path);
                                }
                            },

                            WorkOption::CommandExecuted(output) =>
                            {
                                for target_info in work_result.target_infos.iter()
                                {
                                    printer.print_single_banner_line(" Built", Color::Magenta, &target_info.path);
                                }

                                if output.out != ""
                                {
                                    printer.print(&output.out);
                                }

                                if output.err != ""
                                {
                                    printer.error(&output.err);
                                }

                                if !output.success
                                {
                                    printer.error(
                                        &format!("RESULT: {}",
                                            match output.code
                                            {
                                                Some(code) => format!("{}", code),
                                                None => "None".to_string(),
                                            }
                                        )
                                    );
                                }
                            },
                        }

                        // Persist the rule history for nodes that have one.
                        match node_ticket
                        {
                            Some(ticket) =>
                            {
                                match work_result.rule_history
                                {
                                    Some(history) =>
                                    {
                                        match elements.history.write_rule_history(ticket, history)
                                        {
                                            Ok(()) => {},
                                            Err(error) => panic!("Fatal Error: {}", error),
                                        }
                                    },
                                    None => {},
                                }
                            }
                            None => {},
                        }

                        // Return the (possibly updated) target histories to memory.
                        for target_info in work_result.target_infos.drain(..)
                        {
                            elements.memory.insert_target_history(target_info.path, target_info.history);
                        }
                    },
                    Err(BuildError::WorkError(work_error)) => work_errors.push(work_error),
                    Err(BuildError::Canceled) => {},
                    Err(error) => panic!("Unexpected build error: {}", error),
                }
            },
            Err(_error) => return Err(BuildError::Weird),
        }
    }

    match elements.memory.to_file()
    {
        Ok(_) => {},
        Err(_) => printer.error("Error writing history"),
    }

    if work_errors.len() == 0
    {
        Ok(())
    }
    else
    {
        Err(BuildError::WorkErrors(work_errors))
    }
}

/*  This is the function that runs when you type "ruler clean" at the
    command-line.  It takes a rulefile, parses it and either removes all
    targets to the cache, or, if goal_target_opt is Some, removes only those
    targets that are ancestors of goal_target_opt in the dependence graph. */
pub fn clean<SystemType : System + 'static>
(
    mut system : SystemType,
    directory_path : &str,
    rulefile_paths: Vec<String>,
    goal_target_opt: Option<String>
)
-> Result<(), BuildError>
{
    let mut elements = match directory::init(&mut system, directory_path)
    {
        Ok(elements) => elements,
        Err(error) =>
        {
            return match error
            {
                InitDirectoryError::FailedToReadMemoryFile(memory_error) =>
                    Err(BuildError::MemoryFileFailedToRead(memory_error)),
                _ => Err(BuildError::DirectoryMalfunction),
            }
        }
    };

    let rules = match parse_all(read_all_rules(&system, rulefile_paths)?)
    {
        Ok(rules) => rules,
        Err(error) => return Err(BuildError::RuleFileFailedToParse(error)),
    };

    let mut nodes = match goal_target_opt
    {
        Some(goal_target) =>
        {
            match topological_sort(rules, &goal_target)
            {
                Ok(nodes) => nodes,
                Err(error) => return Err(BuildError::TopologicalSortFailed(error)),
            }
        },
        None =>
        {
            match topological_sort_all(rules)
            {
                Ok(nodes) => nodes,
                Err(error) => return Err(BuildError::TopologicalSortFailed(error)),
            }
        }
    };

    let mut handles = Vec::new();

    // Only rule nodes (with a rule ticket) get cleaned; source-only nodes
    // are user files and must not be touched.
    for mut node in nodes.drain(..)
    {
        let mut target_infos = Vec::new();
        for target_path in node.targets.drain(..)
        {
            target_infos.push(
                TargetFileInfo
                {
                    history : elements.memory.take_target_history(&target_path),
                    path : target_path,
                }
            );
        }

        let mut system_clone = system.clone();
        let mut local_cache_clone = elements.cache.clone();

        match node.rule_ticket
        {
            Some(_ticket) => handles.push(
                thread::spawn(
                    move || -> Result<(), WorkError>
                    {
                        clean_targets(
                            target_infos,
                            &mut system_clone,
                            &mut local_cache_clone)
                    }
                )
            ),
            None => {},
        }
    }

    let mut work_errors : Vec<WorkError> = Vec::new();

    for handle in handles
    {
        match handle.join()
        {
            Err(_error) => return Err(BuildError::Weird),
            Ok(remove_result_result) =>
            {
                match remove_result_result
                {
                    Ok(_) => {},
                    Err(work_error) => work_errors.push(work_error),
                }
            }
        }
    }

    if work_errors.len() == 0
    {
        Ok(())
    }
    else
    {
        Err(BuildError::WorkErrors(work_errors))
    }
}

#[cfg(test)]
mod test
{
    use crate::directory;
    use crate::build::
    {
        build,
        BuildError,
    };
    use crate::system::
    {
        System,
        fake::FakeSystem
    };
    use crate::work::WorkError;
    use crate::ticket::TicketFactory;
    use crate::cache::
    {
        SysCache,
        OpenError,
    };
    use crate::system::util::
    {
        write_str_to_file,
        read_file_to_string
    };
    use crate::printer::EmptyPrinter;
    use crate::blob::
    {
        TargetHistory
    };
    use std::io::Write;

    // NOTE(review): the rule-file fixtures below are multi-line string
    // literals whose exact newline layout was reconstructed (targets,
    // sources and command sections each delimited by a ":" line) — confirm
    // against the rule parser's expected format.

    /*  Set up a filesystem and a .rules file with one poem depending on two
        verses as source.  Populate the verses with lines of the target poem.
        Run the build command and check that the file appears and has the
        correct contents. */
    #[test]
    fn build_basic()
    {
        let rules = "\
poem.txt
:
verse1.txt
verse2.txt
:
mycat verse1.txt verse2.txt poem.txt
:
";
        let mut system = FakeSystem::new(10);

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "verse2.txt", "Violets are violet.\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        build(
            system.clone(),
            "test.directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new()).unwrap();

        assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are violet.\n");
    }

    /*  Set up a filesystem and a .rules file with one poem depending on two
        verses as source.  Populate the verses with lines of the target poem,
        except, omit one of the source files.  Run the build command and check
        that it errors sensibly. */
    #[test]
    fn build_one_source_file_missing()
    {
        let rules = "\
poem.txt
:
verse1.txt
verse2.txt
:
mycat verse1.txt verse2.txt poem.txt
:
";
        let mut system = FakeSystem::new(10);

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        match build(
            system.clone(),
            "test.directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new())
        {
            Ok(_) => panic!("unexpected success"),
            Err(BuildError::WorkErrors(errors)) =>
            {
                assert_eq!(errors.len(), 1);
                match &errors[0]
                {
                    WorkError::FileNotFound(path_str) => assert_eq!(path_str, "verse2.txt"),
                    _ => panic!("Got work error but not the correct error: {}", errors[0]),
                }
            },
            Err(error) => panic!("Got error but not the correct error: {}", error),
        }
    }

    /*  Two chained rules: the poem depends on a stanza, which depends on a
        verse.  Check the intermediate is built and the poem is correct. */
    #[test]
    fn build_one_dependence()
    {
        let rules = "\
stanza1.txt
:
verse1.txt
:
mycat verse1.txt stanza1.txt
:
poem.txt
:
stanza1.txt
:
mycat stanza1.txt poem.txt
:
";
        let mut system = FakeSystem::new(10);

        write_str_to_file(&mut system, "verse1.txt", "I looked over Jordan, and what did I see?\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        match build(
            system.clone(),
            "test.directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new())
        {
            Ok(_) =>
            {
                assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
                    "I looked over Jordan, and what did I see?\n");
            },
            Err(error) => panic!("Unexpected error: {}", error),
        }
    }

    /*  Same as above, except a stale intermediate file already exists and
        must be rebuilt before the poem is assembled. */
    #[test]
    fn build_one_dependence_with_intermediate_already_present()
    {
        let rules = "\
stanza1.txt
:
verse1.txt
:
mycat verse1.txt stanza1.txt
:
poem.txt
:
stanza1.txt
:
mycat stanza1.txt poem.txt
:
";
        let mut system = FakeSystem::new(10);

        write_str_to_file(&mut system, "verse1.txt", "I looked over Jordan, and what did I see?\n").unwrap();
        write_str_to_file(&mut system, "stanza1.txt", "Some wrong content\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        match build(
            system.clone(),
            "test.directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new())
        {
            Ok(_) =>
            {
                assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
                    "I looked over Jordan, and what did I see?\n");
            },
            Err(error) => panic!("Unexpected error: {}", error),
        }
    }

    /*  Rules for a poem with two verses and a refrain.  Try building the
        poem three times, once with each source file omitted.  Check that
        the error matches the missing file. */
    #[test]
    fn build_poem_with_various_omitted_sources()
    {
        let rules = "\
stanza1.txt
:
verse1.txt
refrain.txt
:
mycat verse1.txt refrain.txt stanza1.txt
:
stanza2.txt
:
verse2.txt
refrain.txt
:
mycat verse2.txt refrain.txt stanza2.txt
:
poem.txt
:
stanza1.txt
stanza2.txt
:
mycat stanza1.txt stanza2.txt poem.txt
:
";
        for omit_me in ["verse1.txt", "verse2.txt", "refrain.txt"]
        {
            let mut system = FakeSystem::new(10);

            if omit_me != "verse1.txt"
            {
                write_str_to_file(&mut system, "verse1.txt", "I looked over Jordan, and what did I see?\n").unwrap();
            }

            if omit_me != "verse2.txt"
            {
                write_str_to_file(&mut system, "verse2.txt", "A band of angels comin' after me\n").unwrap();
            }

            if omit_me != "refrain.txt"
            {
                write_str_to_file(&mut system, "refrain.txt", "Comin' for to carry me home\n").unwrap();
            }

            write_str_to_file(&mut system, "test.rules", rules).unwrap();

            match build(
                system.clone(),
                "test.directory",
                vec!["test.rules".to_string()],
                None,
                Some("poem.txt".to_string()),
                &mut EmptyPrinter::new())
            {
                Ok(_) => panic!("unexpected success"),
                Err(BuildError::WorkErrors(errors)) =>
                {
                    assert_eq!(errors.len(), 1);
                    match &errors[0]
                    {
                        WorkError::FileNotFound(path_str) => assert_eq!(path_str, omit_me),
                        _ => panic!("When omitting {}, Got work error but not the correct error: {}", omit_me, errors[0]),
                    }
                },
                Err(error) => panic!("When omitting {}, Got error but not the correct error: {}", omit_me, error),
            }
        }
    }

    /*  Set up a filesystem and a .rules file with invalid UTF8 in it instead
        of rules.  Check that the build fails with a message about UTF8 */
    #[test]
    fn build_rulefile_not_utf8()
    {
        let mut system = FakeSystem::new(11);

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "verse2.txt", "Violets are violet.\n").unwrap();
        // 0x80 is never valid as a leading UTF-8 byte.
        system.create_file("test.rules").unwrap().write_all(&[0x80u8]).unwrap();

        match build(
            system.clone(),
            "test.directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new())
        {
            Ok(_) => panic!("Unexpected success with invalid rules file."),
            Err(BuildError::RuleFileNotUTF8) => {},
            Err(error) => panic!("Got error but not the correct error: {}", error),
        }
    }

    /*  Set up a filesystem and a .rules file with one real dependence
        missing from the rules.  Build once, make sure it goes as planned,
        then change the contents of the omitted source file.  Check that
        building again produces a particular error: a contradiction. */
    #[test]
    fn build_with_missing_source()
    {
        let rules = "\
poem.txt
:
verse1.txt
:
mycat verse1.txt verse2.txt poem.txt
:
";
        let mut system = FakeSystem::new(10);
        system.create_dir(".ruler-cache").unwrap();

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "verse2.txt", "Violets are blue.\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        build(
            system.clone(),
            "test.directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new()).unwrap();

        system.time_passes(1);

        assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are blue.\n");

        // Change the hidden dependence and damage the target to force a rebuild.
        write_str_to_file(&mut system, "verse2.txt", "Violets are violet.\n").unwrap();
        write_str_to_file(&mut system, "poem.txt", "Wrong content forcing a rebuild").unwrap();

        match build(
            system.clone(),
            "test.directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new())
        {
            Ok(()) => panic!("Unexpected silence when contradiction should arise"),
            Err(error) =>
            {
                match error
                {
                    BuildError::WorkErrors(work_errors) =>
                    {
                        assert_eq!(work_errors.len(), 1);
                        match &work_errors[0]
                        {
                            WorkError::Contradiction(paths) =>
                                assert_eq!(paths, &vec!["poem.txt".to_string()]),
                            _ => panic!("Wrong type of WorkError"),
                        }
                    }
                    _ => panic!("Wrong type of error"),
                }
            },
        }

        assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are violet.\n");
    }

    /*  Set up filesystem to build a poem with two verses.  Invoke the
        build, and check the resulting poem.  Then change a source, rebuild,
        and verify the earlier target was preserved in the cache. */
    #[test]
    fn build_change_build_check_cache()
    {
        let rules = "\
poem.txt
:
verse1.txt
verse2.txt
:
mycat verse1.txt verse2.txt poem.txt
:
";
        let mut system = FakeSystem::new(10);

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "verse2.txt", "Violets are blue.\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        build(
            system.clone(),
            ".ruler",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new()).unwrap();

        system.time_passes(1);

        assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are blue.\n");

        let ticket = TicketFactory::from_file(&system, "poem.txt").unwrap().result();

        write_str_to_file(&mut system, "verse2.txt", "Violets are violet.\n").unwrap();

        build(
            system.clone(),
            ".ruler",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new()).unwrap();

        assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are violet.\n");

        // The displaced first poem should be recoverable from the cache.
        let mut cache = SysCache::new(system.clone(), ".ruler/cache");
        cache.restore_file(&ticket, "temp-poem.txt");

        assert_eq!(read_file_to_string(&mut system, "temp-poem.txt").unwrap(),
            "Roses are red.\nViolets are blue.\n");

        cache.back_up_file_with_ticket(&ticket, "temp-poem.txt").unwrap();

        write_str_to_file(&mut system, "verse2.txt", "Violets are blue.\n").unwrap();

        build(
            system.clone(),
            ".ruler",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new()).unwrap();

        assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are blue.\n");
    }

    /*  Set up filesystem to build a poem with incorrect rules, which say
        they generate a target, but actually do not. */
    #[test]
    fn build_command_fails_to_generate_target()
    {
        let rules = "\
poem.txt
:
verse1.txt
verse2.txt
:
mycat verse1.txt verse2.txt someotherpoem.txt
:
";
        let mut system = FakeSystem::new(10);

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "verse2.txt", "Violets are blue.\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        match build(
            system.clone(),
            ".ruler",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new())
        {
            Ok(_) => panic!("unexpected success"),
            Err(BuildError::WorkErrors(errors)) =>
            {
                assert_eq!(errors.len(), 1);
                match &errors[0]
                {
                    WorkError::TargetFileNotGenerated(path_str) => assert_eq!(path_str, "poem.txt"),
                    _ => panic!("Got work error but not the correct error: {}", errors[0]),
                }
            },
            Err(error) => panic!("Got error but not the correct error: {}", error),
        }
    }

    /*  After a build, the memory should contain a target history for the
        built target, stamped with the build time. */
    #[test]
    fn build_check_target_history()
    {
        let rules = "\
poem.txt
:
verse1.txt
verse2.txt
:
mycat verse1.txt verse2.txt poem.txt
:
";
        let mut system = FakeSystem::new(17);

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "verse2.txt", "Violets are violet.\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        // Before the build, no history is recorded for the target.
        {
            let mut elements = directory::init(&mut system, "ruler-directory").unwrap();
            let target_history_before = elements.memory.take_target_history("poem.txt");
            assert_eq!(target_history_before, TargetHistory::empty());
            elements.memory.insert_target_history("poem.txt".to_string(), target_history_before);
        }

        build(
            system.clone(),
            "ruler-directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new()).unwrap();

        assert_eq!(read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are violet.\n");

        // After the build, the history holds the target's ticket and the
        // FakeSystem timestamp (17).
        {
            let mut elements = directory::init(&mut system, "ruler-directory").unwrap();
            let target_history = elements.memory.take_target_history("poem.txt");
            assert_eq!(target_history,
                TargetHistory::new(
                    TicketFactory::from_str("Roses are red.\nViolets are violet.\n").result(),
                    17));
            elements.memory.insert_target_history("poem.txt".to_string(), target_history);
        }
    }

    /*  A first build should not populate the cache: the cache only receives
        files displaced by later rebuilds. */
    #[test]
    fn build_first_does_not_cache()
    {
        let rules = "\
poem.txt
:
verse1.txt
verse2.txt
:
mycat verse1.txt verse2.txt poem.txt
:
";
        let mut system = FakeSystem::new(19);

        write_str_to_file(&mut system, "verse1.txt", "Roses are red.\n").unwrap();
        write_str_to_file(&mut system, "verse2.txt", "Violets are violet.\n").unwrap();
        write_str_to_file(&mut system, "test.rules", rules).unwrap();

        build(
            system.clone(),
            "ruler-directory",
            vec!["test.rules".to_string()],
            None,
            Some("poem.txt".to_string()),
            &mut EmptyPrinter::new()).unwrap();

        assert_eq!(
            read_file_to_string(&mut system, "poem.txt").unwrap(),
            "Roses are red.\nViolets are violet.\n");

        let elements = directory::init(&mut system, "ruler-directory").unwrap();
        match elements.cache.open(&TicketFactory::from_str("Roses are red.\nViolets are violet.\n").result())
        {
            Ok(_file) => panic!("Unexpected cache presence after first build"),
            Err(OpenError::NotThere) => {},
            Err(_) => panic!("Unexpected error trying to access cache after first build"),
        }
    }
}
use charts::{Chart, ScaleLinear, MarkerType, PointLabelPosition, Color, ScatterView, LineSeriesView}; pub fn normalize_elem(el: f64, min: f64, max: f64) -> f64 { (el - min) / (max - min) } fn denormalize_elem(el: f64, min: f64, max: f64) -> f64 { (el * (max - min)) + min } pub struct DataMM { pub min_0: f64, pub max_0: f64, pub min_1: f64, pub max_1: f64, } pub fn save_final_chart(data: &Vec<(f64, f64)>, theta_0: f64, theta_1: f64, labels: &(String, String), mm: &DataMM) { let width = 1000; let height = 700; let (top, right, bottom, left) = (50, 40, 50, 60); let mut parsed_data: Vec<(f32, f32)> = Vec::new(); for el in data { parsed_data.push((el.0 as f32, el.1 as f32)); } let mut max_x = f32::MIN; let mut max_y = f32::MIN; let mut min_x = f32::MAX; let mut min_y = f32::MAX; for el in &parsed_data { if el.0 > max_x { max_x = el.0; } if el.1 > max_y { max_y = el.1; } if el.0 < min_x { min_x = el.0; } if el.1 < min_y { min_y = el.1; } } let x = ScaleLinear::new() .set_domain(vec![min_x as f32, max_x]) // edit here to modify horizontal start value on final_graph.svg .set_range(vec![0, width - left - right]); let y = ScaleLinear::new() .set_domain(vec![min_y as f32, max_y]) // edit here to modify vertical start value on final_graph.svg .set_range(vec![height - top - bottom, 0]); // Create Scatter series view that is going to represent the data. 
let scatter_view = ScatterView::new() .set_x_scale(&x) .set_y_scale(&y) .set_label_position(PointLabelPosition::E) .set_marker_type(MarkerType::Square) .set_colors(Color::from_vec_of_hex_strings(vec!["#409EFF"])) .set_label_visibility(false) .load_data(&parsed_data).unwrap(); let start_curve = denormalize_elem(theta_0 + theta_1 * normalize_elem(0 as f64, mm.min_0, mm.max_0), mm.min_1, mm.max_1); let end_curve = denormalize_elem(theta_0 + theta_1 * normalize_elem(max_x as f64 * 1.2, mm.min_0, mm.max_0), mm.min_1, mm.max_1); let curve = vec![(0 as f32, start_curve as f32), (max_x * 1.2 as f32, end_curve as f32)]; // Create Line series view that is going to represent the data. let line_view = LineSeriesView::new() .set_x_scale(&x) .set_y_scale(&y) .set_marker_type(MarkerType::Circle) .set_colors(Color::from_vec_of_hex_strings(vec!["#F56C6C"])) .set_label_visibility(false) .load_data(&curve).unwrap(); Chart::new() .set_width(width) .set_height(height) .set_margins(top, right, bottom, left) .add_title(String::from("Linear Regression 42")) .add_view(&scatter_view) .add_view(&line_view) .add_axis_bottom(&x) .add_axis_left(&y) .add_bottom_axis_label(&labels.0) .add_left_axis_label(&labels.1) .save("charts/chart_final.svg").unwrap(); } pub fn save_line_chart(data: &Vec<(f64, f64)>, labels: &(String, String)) { let width = 1000; let height = 700; let (top, right, bottom, left) = (90, 40, 50, 60); let mut parsed_data: Vec<(f32, f32)> = Vec::new(); for el in data { parsed_data.push((el.0 as f32, el.1 as f32)); } let mut max_x = f32::MIN; let mut max_y = f32::MIN; let mut min_x = f32::MAX; let mut min_y = f32::MAX; for el in &parsed_data { if el.0 > max_x { max_x = el.0; } if el.0 < min_x { min_x = el.0; } if el.1 > max_y { max_y = el.1; } if el.1 < min_y { min_y = el.1; } } let x = ScaleLinear::new() .set_domain(vec![min_x as f32, max_x * 1.1]) .set_range(vec![0, width - left - right]); let y = ScaleLinear::new() .set_domain(vec![min_y as f32, max_y * 1.5]) 
.set_range(vec![height - top - bottom, 0]); // Create Line series view that is going to represent the data. let line_view = LineSeriesView::new() .set_x_scale(&x) .set_y_scale(&y) .set_marker_type(MarkerType::Circle) .set_colors(Color::from_vec_of_hex_strings(vec!["#F56C6C"])) .set_label_visibility(false) .load_data(&parsed_data).unwrap(); Chart::new() .set_width(width) .set_height(height) .set_margins(top, right, bottom, left) .add_title(String::from("Learning curve")) .add_view(&line_view) .add_axis_bottom(&x) .add_axis_left(&y) .add_bottom_axis_label(&labels.0) .add_left_axis_label(&labels.1) .save("charts/chart_learning_curve.svg").unwrap(); } pub fn save_cost_chart(costs: &Vec<f64>) { let width = 1000; let height = 700; let (top, right, bottom, left) = (50, 40, 50, 85); let mut parsed_cost: Vec<(f32, f32)> = Vec::new(); let mut i: f32 = 0.0; for el in costs { parsed_cost.push((i, *el as f32)); i+= 1.0; } let mut max_x = f32::MIN; let mut max_y = f32::MIN; let mut min_x = f32::MAX; let mut min_y = f32::MAX; for el in &parsed_cost { if el.0 > max_x { max_x = el.0; } if el.1 > max_y { max_y = el.1; } if el.0 < min_x { min_x = el.0; } if el.1 < min_y { min_y = el.1; } } let x = ScaleLinear::new() .set_domain(vec![min_x as f32, max_x]) // edit here to modify horizontal start value on cost_graph.svg .set_range(vec![0, width - left - right]); let y = ScaleLinear::new() .set_domain(vec![min_y as f32, max_y]) // edit here to modify vertical start value on cost_graph.svg .set_range(vec![height - top - bottom, 0]); // Create Line series view that is going to represent the data. 
let line_view = LineSeriesView::new() .set_x_scale(&x) .set_y_scale(&y) .set_marker_type(MarkerType::Circle) .set_colors(Color::from_vec_of_hex_strings(vec!["#F56C6C"])) .set_label_visibility(false) .load_data(&parsed_cost).unwrap(); Chart::new() .set_width(width) .set_height(height) .set_margins(top, right, bottom, left) .add_title(String::from("MSE chart")) .add_view(&line_view) .add_axis_bottom(&x) .add_axis_left(&y) .add_bottom_axis_label("Iteration") .add_left_axis_label("Cost") .save("charts/chart_mse.svg").unwrap(); }
// Running code on cleanup with the Drop trait.
//
// Reference: https://doc.rust-lang.org/book/ch15-03-drop.html

use std::{
    fmt::Debug,
    ops::{Deref, DerefMut},
};

/// A toy smart pointer that owns a clone of the value it was built from and
/// announces its own destruction when dropped.
#[derive(Debug)]
pub struct SmartPointer<T: Debug + Clone>(T);

impl<T> SmartPointer<T>
where
    T: Debug + Clone,
{
    /// Builds a pointer holding a clone of `x`; the caller keeps the original.
    pub fn new(x: &T) -> Self {
        SmartPointer(x.clone())
    }
}

impl<T> Drop for SmartPointer<T>
where
    T: Debug + Clone,
{
    /// Runs automatically when the pointer leaves scope.
    fn drop(&mut self) {
        println!("dropping {:?}", self.0);
    }
}

impl<T> Deref for SmartPointer<T>
where
    T: Debug + Clone,
{
    type Target = T;

    /// Immutable access to the wrapped value via `*pointer`.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T> DerefMut for SmartPointer<T>
where
    T: Debug + Clone,
{
    /// Mutable access to the wrapped value via `*pointer = …`.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

#[cfg(test)]
mod tests {
    use super::SmartPointer;

    #[test]
    fn new_i32() {
        let original = 1_i32;
        let pointer = SmartPointer::new(&original);
        assert_eq!(1, original);
        assert_eq!(1, *pointer);
    }

    #[test]
    fn new_string() {
        let original = String::from("Hi");
        let pointer = SmartPointer::new(&original);
        let want = String::from("Hi");
        assert_eq!(want, original);
        assert_eq!(want, *pointer);
    }

    #[test]
    fn deref_mut_i32() {
        let original = 3_i32;
        let mut pointer = SmartPointer::new(&original);
        *pointer = 9;
        assert_eq!(3, original);
        assert_eq!(9, *pointer);
    }

    #[test]
    fn deref_mut_string() {
        let original = String::from("Rust");
        let mut pointer = SmartPointer::new(&original);
        *pointer = String::from("Rustacian");
        assert_eq!(String::from("Rust"), original);
        assert_eq!(String::from("Rustacian"), *pointer);
    }
}
// Copyright 2022 The Tari Project // SPDX-License-Identifier: BSD-3-Clause //! Tari-Crypto #![no_std] #[allow(unused_imports)] #[macro_use] extern crate alloc; #[cfg(any(feature = "bulletproofs_plus", test))] #[macro_use] extern crate std; #[macro_use] mod macros; pub mod commitment; pub mod deterministic_randomizer; pub mod dhke; pub mod hashing; pub mod keys; #[cfg(feature = "bulletproofs_plus")] pub mod range_proof; #[cfg(feature = "bulletproofs_plus")] pub mod rewindable_range_proof; pub mod signatures; // Implementations #[allow(clippy::op_ref)] pub mod ristretto; pub mod errors; #[cfg(feature = "bulletproofs_plus")] pub mod extended_range_proof; // Re-export tari_utils pub use tari_utilities;
use cancellation::CancellationToken;
use crossbeam::channel::{select, unbounded};
use std::sync::Arc;

use crate::hash_file_process::{
    FileProgress, HashFileProcessResult, HashFileProcessType, HashFileProcessor,
};
use crate::output::Output;

/// Console front-end for a [`HashFileProcessor`]: runs the processor on a
/// worker thread and renders its progress/error/warning events on another.
pub struct UI {
    processor: HashFileProcessor,
    // When true, progress and completion output are suppressed; errors and
    // warnings are still drained (their senders are always registered).
    silent: bool,
}

impl UI {
    pub fn new(processor: HashFileProcessor, silent: bool) -> UI {
        UI { processor, silent }
    }

    /// Runs the processor to completion (or cancellation), rendering events as
    /// they arrive, and returns the processor's result.
    ///
    /// Shutdown protocol: the worker thread drops the error/warning/progress
    /// senders when processing finishes; the message loop exits once all three
    /// receivers report disconnection.
    pub fn run(
        mut self,
        cancellation_token: Arc<CancellationToken>,
        process_type: HashFileProcessType,
    ) -> HashFileProcessResult {
        let silent = self.silent;
        let (error_sender, error_receiver) = unbounded();
        let (warning_sender, warning_receiver) = unbounded();
        let (progress_sender, progress_receiver) = unbounded();
        let (complete_sender, complete_receiver) = unbounded();
        self.processor.set_error_event_sender(error_sender.clone());
        self.processor
            .set_warning_event_sender(warning_sender.clone());
        if !silent {
            self.processor
                .set_progress_event_sender(progress_sender.clone());
            self.processor
                .set_complete_event_sender(complete_sender.clone());
        }
        let message_loop = std::thread::spawn(move || {
            let mut error_sender_dropped = false;
            let mut warning_sender_dropped = false;
            // In silent mode no progress events will ever arrive, so treat the
            // progress channel as already finished.
            let mut progress_sender_dropped = silent;
            let mut senders_dropped = false;
            // Set when an error/warning was reported for the current file so
            // its "processed" line is not printed.
            let mut skip_processed = false;
            let mut output = Output::new();
            let mut file_progress = FileProgress {
                ..Default::default()
            };
            output.write_init();
            while !senders_dropped {
                select! {
                    recv(progress_receiver) -> msg => {
                        if let Ok(args) = msg {
                            if args.bytes_processed == 0 {
                                // bytes_processed == 0 marks the start of a new
                                // file: flush the previous file's line first.
                                if file_progress.file_path != "" && !skip_processed {
                                    output.write_processed(&file_progress.file_path);
                                }
                                skip_processed = false;
                                file_progress = FileProgress { ..args };
                            } else {
                                file_progress.bytes_processed = args.bytes_processed;
                            }
                            output.write_progress(&file_progress);
                        } else {
                            progress_sender_dropped = true;
                        }
                    },
                    recv(error_receiver) -> msg => {
                        if let Ok(error) = msg {
                            skip_processed = true;
                            output.write_error(&error);
                        } else {
                            error_sender_dropped = true;
                        }
                    },
                    recv(warning_receiver) -> msg => {
                        if let Ok(warning) = msg {
                            skip_processed = true;
                            // NOTE(review): warnings are rendered through
                            // write_error — presumably Output has no dedicated
                            // warning writer; confirm this is intentional.
                            output.write_error(&warning);
                        } else {
                            warning_sender_dropped = true;
                        }
                    }
                }
                senders_dropped = progress_sender_dropped
                    && error_sender_dropped
                    && warning_sender_dropped;
            }
            // Flush the last file's line, which has no successor to trigger it.
            if !silent && !skip_processed {
                output.write_processed(&file_progress.file_path);
            }
        });
        let process = std::thread::spawn(move || {
            let result = self
                .processor
                .process_with_cancellation_token(cancellation_token);
            // Dropping the senders disconnects the channels and lets the
            // message loop terminate.
            drop(error_sender);
            drop(warning_sender);
            drop(progress_sender);
            result
        });
        message_loop.join().unwrap();
        if !silent {
            if let Ok(result) = complete_receiver.recv() {
                let output = Output::new();
                if result == HashFileProcessResult::Canceled {
                    output.clear_line();
                } else {
                    output.write_result(format!("{:?} result: {:?}", process_type, result));
                }
            }
        }
        drop(complete_sender);
        process.join().unwrap()
    }
}
/* * Datadog API V1 Collection * * Collection of all Datadog Public endpoints. * * The version of the OpenAPI document: 1.0 * Contact: support@datadoghq.com * Generated by: https://openapi-generator.tech */ /// GeomapWidgetDefinitionStyle : The style to apply to the widget. #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GeomapWidgetDefinitionStyle { /// The color palette to apply to the widget. #[serde(rename = "palette")] pub palette: String, /// Whether to flip the palette tones. #[serde(rename = "palette_flip")] pub palette_flip: bool, } impl GeomapWidgetDefinitionStyle { /// The style to apply to the widget. pub fn new(palette: String, palette_flip: bool) -> GeomapWidgetDefinitionStyle { GeomapWidgetDefinitionStyle { palette, palette_flip, } } }
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::combinator::{map, value};
use nom::multi::many0;
use nom::IResult;

use crate::ast;

/// Parses a complete expression; fails unless the whole input is consumed.
pub(crate) fn parse(inp: &str) -> IResult<&str, ast::Node> {
    use nom::combinator::all_consuming;
    all_consuming(parse_expr)(inp)
}

/// Grammar entry point: expr := add.
pub(crate) fn parse_expr(inp: &str) -> IResult<&str, ast::Node> {
    parse_add(inp)
}

/// add := prod (("+" | "-") prod)*
///
/// The trailing operator/operand pairs are folded left-to-right, so
/// `a - b - c` builds `(a - b) - c`.
pub(crate) fn parse_add(inp: &str) -> IResult<&str, ast::Node> {
    use nom::sequence::pair;
    #[derive(Clone)]
    enum AddOp {
        Add,
        Sub,
    }
    map(
        pair(
            parse_prod,
            many0(pair(
                alt((value(AddOp::Add, tag("+")), value(AddOp::Sub, tag("-")))),
                parse_prod,
            )),
        ),
        |(left, rights)| {
            rights
                .into_iter()
                .fold(left, |left, right_op| match right_op {
                    (AddOp::Add, right) => ast::Node::add(left, right),
                    (AddOp::Sub, right) => ast::Node::sub(left, right),
                })
        },
    )(inp)
}

/// prod := atom (("*" | "/") atom)*
///
/// BUG FIX: the repeated right-hand operand was previously `parse_prod`
/// itself, which recursed before folding and made `*`/`/` right-associative
/// (`8/2/2` parsed as `8/(2/2)` instead of `(8/2)/2`). It now repeats
/// `parse_atom`, mirroring `parse_add`, so products fold left-to-right.
pub(crate) fn parse_prod(inp: &str) -> IResult<&str, ast::Node> {
    use nom::sequence::pair;
    #[derive(Clone)]
    enum ProdOp {
        Mul,
        Div,
    }
    map(
        pair(
            parse_atom,
            many0(pair(
                alt((value(ProdOp::Mul, tag("*")), value(ProdOp::Div, tag("/")))),
                parse_atom,
            )),
        ),
        |(left, rights)| {
            rights
                .into_iter()
                .fold(left, |left, right_op| match right_op {
                    (ProdOp::Mul, second) => ast::Node::mul(left, second),
                    (ProdOp::Div, second) => ast::Node::div(left, second),
                })
        },
    )(inp)
}

/// atom := integer literal | identifier | "(" expr ")"
fn parse_atom(inp: &str) -> IResult<&str, ast::Node> {
    use nom::character::complete::alpha1;
    use nom::character::complete::i32;
    use nom::sequence::delimited;
    alt((
        map(i32, ast::Node::literal),
        map(alpha1, ast::Node::var),
        delimited(tag("("), parse_expr, tag(")")),
    ))(inp)
}
extern crate data_structure;
use data_structure::LinkedStack;

// Pushing 0..10 should produce a ten-element stack whose Debug output lists
// the values in insertion order.
#[test]
fn test_push() {
    let mut stack = LinkedStack::new();
    (0..10).for_each(|value| stack.push(value));
    assert_eq!(
        "len(10) : (start)->(0)->(1)->(2)->(3)->(4)->(5)->(6)->(7)->(8)->(9)",
        format!("{:?}", stack)
    );
}

// Popping returns values in LIFO order and yields None once the stack is empty.
#[test]
fn test_pop() {
    let mut stack = LinkedStack::new();
    (0..10).for_each(|value| stack.push(value));
    assert_eq!(Some(9), stack.pop());
    for _ in 0..2 {
        stack.pop();
    }
    assert_eq!(Some(6), stack.pop());
    for _ in 0..3 {
        stack.pop();
    }
    assert_eq!(Some(2), stack.pop());
    for _ in 0..2 {
        stack.pop();
    }
    assert_eq!(None, stack.pop());
}
use amethyst::{
    ecs::prelude::*,
    input::{InputHandler, StringBindings},
};

use crate::resources::{Board, Game, MessageChannel, MovingDirection, Msg, State};
use std::collections::{HashSet, VecDeque};

/// ECS system that turns axis input into queued movement directions and, on
/// each game tick, emits a `Msg::Move` on the message channel.
#[derive(Debug)]
pub struct InputSystem {
    // Reader handle into the shared MessageChannel, obtained in setup().
    message_reader: Option<ReaderId<Msg>>,
    // Axis names currently held down, used for edge detection of new presses.
    pressing: HashSet<String>,
    // Directions queued by key presses, consumed one per tick.
    key_queue: VecDeque<MovingDirection>,
}

impl Default for InputSystem {
    fn default() -> Self {
        InputSystem {
            message_reader: None,
            pressing: HashSet::new(),
            key_queue: VecDeque::new(),
        }
    }
}

impl<'s> System<'s> for InputSystem {
    type SystemData = (
        Read<'s, InputHandler<StringBindings>>,
        Write<'s, MessageChannel>,
        ReadExpect<'s, Board>,
        ReadExpect<'s, Game>,
    );

    fn setup(&mut self, res: &mut Resources) {
        Self::SystemData::setup(res);
        self.message_reader = Some(res.fetch_mut::<MessageChannel>().register_reader());
    }

    fn run(&mut self, (inputs, mut messages, board, game): Self::SystemData) {
        // Only react to input while the game is in its main state.
        if &State::Main != game.get_state() {
            return;
        }
        for axis in inputs.bindings.axes() {
            let value = inputs.axis_value(axis).unwrap();
            let was_down = self.pressing.contains(axis.as_str());
            let maybe_direction = MovingDirection::from_axis(axis, value);
            let is_down = maybe_direction.is_some();
            if was_down && !is_down {
                // Key released: forget it so the next press is seen as new.
                self.pressing.remove(axis.as_str());
            }
            if !was_down && is_down {
                // new press
                self.pressing.insert(axis.clone());
                self.key_queue.push_back(maybe_direction.unwrap());
            }
            // NOTE(review): the tick check below sits inside the per-axis loop,
            // and messages.read() consumes the reader's backlog — so only the
            // first axis iteration can observe a Tick. Looks like it was meant
            // to run once per frame, after the loop; confirm intent.
            let mut is_tick = false;
            for message in messages.read(self.message_reader.as_mut().unwrap()) {
                if let Msg::Tick(_) = message {
                    is_tick = true;
                }
            }
            if is_tick {
                if let Some(direction) = self.key_queue.pop_front() {
                    // Queued key press: move that way if the board allows it.
                    if board.input_valid(&direction) {
                        messages.single_write(Msg::Move(direction.clone()));
                    }
                } else {
                    // No pending input: keep moving in the current direction.
                    messages.single_write(Msg::Move(board.current_direction().clone()));
                }
            }
        }
    }
}
mod request; use request::{Request, RequestMethod}; mod response; use response::Response; mod cpanel; mod host; mod database; const DB_FILE: &str = "dyndns.db"; fn main() { let mut response = Response::new(); // For now, assume we'll always be returning plain text response.add_header("Content-Type: text/plain"); let request = match Request::parse() { Ok(val) => val, Err(_err) => { eprintln!("Couldn't parse request: {}", _err); response.set_status_code(500); response.send(); return }, }; let db = database::open(DB_FILE); if request.debug { response.add_content(&format!("{:#?}", request)); } match request.method { RequestMethod::Get => { let existing_host = host::read(&db, &request.host); response.set_status_code(200); response.add_content(&format!("{:#?}", existing_host)); }, RequestMethod::Post => { // Use the supplied IP address if available, fall back to the client IP let new_ip = if request.ip != "" { request.ip } else { request.remote_address }; let updated_host = host::update(&db, &request.host, &new_ip); response.set_status_code(200); response.add_content(&format!("{:#?}", updated_host)); }, _ => { // Bad request response.set_status_code(400); }, } response.send(); }
/*
https://projecteuler.net

Surprisingly there are only three numbers that can be written as the sum of fourth powers of
their digits:

1634 = 1^4 + 6^4 + 3^4 + 4^4
8208 = 8^4 + 2^4 + 0^4 + 8^4
9474 = 9^4 + 4^4 + 7^4 + 4^4

As 1 = 1^4 is not a sum it is not included.
The sum of these numbers is 1634 + 8208 + 9474 = 19316.

Find the sum of all the numbers that can be written as the sum of fifth powers of their digits.
*/

/// Exponent applied to each digit (5 for this problem).
const P: u32 = 5;

/// Sums every number that equals the sum of the P-th powers of its digits.
///
/// Upper bound: a d-digit number is at least 10^(d-1) but its digit-power sum
/// is at most d * 9^5 = d * 59049; for d = 7 that maximum (413343) has only
/// 6 digits, so nothing with 7+ digits can qualify and 6 * 9^5 = 354294 is a
/// safe search limit. Single-digit numbers are excluded because a lone digit
/// is not a "sum", matching the problem statement.
fn solve() -> u64 {
    // d^P for each digit d, precomputed once and indexed by the digit.
    let pows: Vec<u64> = (0..10u64).map(|d| d.pow(P)).collect();
    (10..=6 * 9u64.pow(P))
        .filter(|&n| {
            // Accumulate the digit-power sum of n.
            let mut rest = n;
            let mut digit_power_sum = 0;
            while rest > 0 {
                digit_power_sum += pows[(rest % 10) as usize];
                rest /= 10;
            }
            digit_power_sum == n
        })
        .sum()
}

/// Formats `n` with `,` as thousands separator, e.g. 1234567 -> "1,234,567".
/// (Extracted from main(), where it was an inline loop.)
fn format_with_commas(mut n: u128) -> String {
    if n == 0 {
        return "0".to_string();
    }
    let mut s = String::new();
    while n > 0 {
        let group = n % 1000;
        n /= 1000;
        if n > 0 {
            // Interior groups are zero-padded to three digits.
            s = format!(",{:03}", group) + &s;
        } else {
            s = format!("{}", group) + &s;
        }
    }
    s
}

fn main() {
    let start_time = std::time::Instant::now();
    let sol = solve();
    let elapsed = start_time.elapsed().as_micros();
    println!("\nSolution: {}", sol);
    // "Elasped" typo in the output fixed to "Elapsed".
    println!("Elapsed time: {} us", format_with_commas(elapsed));
}
use std::sync::{Arc, RwLock};
use std::thread;
use std::time::Duration;

use slog_scope::{error, info};

use crate::components::access_token_provider::AccessTokenProvider;

use super::util;

pub enum SupervisorCommands {
    Terminate,
}

/// Abstraction over a Spotify Connect client process.
pub trait SpotifyConnector {
    /// Polls `device_id()` every 500ms for up to 30 attempts (~15s) until the
    /// connector has registered a device, then returns Ok.
    fn wait_until_ready(&self) -> Result<(), util::JukeboxError> {
        let n_attempts = 30;
        for _idx in 0..n_attempts {
            if self.device_id().is_some() {
                info!("Initial Device ID retrieved");
                return Ok(());
            }
            thread::sleep(Duration::from_millis(500));
        }
        error!("Failed to wait for initial Device ID");
        Err(util::JukeboxError::DeviceNotFound {
            device_name: "FIXME".to_string(),
        })
    }
    /// The Spotify device ID of the running connector, once known.
    fn device_id(&self) -> Option<String>;
    /// Asks for the connector process to be restarted.
    fn request_restart(&self);
}

pub mod external_command {
    use super::*;
    use crate::effects::spotify::{self, util::JukeboxError};
    use failure::{Context, Fallible};
    use slog_scope::{error, info, warn};
    use std::env;
    use std::process::{Child, Command};
    use std::thread::{self, JoinHandle};
    use std::time::Duration;

    /// Runs librespot as a supervised child process: a supervisor thread
    /// respawns it when it dies, and a watcher thread keeps `device_id` fresh.
    pub struct ExternalCommand {
        device_id: Arc<RwLock<Option<String>>>,
        // status: Receiver<T>,
        child: Arc<RwLock<Child>>,
        // command: Sender<SupervisorCommands>,
        _supervisor: JoinHandle<()>,
    }

    /// Everything needed to (re)spawn and monitor the librespot child.
    struct SupervisedCommand {
        pub cmd: String,
        pub device_name: String,
        pub username: String,
        pub password: String,
        pub cache_directory: String,
        pub device_id: Arc<RwLock<Option<String>>>,
        pub librespot_cmd: String,
        pub access_token_provider: AccessTokenProvider,
        child: Arc<RwLock<Child>>,
    }

    impl Drop for SupervisedCommand {
        // Best-effort kill of the child so no orphan librespot survives us.
        fn drop(&mut self) {
            if let Err(err) = self.child.write().unwrap().kill() {
                error!(
                    "Failed to terminate supervised librespot while dropiing SupervisedCommand: {}",
                    err
                );
            }
        }
    }

    impl SupervisedCommand {
        // fn kill_child(&mut self) -> Result<(), std::io::Error> {
        //     self.child.write().unwrap().kill()
        // }

        /// Launches librespot with fixed bitrate/volume flags.
        fn spawn(
            username: &str,
            password: &str,
            device_name: &str,
            librespot_cmd: &str,
            cache_directory: &str,
        ) -> Result<Child, std::io::Error> {
            Command::new(librespot_cmd)
                .arg("--name")
                .arg(device_name)
                .arg("--username")
                .arg(username)
                .arg("--password")
                .arg(password)
                .arg("--bitrate")
                .arg("160")
                .arg("--cache")
                .arg(cache_directory)
                .arg("--enable-volume-normalisation")
                .arg("--linear-volume")
                .arg("--initial-volume=100")
                .spawn()
        }

        /// Spawns a fresh child and swaps it into the shared handle.
        fn respawn(&mut self) -> Result<(), std::io::Error> {
            let child = Self::spawn(
                &self.username,
                &self.password,
                &self.device_name,
                &self.librespot_cmd,
                &self.cache_directory,
            )?;
            *(self.child.write().unwrap()) = child;
            Ok(())
        }

        /// Consumes self and runs the supervisor loop on a named thread.
        fn spawn_supervisor(self) -> JoinHandle<()> {
            info!("Spawning supervisor for Spotify Connect command");
            thread::Builder::new()
                .name("spotify-supervisor".to_string())
                .spawn(move || Self::supervisor(self))
                .unwrap()
        }

        /// Starts the device-ID watcher thread (after a 2s grace period so the
        /// freshly spawned child has time to register with Spotify).
        fn spawn_device_id_watcher(&self) -> JoinHandle<()> {
            info!("Spawning device ID watcher for Spotify Connect command");
            let access_token_provider = Arc::new(self.access_token_provider.clone());
            let device_name = self.device_name.clone();
            let device_id = Arc::clone(&self.device_id);
            let child = Arc::clone(&self.child);
            thread::Builder::new()
                .name("spotify-device-watcher".to_string())
                .spawn(move || {
                    thread::sleep(Duration::from_secs(2));
                    Self::device_id_watcher(access_token_provider, device_name, device_id, child)
                })
                .unwrap()
        }

        /// Every 10s, resolves the device name to a device ID via the Spotify
        /// API. If the device disappears, clears the cached ID and kills the
        /// child so the supervisor loop restarts it.
        fn device_id_watcher(
            access_token_provider: Arc<AccessTokenProvider>,
            device_name: String,
            device_id: Arc<RwLock<Option<String>>>,
            child: Arc<RwLock<Child>>,
        ) {
            loop {
                // info!("device ID watcher tick");
                // info!("Looking for device named '{}'", device_name);
                match spotify::util::lookup_device_by_name(&access_token_provider, &device_name) {
                    Ok(device) => {
                        *(device_id.write().unwrap()) = Some(device.id);
                    }
                    Err(JukeboxError::DeviceNotFound { .. }) => {
                        warn!(
                            "No Spotify device ID found for device name '{}'",
                            device_name
                        );
                        *(device_id.write().unwrap()) = None;
                        // kill child
                        if let Err(err) = child.write().unwrap().kill() {
                            error!("Failed to terminate Spotify Connector: {}", err);
                        } else {
                            info!("Terminated Spotify Connector");
                        }
                    }
                    Err(err) => {
                        error!("Failed to lookup Spotify Device ID: {}", err);
                        // fixme, what to do here for resilience?
                    }
                }
                thread::sleep(Duration::from_millis(10000));
            }
        }

        /// Every 2s, checks whether the child has exited (non-blocking
        /// try_wait) and respawns it if so.
        fn supervisor(mut self) {
            loop {
                // info!("supervisor tick");
                // Child is expected to be running.
                // Check if it has terminated for some reason:
                let res = {
                    // Scope the write lock so it is released before respawning.
                    let mut writer = self.child.write().unwrap();
                    writer.try_wait()
                };
                match res {
                    Ok(Some(status)) => {
                        // child terminated. needs to be restarted.
                        warn!(
                            "Spotify Connector terminated unexpectedly with status {}",
                            status
                        );
                        if let Err(err) = self.respawn() {
                            error!("Failed to respawn Spotify Connector: {}", err);
                        } else {
                            let pid = self.child.read().unwrap().id();
                            info!("Respawned new Spotify Connector (PID {})", pid);
                        }
                    }
                    Ok(None) => {}
                    Err(err) => {
                        error!(
                            "Failed to check if Spotify Connector is still running: {}",
                            err
                        );
                        // fixme, what to do for resilience?
                    }
                }
                thread::sleep(Duration::from_millis(2000));
            }
        }

        /// Spawns the initial child and bundles it with its configuration;
        /// also hands back the shared child handle for the ExternalCommand.
        pub fn new(
            cmd: String,
            device_name: &str,
            librespot_cmd: String,
            username: String,
            password: String,
            cache_directory: String,
            device_id: Arc<RwLock<Option<String>>>,
            access_token_provider: &AccessTokenProvider,
        ) -> Result<(Self, Arc<RwLock<Child>>), std::io::Error> {
            let child = Self::spawn(
                &username,
                &password,
                &device_name,
                &librespot_cmd,
                &cache_directory,
            )?;
            let rw_child = Arc::new(RwLock::new(child));
            let supervised_cmd = SupervisedCommand {
                cmd,
                device_name: device_name.to_string().clone(),
                access_token_provider: access_token_provider.clone(),
                child: Arc::clone(&rw_child),
                device_id,
                librespot_cmd,
                username,
                password,
                cache_directory,
            };
            Ok((supervised_cmd, rw_child))
        }
    }

    impl ExternalCommand {
        /// Builds the connector from SPOTIFY_CONNECT_* environment variables.
        pub fn new_from_env(
            access_token_provider: &AccessTokenProvider,
            device_name: String,
        ) -> Fallible<Self> {
            let cmd = env::var("SPOTIFY_CONNECT_COMMAND").map_err(Context::new)?;
            let username = env::var("SPOTIFY_CONNECT_USERNAME").map_err(Context::new)?;
            let password = env::var("SPOTIFY_CONNECT_PASSWORD").map_err(Context::new)?;
            let librespot_cmd = env::var("SPOTIFY_CONNECT_LIBRESPOT").map_err(Context::new)?;
            let cache_directory = env::var("SPOTIFY_CONNECT_CACHE_DIRECTORY").map_err(Context::new)?;
            Self::new(
                access_token_provider,
                cmd,
                device_name,
                username,
                password,
                cache_directory,
                librespot_cmd,
            )
        }

        /// Spawns the child plus its watcher and supervisor threads.
        pub fn new(
            access_token_provider: &AccessTokenProvider,
            cmd: String,
            device_name: String,
            username: String,
            password: String,
            cache_directory: String,
            librespot_cmd: String,
        ) -> Fallible<Self> {
            let device_id = Arc::new(RwLock::new(None));
            let (supervised_cmd, rw_child) = SupervisedCommand::new(
                cmd.to_string().clone(),
                &device_name,
                librespot_cmd,
                username,
                password,
                cache_directory,
                Arc::clone(&device_id),
                access_token_provider,
            )?;
            // Watcher handle is intentionally detached; supervisor handle is
            // kept alive in the struct.
            let _ = supervised_cmd.spawn_device_id_watcher();
            let supervisor = supervised_cmd.spawn_supervisor();
            Ok(ExternalCommand {
                device_id,
                child: rw_child,
                _supervisor: supervisor,
            })
        }
    }

    impl SpotifyConnector for ExternalCommand {
        /// Restart is implemented by killing the child; the supervisor loop
        /// notices the exit and respawns it.
        fn request_restart(&self) {
            if let Err(err) = self.child.write().unwrap().kill() {
                error!("While trying to restart Spotify Connector ExternalCommand, terminating the running process failed: {}", err);
            } else {
                error!("While trying to restart Spotify Connector ExternalCommand, successfully killed running process");
            }
        }
        fn device_id(&self) -> Option<String> {
            let reader = self.device_id.read().unwrap();
            (*reader).clone()
        }
    }
}
//! FUSE kernel driver communication
//!
//! Raw communication channel to the FUSE kernel driver.

// Module wiring is feature-driven: libfuse2/libfuse3 select the corresponding
// C bindings, while fuse_pure is the no-libfuse fallback. fuse2_sys is also
// compiled for tests so with_fuse_args can be exercised.
#[cfg(feature = "libfuse2")]
mod fuse2;
#[cfg(any(feature = "libfuse", test))]
mod fuse2_sys;
#[cfg(feature = "libfuse3")]
mod fuse3;
#[cfg(feature = "libfuse3")]
mod fuse3_sys;
#[cfg(not(feature = "libfuse"))]
mod fuse_pure;
pub mod mount_options;

#[cfg(any(feature = "libfuse", test))]
use fuse2_sys::fuse_args;
#[cfg(any(test, not(feature = "libfuse")))]
use std::fs::File;
use std::io;

#[cfg(any(feature = "libfuse", test))]
use mount_options::MountOption;

/// Helper function to provide options as a fuse_args struct
/// (which contains an argc count and an argv pointer)
#[cfg(any(feature = "libfuse", test))]
fn with_fuse_args<T, F: FnOnce(&fuse_args) -> T>(options: &[MountOption], f: F) -> T {
    use mount_options::option_to_string;
    use std::ffi::CString;
    // argv[0] is a fixed program name; each option becomes a "-o <opt>" pair.
    let mut args = vec![CString::new("rust-fuse").unwrap()];
    for x in options {
        args.extend_from_slice(&[
            CString::new("-o").unwrap(),
            CString::new(option_to_string(x)).unwrap(),
        ]);
    }
    // `args` (the owning CStrings) stays alive for the whole call to `f`, so
    // the raw pointers collected here remain valid inside the callback.
    let argptrs: Vec<_> = args.iter().map(|s| s.as_ptr()).collect();
    f(&fuse_args {
        argc: argptrs.len() as i32,
        argv: argptrs.as_ptr(),
        allocated: 0,
    })
}

#[cfg(feature = "libfuse2")]
pub use fuse2::Mount;
#[cfg(feature = "libfuse3")]
pub use fuse3::Mount;
#[cfg(not(feature = "libfuse"))]
pub use fuse_pure::Mount;

#[cfg(not(feature = "libfuse3"))]
use std::ffi::CStr;

// Thin wrapper over the platform's unmount syscall: BSD-likes spell it
// unmount(2) with flags, everything else umount(2).
#[cfg(not(feature = "libfuse3"))]
#[inline]
fn libc_umount(mnt: &CStr) -> io::Result<()> {
    #[cfg(any(
        target_os = "macos",
        target_os = "freebsd",
        target_os = "dragonfly",
        target_os = "openbsd",
        target_os = "bitrig",
        target_os = "netbsd"
    ))]
    let r = unsafe { libc::unmount(mnt.as_ptr(), 0) };

    #[cfg(not(any(
        target_os = "macos",
        target_os = "freebsd",
        target_os = "dragonfly",
        target_os = "openbsd",
        target_os = "bitrig",
        target_os = "netbsd"
    )))]
    let r = unsafe { libc::umount(mnt.as_ptr()) };
    if r < 0 {
        Err(io::Error::last_os_error())
    } else {
        Ok(())
    }
}

/// Warning: This will return true if the filesystem has been detached (lazy unmounted), but not
/// yet destroyed by the kernel.
#[cfg(any(test, not(feature = "libfuse")))]
fn is_mounted(fuse_device: &File) -> bool {
    use libc::{poll, pollfd};
    use std::os::unix::prelude::AsRawFd;
    // Poll with no requested events and zero timeout: a mounted FUSE device
    // reports nothing (res == 0); an unmounted one raises POLLERR.
    let mut poll_result = pollfd {
        fd: fuse_device.as_raw_fd(),
        events: 0,
        revents: 0,
    };
    loop {
        let res = unsafe { poll(&mut poll_result, 1, 0) };
        break match res {
            0 => true,
            1 => (poll_result.revents & libc::POLLERR) != 0,
            -1 => {
                let err = io::Error::last_os_error();
                if err.kind() == io::ErrorKind::Interrupted {
                    // EINTR: retry the poll.
                    continue;
                } else {
                    // This should never happen. The fd is guaranteed good as `File` owns it.
                    // According to man poll ENOMEM is the only error code unhandled, so we panic
                    // consistent with rust's usual ENOMEM behaviour.
                    panic!("Poll failed with error {}", err)
                }
            }
            _ => unreachable!(),
        };
    }
}

/// Ensures that an os error is never 0/Success
fn ensure_last_os_error() -> io::Error {
    let err = io::Error::last_os_error();
    match err.raw_os_error() {
        Some(0) => io::Error::new(io::ErrorKind::Other, "Unspecified Error"),
        _ => err,
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use std::{ffi::CStr, mem::ManuallyDrop};

    // with_fuse_args should produce argv = ["rust-fuse", "-o", "<opt>", ...].
    #[test]
    fn fuse_args() {
        with_fuse_args(
            &[
                MountOption::CUSTOM("foo".into()),
                MountOption::CUSTOM("bar".into()),
            ],
            |args| {
                let v: Vec<_> = (0..args.argc)
                    .map(|n| unsafe {
                        CStr::from_ptr(*args.argv.offset(n as isize))
                            .to_str()
                            .unwrap()
                    })
                    .collect();
                assert_eq!(*v, ["rust-fuse", "-o", "foo", "-o", "bar"]);
            },
        );
    }

    // Shells out to `mount` and returns the lines mentioning fuse, used to
    // verify mount visibility from outside the process.
    fn cmd_mount() -> String {
        std::str::from_utf8(
            std::process::Command::new("sh")
                .arg("-c")
                .arg("mount | grep fuse")
                .output()
                .unwrap()
                .stdout
                .as_ref(),
        )
        .unwrap()
        .to_owned()
    }

    #[test]
    fn mount_unmount() {
        // We use ManuallyDrop here to leak the directory on test failure. We don't
        // want to try and clean up the directory if it's a mountpoint otherwise we'll
        // deadlock.
        let tmp = ManuallyDrop::new(tempfile::tempdir().unwrap());
        let (file, mount) = Mount::new(&tmp.path(), &[]).unwrap();
        let mnt = cmd_mount();
        eprintln!("Our mountpoint: {:?}\nfuse mounts:\n{}", tmp.path(), mnt,);
        assert!(mnt.contains(&*tmp.path().to_string_lossy()));
        assert!(is_mounted(&file));
        drop(mount);
        let mnt = cmd_mount();
        eprintln!("Our mountpoint: {:?}\nfuse mounts:\n{}", tmp.path(), mnt,);
        let detached = !mnt.contains(&*tmp.path().to_string_lossy());
        // Linux supports MNT_DETACH, so we expect unmount to succeed even if the FS
        // is busy. Other systems don't so the unmount may fail and we will still
        // have the mount listed. The mount will get cleaned up later.
        #[cfg(target_os = "linux")]
        assert!(detached);
        if detached {
            // We've detached successfully, it's safe to clean up:
            std::mem::ManuallyDrop::<_>::into_inner(tmp);
        }
        // Filesystem may have been lazy unmounted, so we can't assert this:
        // assert!(!is_mounted(&file));
    }
}
/// Prints a stack of `#` bars, one per line, where each entry in the
/// sequence is the width of that bar.
fn main() {
    // A borrowed array avoids the heap allocation `vec![...]` made, and
    // `println!` replaces the manual `"\n"` concatenation through `print!`.
    for &width in &[6, 1, 1, 5, 1, 1, 1] {
        println!("{}", "#".repeat(width));
    }
}
pub type Matrix = Vec<Vec<f32>>;

/// Computes the product of the inputs `mat1` and `mat2`.
///
/// `mat1` is `rows x k`, `mat2` is `k x cols`; the result is `rows x cols`.
/// An empty `mat1` yields an empty result.
///
/// # Panics
/// Panics if the inner dimensions disagree (`mat1[0].len() != mat2.len()`).
pub fn mat_mult(mat1: &Matrix, mat2: &Matrix) -> Matrix {
    // Empty left operand: the product has no rows. The original indexed
    // `mat1[0]` / `mat2[0]` before any check, panicking with an unhelpful
    // out-of-bounds message instead.
    if mat1.is_empty() {
        return Vec::new();
    }
    // Validate dimensions *before* indexing so a mismatch fails with a clear
    // assertion message.
    assert!(
        mat1[0].len() == mat2.len(),
        "inner dimensions do not match: {} vs {}",
        mat1[0].len(),
        mat2.len()
    );
    let cols = if mat2.is_empty() { 0 } else { mat2[0].len() };
    mat1.iter()
        .map(|row| {
            (0..cols)
                .map(|col| {
                    // Dot product of `row` with column `col` of `mat2`.
                    // `row` and `mat2` have equal length (asserted above).
                    row.iter()
                        .zip(mat2.iter())
                        .map(|(a, b_row)| a * b_row[col])
                        .sum()
                })
                .collect()
        })
        .collect()
}
/// Entry point: prints a greeting, then passes a tuple through
/// `anotherfunction` and reports its result.
fn main() {
    println!("Hello, world!");
    let tup = (500, 6.4, 1);
    println!("Value of z is {}", anotherfunction(tup));
}

/// Prints the middle element of `tup`, runs a short countdown, and returns
/// the final element unchanged.
fn anotherfunction(tup: (i32, f64, u8)) -> u8 {
    println!("Value of y is {}", tup.1);
    anotheranotherfunction(3);
    tup.2
}

/// Counts down from `x` to 1, printing "<n>!" each step, then prints
/// "LIFTOFF!!". Fix: the original looped `while x != 0`, which never
/// terminates for negative `x` (overflow panic in debug builds); `x > 0`
/// makes non-positive inputs go straight to liftoff.
fn anotheranotherfunction(mut x: i32) {
    while x > 0 {
        println!("{}!", x);
        x -= 1;
    }
    println!("LIFTOFF!!");
}
use super::models::Todo;
use sqlx::postgres::PgPool;

/// Fetches every todo row belonging to `owner_id` and maps each into the
/// application's `Todo` model.
///
/// NOTE(review): every `.unwrap()` here (query failure, NULL `posted_time`)
/// panics the task — confirm callers expect panic-on-DB-error semantics.
pub async fn get_todos_for_owner_db(pool: &PgPool, owner_id: i32) -> Vec<Todo> {
    let todo_rows = sqlx::query!(
        "SELECT owner_id, todo_id, todo_name, posted_time FROM todos where owner_id = $1",
        owner_id
    )
    .fetch_all(pool)
    .await
    .unwrap();
    // Convert raw records to owned `Todo`s; `todo_name` is cloned because the
    // records are only borrowed by `iter()`.
    todo_rows
        .iter()
        .map(|todo_row| Todo {
            todo_id: todo_row.todo_id,
            owner_id: todo_row.owner_id,
            todo_name: todo_row.todo_name.clone(),
            posted_time: Some(chrono::NaiveDateTime::from(todo_row.posted_time.unwrap())),
        })
        .collect()
}

/// Fetches a single todo identified by (`owner_id`, `todo_id`).
///
/// NOTE(review): `fetch_one` + `.unwrap()` panics when no matching row
/// exists — verify the HTTP layer guards against unknown ids.
pub async fn get_todo_details_db(pool: &PgPool, owner_id: i32, todo_id: i32) -> Todo {
    let todo_row = sqlx::query!(
        "SELECT owner_id, todo_id, todo_name, posted_time FROM todos where owner_id = $1 and todo_id = $2",
        owner_id,
        todo_id
    )
    .fetch_one(pool)
    .await
    .unwrap();
    Todo {
        todo_id: todo_row.todo_id,
        owner_id: todo_row.owner_id,
        todo_name: todo_row.todo_name.clone(),
        posted_time: Some(chrono::NaiveDateTime::from(todo_row.posted_time.unwrap())),
    }
}

/// Inserts `new_todo` and returns the stored row (including the
/// database-assigned `posted_time`) as a `Todo`.
///
/// NOTE(review): `todo_id` is supplied by the caller rather than generated
/// by the database — presumably intentional; confirm uniqueness is enforced.
pub async fn post_new_todo_db(pool: &PgPool, new_todo: Todo) -> Todo {
    let todo_row = sqlx::query!("insert into todos (todo_id,owner_id, todo_name) values ($1,$2,$3) returning owner_id, todo_id,todo_name, posted_time",
        new_todo.todo_id,
        new_todo.owner_id,
        new_todo.todo_name)
        .fetch_one(pool)
        .await.unwrap();
    Todo {
        todo_id: todo_row.todo_id,
        owner_id: todo_row.owner_id,
        todo_name: todo_row.todo_name.clone(),
        posted_time: Some(chrono::NaiveDateTime::from(todo_row.posted_time.unwrap())),
    }
}
use std::fmt::Debug;
use std::collections::HashMap;
use std::collections::VecDeque;

// Arena handle: nodes refer to each other by position in `Forest::arena`.
type Index = usize;

/// A single arena-allocated tree node.
#[derive(Debug, Clone)]
pub struct Node<T> where T: Clone {
    // This node's own slot in the arena.
    index: usize,
    // Position of this node within its parent's `children` list, if any.
    child_index: Option<usize>,
    val: T,
    parent: Option<usize>,
    children: Vec<usize>,
    // Marks a node the evaluator has finished reducing.
    exhausted: bool,
}

/// The expression language stored in the forest. Structural variants
/// (Call/Array/Map/...) carry their contents as node children; leaf variants
/// carry interned indices or numbers.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Expr {
    Call,
    Symbol(Index),
    Scope(Index),
    LogicVariable(Index),
    Num(isize),
    Do,
    Array,
    Map, // How do I do this one? I guess its children are just even?
    Quote,
}

/// The four rewrite-tracking slots (before/after expression and
/// sub-expression), generic so it can hold either indices or nodes.
#[derive(Debug, Clone)]
pub struct Meta<T> where T : Debug + Clone {
    original_expr: T,
    original_sub_expr: T,
    new_expr: T,
    new_sub_expr: T,
}

// Allocates and uses more storage than I technically need.
// But I doubt this will ever be the bottleneck.
/// String interner: maps symbols to stable indices and back.
#[derive(Debug, Clone)]
pub struct Interner {
    lookup: HashMap<String, Index>,
    storage: Vec<String>
}

impl Interner {
    fn new() -> Interner {
        Interner {
            lookup: HashMap::new(),
            storage: vec![],
        }
    }

    /// Returns the existing index for `symbol`, or assigns the next one.
    pub fn intern(&mut self, symbol : &str) -> Index {
        if let Some(index) = self.lookup.get(symbol) {
            return *index
        }
        let index = self.storage.len();
        self.lookup.insert(symbol.to_string(), index);
        self.storage.push(symbol.to_string());
        index
    }

    /// Index -> string (None if the index was never assigned).
    pub fn lookup(&self, index: Index) -> Option<&String> {
        self.storage.get(index)
    }

    /// String -> index without interning.
    pub fn get_index(&self, symbol : &str) -> Option<&Index> {
        self.lookup.get(symbol)
    }
}

/// Arena-backed tree with a distinguished root and a movable focus (cursor).
#[derive(Debug, Clone)]
pub struct Forest<T> where T : Clone {
    pub root: Index,
    pub focus: Index,
    pub arena: Vec<Node<T>>,
    // Next index to hand out; usually arena.len(), but MetaForest offsets it.
    current_index: usize,
}

// Probably going to need to extend this to have root and focus
/// Read-only tree access shared by `Forest` and `MetaForest`.
pub trait ForestLike<T> where T : Clone + Debug {
    fn get_children(&self, index: Index) -> Option<&Vec<Index>>;
    fn get(&self, index: Index) -> Option<&Node<T>>;
    fn get_focus_node(&self) -> Option<&Node<T>>;
    fn get_focus(&self) -> Index;
    fn get_root(&self) -> Index;

    // Recursive worker for `print_tree`: draws one node with box-drawing
    // style prefixes, then recurses into its children.
    fn print_tree_inner<F>(&self, node: &Node<T>, prefix: String, last: bool, formatter: &F) where F : Fn(&T) -> String {
        let current_prefix = if last { "`- " } else { "|- " };
        println!("{}{}{} {} {}", prefix, current_prefix, formatter(&node.val), node.index, node.exhausted);
        let child_prefix = if last { " " } else { "| " };
        let prefix = prefix + child_prefix;
        let children = self.get_children(node.index).unwrap();
        if !children.is_empty() {
            let last_child = children.len() - 1;
            for (i, child_index) in children.iter().enumerate() {
                if let Some(child) = self.get(*child_index) {
                    self.print_tree_inner(&child, prefix.to_string(), i == last_child, formatter);
                }
            }
        }
    }

    // https://vallentin.dev/2019/05/14/pretty-print-tree
    /// Pretty-prints the subtree rooted at `index` to stdout.
    fn print_tree<F>(&self, index: Index, formatter: F) where F : Fn(&T) -> String {
        if let Some(node) = self.get(index) {
            self.print_tree_inner(node, "".to_string(), true, &formatter)
        }
    }

    /// Deep-copies the subtree at `from_index` into `forest`, returning the
    /// new location of `focus_index` so a cursor survives the copy.
    fn copy_tree_helper(&self, mut focus_index: Index, from_index: Index, into_parent_index: Option<Index>, forest: &mut Forest<T>) -> Option<Index> {
        if let Some(node) = self.get(from_index) {
            // No parent means this copy becomes the destination's root.
            let new_index = if let Some(parent_index) = into_parent_index {
                forest.insert(node.val.clone(), parent_index, node.exhausted)
            } else {
                Some(forest.insert_root(node.val.clone(), node.exhausted))
            };
            // Remap the tracked focus once we copy the node it points at.
            if from_index == focus_index {
                if let Some(i) = new_index {
                    focus_index = i;
                }
            }
            for child_index in self.get_children(node.index).unwrap() {
                if let Some(i) = self.copy_tree_helper(focus_index, *child_index, new_index, forest) {
                    focus_index = i;
                }
            }
            Some(focus_index)
        } else {
            None
        }
    }
}
// Maybe this should string build instead of print?
// Recursive worker for `print_expr`: renders one node (and its children) of
// the expression tree to stdout in source-like syntax.
fn print_expr_inner(forest : &impl ForestLike<Expr>, node: &Node<Expr>, formatter: &impl FormatExpr) {
    match &node.val {
        // Calls render as `head(arg, arg)`: the first child is the callee,
        // so "(" is printed after it rather than before the loop.
        Expr::Call => {
            let children = forest.get_children(node.index).unwrap();
            let mut is_first = true;
            let last_child = children.len().saturating_sub(1);
            for (i, child_index) in children.iter().enumerate() {
                print_expr_inner(forest, forest.get(*child_index).unwrap(), formatter);
                if is_first {
                    print!("(");
                    is_first = false;
                } else if i == last_child {
                    print!(")");
                } else {
                    print!(", ");
                }
            }
        }
        Expr::Array => {
            let children = forest.get_children(node.index).unwrap();
            // NOTE(review): underflows (debug panic) when an array node has
            // no children; also the last child prints "]" in the loop AND
            // again after it, yielding "]]" — confirm whether intended.
            let last_child = children.len() - 1;
            print!("[");
            for (i, child_index) in children.iter().enumerate() {
                print_expr_inner(forest, forest.get(*child_index).unwrap(), formatter);
                if i == last_child {
                    print!("]");
                } else {
                    print!(", ");
                }
            }
            print!("]");
        }
        // Map children alternate key, value, key, value ...
        Expr::Map => {
            let children = forest.get_children(node.index).unwrap();
            let last_child = children.len() - 1;
            print!("{{");
            for (i, child_index) in children.iter().enumerate() {
                print_expr_inner(forest, forest.get(*child_index).unwrap(), formatter);
                if i % 2 == 0 {
                    print!(": ");
                } else if i != last_child {
                    print!(", ");
                }
            }
            print!("}}");
        }
        Expr::Quote => {
            print!("'");
            let children = forest.get_children(node.index).unwrap();
            for (i, child_index) in children.iter().enumerate() {
                print_expr_inner(forest, forest.get(*child_index).unwrap(), formatter);
            }
        }
        // Leaves (and Do/Scope/etc.): delegate rendering to the formatter,
        // then print any children in order.
        x => {
            print!("{}", formatter.format_expr(&x));
            let children = forest.get_children(node.index).unwrap();
            for (i, child_index) in children.iter().enumerate() {
                print_expr_inner(forest, forest.get(*child_index).unwrap(), formatter);
            }
        }
    }
}

/// Prints the expression rooted at `index`, followed by a newline.
pub fn print_expr(forest : &impl ForestLike<Expr>, index: Index, formatter: &impl FormatExpr) {
    if let Some(node) = forest.get(index) {
        print_expr_inner(forest, node, formatter);
        println!("");
    }
}

// Plain Forest: ForestLike is a direct view of the arena.
impl<T> ForestLike<T> for Forest<T> where T : Clone + Debug {
    fn get_children(&self, index: Index) -> Option<&Vec<Index>> {
        self.get(index).map(|x | &x.children)
    }
    fn get(&self, index: Index) -> Option<&Node<T>> {
        self.arena.get(index)
    }
    fn get_focus_node(&self) -> Option<&Node<T>> {
        self.get(self.focus)
    }
    fn get_focus(&self) -> Index {
        self.focus
    }
    fn get_root(&self) -> Index {
        self.root
    }
}

/// Overlay view of a `Forest` that splices in a small synthetic tree of meta
/// information (the rewrite's before/after expressions). Indices at or above
/// `meta_index_start` resolve into `meta_nodes`; everything below falls
/// through to the borrowed base forest.
#[derive(Debug, Clone)]
pub struct MetaForest<'a, T> where T : Clone + Debug {
    pub forest: &'a Forest<T>,
    pub meta: Meta<Index>,
    // Parents of the four meta-tracked nodes in the base forest.
    pub meta_parents: Meta<Option<Index>>,
    // First index reserved for the overlay; base indices stay below it.
    pub meta_index_start: Index,
    pub meta_nodes: Forest<T>,
}

impl<'a> MetaForest<'a, Expr> {
    fn new(meta: Meta<Index>, forest: &'a Forest<Expr>, symbols: & Interner) -> MetaForest<'a, Expr> {
        let meta_index_start = forest.arena.len();
        let mut meta_nodes = Forest::new();
        // Start the overlay's index counter past the base forest so overlay
        // node indices never collide with base indices.
        meta_nodes.current_index = meta_index_start;
        let mut meta_forest: MetaForest<'a, Expr> = MetaForest {
            meta,
            forest,
            meta_parents: Meta { original_expr: None, original_sub_expr: None, new_expr: None, new_sub_expr: None},
            meta_index_start,
            meta_nodes: meta_nodes,
        };
        meta_forest.setup(symbols);
        meta_forest
    }

    /// Builds the overlay tree: a Map whose children alternate
    /// symbol-key / expression-value for the four meta slots. Slot layout in
    /// `meta_nodes.arena`: 0 = Map root, odd slots = keys, even slots 2/4/6/8
    /// = the copied expression nodes (hence the `meta_index_start + 2/4/6/8`
    /// child pushes below).
    pub fn setup(&mut self, symbols: & Interner) {
        let original_expr = self.forest.get(self.meta.original_expr);
        let original_sub_expr =self.forest.get(self.meta.original_sub_expr);
        let new_expr = self.forest.get(self.meta.new_expr);
        let new_sub_expr = self.forest.get(self.meta.new_sub_expr);
        self.meta_parents = Meta {
            original_expr: original_expr.and_then(|x| x.parent),
            original_sub_expr: original_sub_expr.and_then(|x| x.parent),
            new_expr: new_expr.and_then(|x| x.parent),
            // NOTE(review): reads `new_expr`, not `new_sub_expr` — looks like
            // a copy-paste bug; confirm intended source of this parent.
            new_sub_expr: new_expr.and_then(|x| x.parent),
        };
        // I'm really not sure about all of this.
        // It definitely doesn't feel right.
        // NOTE(review): `insert_root_val` returns `()`, so `location` is
        // unused — presumably leftover from an earlier API.
        let location = self.meta_nodes.insert_root_val(Expr::Map);
        // I've messed with the index here. Need to fix it back to 0.
        self.meta_nodes.root = 0;
        self.meta_nodes.focus = 0;
        self.meta_nodes.insert_child(Expr::Symbol(*symbols.get_index("original_expr").unwrap()));
        let root = self.meta_nodes.arena.get_mut(self.meta_nodes.root ).unwrap();
        root.children.push(self.meta_index_start + 2);
        self.meta_nodes.insert_node(original_expr.unwrap().clone());
        self.meta_nodes.insert_child(Expr::Symbol(*symbols.get_index("original_sub_expr").unwrap()));
        let root = self.meta_nodes.arena.get_mut(self.meta_nodes.root).unwrap();
        root.children.push(self.meta_index_start + 4);
        self.meta_nodes.insert_node(original_sub_expr.unwrap().clone());
        self.meta_nodes.insert_child(Expr::Symbol(*symbols.get_index("new_expr").unwrap()));
        let root = self.meta_nodes.arena.get_mut(self.meta_nodes.root).unwrap();
        root.children.push(self.meta_index_start + 6);
        self.meta_nodes.insert_node(new_expr.unwrap().clone());
        self.meta_nodes.insert_child(Expr::Symbol(*symbols.get_index("new_sub_expr").unwrap()));
        let root = self.meta_nodes.arena.get_mut(self.meta_nodes.root).unwrap();
        root.children.push(self.meta_index_start + 8);
        self.meta_nodes.insert_node(new_sub_expr.unwrap().clone());
        // let new_children = self.meta_nodes.get_root().unwrap().children.iter().map(|i| i + self.meta_index_start).collect();
        // let root = self.meta_nodes.arena.get_mut(self.meta_nodes.root).unwrap();
        // root.children = new_children;
    }
}
// What does it mean to mutate meta? Does it mean mutating the actual scope?
// If I get to the point of mutating meta, I could track the offset of the meta
// information and if you are past the meta_index_start subtract the length
// of meta to find the underlying data.
// Need to build up all these structures so they give coherent answers impl<'a> ForestLike<Expr> for MetaForest<'a, Expr> { fn get_children(&self, index: Index) -> Option<&Vec<Index>> { if Some(index) == self.meta_parents.original_expr { self.meta_nodes.get(2).map(|x| &x.children) } else if Some(index) == self.meta_parents.original_sub_expr { self.meta_nodes.get(4).map(|x| &x.children) } else if Some(index) == self.meta_parents.new_expr { self.meta_nodes.get(6).map(|x| &x.children) } else if Some(index) == self.meta_parents.new_sub_expr { self.meta_nodes.get(8).map(|x| &x.children) } else if index >= self.meta_index_start { let offset = index - self.meta_index_start; if offset <= 8 { self.meta_nodes.get(offset).map(|x | &x.children) } else { panic!("Asking for meta_children that is too big"); } } else { self.forest.get(index).map(|x | &x.children) } } fn get(&self, index: Index) -> Option<&Node<Expr>> { if index >= self.meta_index_start { let offset = index - self.meta_index_start; if offset <= 8 { self.meta_nodes.get(offset) } else { println!("{:?}, {:?}", self.meta_index_start, index); panic!("Asking for meta that is too big"); } } else if index == self.meta.new_expr { self.forest.get(self.meta.original_expr) } else if index == self.meta.new_sub_expr { self.forest.get(self.meta.original_sub_expr) } else { self.forest.get(index) } } fn get_focus_node(&self) -> Option<&Node<Expr>> { self.get(self.meta_index_start) } fn get_focus(&self) -> Index { self.meta_index_start } fn get_root(&self) -> Index { self.meta_index_start } } impl<T> Forest<T> where T : Clone + Debug { fn new() -> Forest<T> { Forest { root: 0, focus: 0, arena: vec![], current_index: 0, } } fn insert_root(&mut self, t: T, exhausted: bool) -> Index { let index = self.next_index(); let n = Node { index, child_index: None, val: t, parent: None, children: vec![], exhausted, }; self.arena.push(n); index } fn next_index(&mut self) -> usize { let index = self.current_index; self.current_index += 1; index } 
// What if parent doesn't exist? fn insert(&mut self, t: T, parent: Index, exhausted: bool) -> Option<Index> { let index = self.next_index(); let p = self.arena.get_mut(parent)?; p.children.push(index); let child_index = (&*p).children.len() - 1; self.arena.push(Node { index, child_index: Some(child_index), val: t, parent: Some(parent), children: vec![], exhausted, }); Some(index) } fn insert_node(&mut self, mut node: Node<T>) -> Index { let index = self.next_index(); node.index = index; self.arena.push(node); index } // focus_index is here so you can keep your focus after the tree is copied. fn copy_tree_helper(&self, mut focus_index: Index, from_index: Index, into_parent_index: Option<Index>, forest: &mut Forest<T>) -> Option<Index> { if let Some(node) = self.get(from_index) { let new_index = if let Some(parent_index) = into_parent_index { forest.insert(node.val.clone(), parent_index, node.exhausted) } else { forest.insert_root(node.val.clone(), node.exhausted); Some(0) }; if from_index == focus_index { if let Some(i) = new_index { focus_index = i; } } for child_index in node.children.clone() { if let Some(i) = self.copy_tree_helper(focus_index, child_index, new_index, forest) { focus_index = i; } } Some(focus_index) } else { None } } fn copy_tree_helper_f<F>(&self, from_index: Index, current_index: Index, parent_index: Option<Index>, forest: &mut Forest<T>, index_f : & F) where F : Fn(&T) -> Option<Index> { if let Some(node) = self.get(current_index) { let node = if let Some(new_index) = index_f(&node.val) { // println!("{:?} {:?} {:?}", new_index, current_index, forest); forest.get(new_index).unwrap().clone() } else { node.clone() }; let new_parent_index = if from_index == current_index { // I don't understand this. I thought I did. // The idea here is that the first call shouldn't do anything. 
// I thought I could just set this to parent_index, // but that doesn't work even though when this function is called, // the value of parent index is forest.focus // This might only work in some weird case and actually fail otherwise. Some(forest.focus) } else if parent_index.is_none() { // println!("No parent {:?}", node.val); let new_root = forest.insert_root(node.val, node.exhausted); forest.root = new_root; forest.focus = new_root; Some(new_root) } else { // println!("Parent {:?}", node.val); forest.insert(node.val, parent_index.unwrap(), node.exhausted) }; for child_index in node.children.clone() { self.copy_tree_helper_f(from_index, child_index, new_parent_index, forest, index_f); } } } // Might need to do this for a list of indexes? // sub_index is basically focus. I maybe don't need it? fn garbage_collect(&mut self, index: Index, sub_index: Index) -> Option<Index> { let mut forest = Forest::new(); let mut result_index = None; if let Some(node) = self.get(index) { result_index = self.copy_tree_helper(sub_index, index, None, &mut forest); } *self = forest; result_index } // Returns old nodes new location fn persistent_change(&mut self, t : T, index: Index) -> Option<Index> { let node = self.arena.get_mut(index)?; let node_clone = node.clone(); node.val = t; Some(self.insert_node(node_clone)) } fn clear_children(&mut self, index: Index) { if let Some(node) = self.arena.get_mut(index) { node.children.clear(); } } fn exhaust_focus(&mut self) { if let Some(node) = self.arena.get_mut(self.focus) { node.exhausted = true } } pub fn insert_child(&mut self, t : T) -> Option<Index> { // I could exhaust here if I did this // from program and new things about rules. 
self.insert(t, self.focus, false) } pub fn make_last_child_focus(&mut self) { let mut new_focus = None; if let Some(node) = self.get_focus_node() { if let Some(index) = node.children.last() { new_focus = Some(*index); } } if let Some(new_focus) = new_focus { self.move_focus(new_focus); } } pub fn make_first_child_focus(&mut self) { let mut new_focus = None; if let Some(node) = self.get_focus_node() { if let Some(index) = node.children.first() { new_focus = Some(*index); } } if let Some(new_focus) = new_focus { self.move_focus(new_focus); } } pub fn make_parent_focus(&mut self) { if let Some(parent) = self.get_focus_parent() { self.focus = parent; } } pub fn get_last_inserted_val(&self) -> Option<&T> { let node = self.arena.get(self.focus)?; let index = if node.children.len() > 0 { *node.children.last().unwrap() } else { self.focus }; self.get(index).map(|x| &x.val) } pub fn make_last_inserted_focus(&mut self) { if let Some(node) = self.arena.get(self.focus) { let index = if node.children.len() > 0 { *node.children.last().unwrap() } else { self.focus }; self.focus = index; } } pub fn swap_and_insert(&mut self, t: T) { if let Some(node) = self.arena.get(self.focus) { let index = if node.children.len() > 0 { *node.children.last().unwrap() } else { self.focus }; let mut node = self.arena.get_mut(index).unwrap(); let node_value = node.val.clone(); node.val = t; self.focus = index; self.insert_child(node_value); } } pub fn insert_root_val(&mut self, t: T) { let root = self.insert_root(t, false); self.root = root; self.focus = root; } // I should probably cache this root? // I can easily do this if changing root // goes through some method. fn get_root(&self) -> Option<&Node<T>> { self.get(self.root) } pub fn get_focus_val(&self) -> Option<&T> { self.get(self.focus).map(|x| &x.val) } // Could I cache this? // Trying to cache the root is actually harder than it seems. // I would need to store a reference in the struct. // But that reference needs a lifetime. 
fn root_is_exhausted(&self) -> bool { let root = self.get_root(); root.is_none() || root.unwrap().exhausted } fn focus_is_exhausted(&self) -> bool { let focus = self.get_focus_node(); focus.is_none() || focus.unwrap().exhausted } pub fn get_focus_parent(&self) -> Option<Index> { self.get_focus_node().and_then(|x| x.parent) } pub fn move_focus(&mut self, focus: Index) { self.focus = focus; } pub fn garbage_collect_self(&mut self) { if let Some(new_focus) = self.garbage_collect(self.root, self.focus) { self.root = 0; self.focus = new_focus; } } } impl Forest<Expr> { fn get_child_nums_binary(&self) -> Option<(Index, isize, isize)> { let focus = self.get_focus_node()?; if focus.children.len() != 3 { return None; } let node = self.get(*focus.children.get(0)?)?; match node { Node{val: Expr::Symbol(i), ..} => { let second_child = self.get(*focus.children.get(1)?)?; let third_child = self.get(*focus.children.get(2)?)?; if let (Node{val: Expr::Num(x), ..}, Node{val: Expr::Num(y), ..}) = (second_child, third_child) { return Some((*i, *x, *y)); } return None; } _ => return None } } pub fn focus_is_quote(&self) -> bool { let focus = self.get_focus_node(); focus.is_none() || focus.unwrap().val == Expr::Quote } pub fn pretty_print_tree(&self) { self.print_tree(self.root, |expr| { match expr { _ => format!("{:?}", expr) } }) } /// Should move to the next expr that needs evaluation fn move_to_next_reducible_expr(&mut self) { // let mut fuel = 0; loop { // fuel += 1; // if fuel > 100 { // break; // } if self.root_is_exhausted() { return } // The addition of quote here makes it so that everything below // a quote is not exhausted. Is that the correct behavior? Not sure. 
if self.focus_is_exhausted() || self.focus_is_quote() { // noop if exhausted, exhausts quote otherwise; self.exhaust_focus(); if let Some(index) = self.get_focus_parent() { // println!("Moving to parent {:?}", self.get_focus_node()); self.focus = index; continue; } else { return } } if let Some(focus) = self.get_focus_node() { if focus.children.len() == 0 { return } let mut all_children_exhausted = true; for child in focus.children.iter() { if let Some(c) = self.get(*child) { if !c.exhausted { self.focus = *child; all_children_exhausted = false; break; } } } if all_children_exhausted { break; } } } } } #[derive(Debug, Clone)] pub struct Clause { pub in_scopes: Vec<Index>, pub out_scopes: Vec<Index>, pub left: Index, pub right: Index, } #[derive(Debug, Clone)] pub struct MatchingClause { pub out_scope: Index, pub rule_index: Index, pub environment: HashMap<Index, Index>, } #[derive(Debug, Clone)] pub struct Program { // Can we rewrite meta? We can match on meta and rewrite else where. // But what would it mean to rewrite meta? pub meta: Meta<Index>, // Technically these chould just be in the hashmap. // But it seems worthwhile to bring them to the top level. pub main: Forest<Expr>, pub io: Forest<Expr>, // We will need some structure for the preprocessed rules. // Like keeping a list of clauses by scope and type. 
pub rules: Forest<Expr>, pub symbols: Interner, pub scopes: HashMap<Index, Forest<Expr>>, pub clause_indexes: Vec<Clause>, pub main_scope_index: Index, pub meta_scope_index: Index, pub rules_scope_index: Index, pub io_scope_index: Index, } pub trait FormatExpr { fn format_expr(&self, expr: &Expr) -> String { format!("{:?}", expr) } } impl FormatExpr for Interner { fn format_expr(&self, expr: &Expr) -> String { match expr { Expr::Symbol(index) | Expr::LogicVariable(index) | Expr::Scope(index) => { let value = self.lookup(*index).unwrap().clone(); if value.len() == 1 && !value.chars().next().unwrap().is_alphanumeric() { format!("({})", value) } else { value } } _ => format!("{:?}", expr) } } } // These macros exist to appease the borrow checker. Can't extract them // out into functions, but as macros the borrow check understands what is happening. macro_rules! get_scope_mut_for_index { ($program:expr, $scope_index:expr) => { if $scope_index == $program.main_scope_index { &mut $program.main } else if $scope_index == $program.io_scope_index { &mut $program.io } else if $scope_index == $program.rules_scope_index { &mut $program.rules } else if $program.scopes.contains_key(&$scope_index) { $program.scopes.get_mut(&$scope_index).unwrap() } else { panic!("Scope does not exist"); } }; } macro_rules! get_scope_mut_for_index_no_rules { ($program:expr, $scope_index:expr) => { if $scope_index == $program.main_scope_index { &mut $program.main } else if $scope_index == $program.io_scope_index { &mut $program.io } else if $program.scopes.contains_key(&$scope_index) { $program.scopes.get_mut(&$scope_index).unwrap() } else { panic!("Scope does not exist"); } }; } macro_rules! 
get_scope_for_index_no_rules { ($program:expr, $scope_index:expr) => { if $scope_index == $program.main_scope_index { & $program.main } else if $scope_index == $program.io_scope_index { & $program.io } else if $program.scopes.contains_key(&$scope_index) { $program.scopes.get(&$scope_index).unwrap() } else { panic!("Scope does not exist"); } }; } macro_rules! get_scope_pairs { ($program:expr, $scope_index:expr, $effect_index:expr) => { if $scope_index == $program.main_scope_index && $effect_index == $program.io_scope_index { (&$program.main, &mut $program.io) } else if $scope_index == $program.io_scope_index && $effect_index == $program.main_scope_index { (&$program.io, &mut $program.main) } else { panic!("Pair scopes incomplete. Also, have no idea how to extend it any not anger the borrow checker."); }; }; } impl Program { pub fn new() -> Program { let mut symbols = Interner::new(); symbols.intern("builtin/+"); symbols.intern("builtin/-"); symbols.intern("builtin/*"); symbols.intern("builtin/div"); symbols.intern("fact"); let main_scope_index = symbols.intern("@main"); let io_scope_index = symbols.intern("@io"); let rules_scope_index = symbols.intern("@rules"); let meta_scope_index = symbols.intern("@meta"); symbols.intern("original_expr"); symbols.intern("original_sub_expr"); symbols.intern("new_expr"); symbols.intern("new_sub_expr"); let meta = Meta { original_expr: 0, original_sub_expr: 0, new_expr: 0, new_sub_expr: 0, }; let mut program = Program { meta: meta.clone(), main: Forest::new(), io: Forest::new(), rules: Forest::new(), symbols: symbols, scopes: HashMap::new(), clause_indexes: Vec::new(), main_scope_index, meta_scope_index, io_scope_index, rules_scope_index, }; // Hack to make it so there is always a root. 
program.main.insert_root_val(Expr::Num(0)); program.io.insert_root_val(Expr::Num(0)); program.rules.insert_root_val(Expr::Num(0)); program } fn construct_scopes(&self, scope_attribute_index : Index) -> Option<Vec<Index> >{ let array = self.rules.get(scope_attribute_index)?; let mut results = Vec::with_capacity(array.children.len()); for index in &array.children { if let Expr::Scope(i) = self.rules.get(*index)?.val { results.push(i) } } Some(results) } // So as I add rules I could totally index incrementally by keeping the last index // of how far I indexed and then only indexing more from there. pub fn set_clause_indexes(&mut self) -> Option<()> { // This is returning option of void because I'm tired of not being able to use ?. let in_scope_symbol = Expr::Symbol(*self.symbols.get_index("in_scopes")?); let out_scope_symbol = Expr::Symbol(*self.symbols.get_index("out_scopes")?); let clauses_symbol = Expr::Symbol(*self.symbols.get_index("clauses")?); // The rules are quoted so it is two levels deep. // Need to make better things for traversing. let rules = &self.rules.get(*self.rules.get_root()?.children.get(0)?)?.children; let mut clauses : Vec<Clause> = vec![]; for rule_index in rules { let rule = self.rules.get(*rule_index)?; let mut in_scope_index = None; let mut out_scope_index = None; // This is super ugly. But it kind of makes sense? 
let mut next_is_clauses = false; for attribute_index in &rule.children { let node = self.rules.get(*attribute_index)?; // Right now I'm kind of assuming these come before clauses if node.val == in_scope_symbol { in_scope_index = Some(attribute_index + 1); }; if node.val == out_scope_symbol { out_scope_index = Some(attribute_index + 1); }; if next_is_clauses { next_is_clauses = false; for clause_index in &node.children { let clause = self.rules.get(*clause_index)?; clauses.push(Clause{ left: *clause.children.get(1)?, right: *clause.children.get(3)?, // need to construct these from the indexes in_scopes: self.construct_scopes(in_scope_index?)?, out_scopes: self.construct_scopes(out_scope_index?)?, }); } } if node.val == clauses_symbol { next_is_clauses = true; } } } self.clause_indexes = clauses; Some(()) } // I want to minimize allocation here, so I might look at having // some object that will give me new environments and keep them around // so I don't have to allocate and deallocate a new one everytime. // Also will allocate the queue at a higher level instead of each function call. 
pub fn build_env(&self, scope: &impl ForestLike<Expr>, left_hand_index : Index, expr_index : Index) -> Option<HashMap<Index, Index>> { let mut env = HashMap::new(); let mut queue: VecDeque<(Index, Index)> = VecDeque::new(); queue.push_front((left_hand_index, expr_index)); let mut failed = false; while !queue.is_empty() && !failed { let (left_hand_index, expr_index) = queue.pop_front().unwrap(); let elems = (self.rules.get(left_hand_index)?, scope.get(expr_index)?); match elems { (Node{ val: Expr::LogicVariable(l_index), ..}, _) => { env.insert(*l_index, expr_index); } (Node {val: Expr::Map, index: l_index, ..}, Node{ val: Expr::Map, index: e_index, ..}) => { let l_children = self.rules.get_children(*l_index)?; let e_children = scope.get_children(*e_index)?; for i in (0..l_children.len()).step_by(2) { for j in (0..e_children.len()).step_by(2) { let i_index = *l_children.get(i)?; let j_index = *e_children.get(j)?; // This is a suboptimal way of doing these things. // TODO: Make better if self.rules.get(i_index)?.val == scope.get(j_index)?.val { // println!("{:?}, {:?}", self.rules.get(i_index)?.val, scope.get(j_index)?.val); queue.push_front((*l_children.get(i+1)?, *e_children.get(j+1)?)); } } } }, (Node{ val: l_val, index: l_index, ..}, Node{ val: e_val, index: e_index, ..}) => { let l_children = self.rules.get_children(*l_index)?; let e_children = scope.get_children(*e_index)?; if l_val != e_val { failed = true; break; } // Children length will have to change once repeats exist. // Children length is also wrong for maps. // In general, I need to handle maps differently. // Really this representation is just wrong for maps, // but I will probably ignore that for now. 
if l_children.len() != e_children.len() { failed = true; break; } for i in 0..l_children.len() { queue.push_front((*l_children.get(i)?, *e_children.get(i)?)) } } } }; if failed { None } else { Some(env) } } pub fn pretty_print_scope(&self, scope : &impl ForestLike<Expr>) { // Need to refactor to use some non closure formatter like I did with FormatExpr let index = scope.get_focus(); scope.print_tree(index, |expr| { match expr { Expr::Symbol(index) | Expr::LogicVariable(index) | Expr::Scope(index) => { let value = self.symbols.lookup(*index).unwrap().clone(); if value.len() == 1 && !value.chars().next().unwrap().is_alphanumeric() { format!("({})", value) } else { value } } _ => format!("{:?}", expr) } }) } pub fn pretty_print_main(&self) { self.pretty_print_scope(&self.main) } pub fn substitute(scope: &mut Forest<Expr>, rule_scope: &Forest<Expr>, right_index: Index, env: &HashMap<Index, Index>) -> Option<Index> { let right = rule_scope.get(right_index).unwrap().clone(); let right_replace = match right.val { Expr::LogicVariable(index) => { scope.get(*env.get(&index).unwrap()).unwrap().clone().val } val => val }; let node = scope.get_focus_node().unwrap(); let parent = node.parent.clone(); let result = scope.persistent_change(right_replace, scope.focus); let focus = scope.arena.get_mut(scope.focus).unwrap(); focus.children.clear(); rule_scope.copy_tree_helper_f(right_index, right_index, parent, scope, & |val| { match val { Expr::LogicVariable(index) => { Some(*env.get(&index).unwrap()) } _ => None } }); // What about the root? Does it actually matter or just the focus? // I do need to know the root for meta evaluation, but root should only // change here if the focus is the root. Or at least I think so. 
result } pub fn get_node_for_substitution(right: &Node<Expr>, in_scope: &impl ForestLike<Expr>, env: &HashMap<Index, Index>) -> Option<Node<Expr>> { match &right.val { Expr::LogicVariable(index) => { Some(in_scope.get(*env.get(&index)?)?.clone()) } _ => None } } pub fn transfer_and_substitute(in_scope: &impl ForestLike<Expr>, mut out_scope: &mut Forest<Expr>, rule_scope: &Forest<Expr>, right_index: Index, env: &HashMap<Index, Index>, parent_index: Option<Index>) -> Option<Index> { let node = rule_scope.get(right_index)?; let new_node = Program::get_node_for_substitution(node, in_scope, &env); let is_root = parent_index.is_none(); // Not sure if this should be root or focus? let new_location = if new_node.is_some() { let index = new_node?.index; let new_location = in_scope.copy_tree_helper(index, index, parent_index, &mut out_scope)?; if is_root { out_scope.root = new_location; } new_location } else { if is_root { out_scope.insert_root_val(node.val.clone()); out_scope.root } else { out_scope.insert(node.val.clone(), parent_index?, false)? } }; for child_index in rule_scope.get_children(right_index)? { Program::transfer_and_substitute(in_scope, out_scope, rule_scope, *child_index, env, Some(new_location)); } // We changed the focus as we were going down the tree, // but we need to reset it to our root. if is_root { out_scope.focus = out_scope.root; } None } // Needs to return output scope pub fn find_matching_rules(&self, scope_symbol_index: Index, expr_index: Index, scope: &impl ForestLike<Expr>) -> (Option<MatchingClause>, Vec<MatchingClause>) { let mut matching_rules = vec![]; let mut matching_rule = None; for Clause{left,right, in_scopes, out_scopes} in &self.clause_indexes { if !in_scopes.contains(&scope_symbol_index) { continue }; let env = self.build_env(scope, *left, expr_index); if env.is_none() { continue }; let clause = MatchingClause { // Right now we are assuming a singular out scope. // In general, I need to think about rules with multiple scopes. 
out_scope: *out_scopes.first().unwrap(), environment: env.unwrap(), rule_index: *right }; let out_scope_matches = *out_scopes.first().unwrap() == scope_symbol_index; if matching_rule.is_none() && out_scope_matches { matching_rule = Some(clause) } else if !out_scope_matches { matching_rules.push(clause); } // Need to check outscope and do side effects here. // The first element of the vector will be our main rule // and then the rest will be side effects. }; (matching_rule, matching_rules) } pub fn build_meta_forest(&self, scope_index: Index, meta_original_focus: Index, meta_original_root: Index, scope_root: Index, scope_focus: Index) -> MetaForest<Expr>{ let meta = Meta { original_expr: meta_original_root, original_sub_expr: meta_original_focus, new_expr: scope_root, new_sub_expr: scope_focus, }; let scope = self.get_scope_ref_from_index(scope_index); let symbols = &self.symbols; let meta_scope_index = self.symbols.get_index("@meta").unwrap(); let meta_forest = MetaForest::new(meta.clone(), scope, symbols); meta_forest } pub fn handle_builtin(&mut self, scope_index: Index) -> Option<(Index, Index)> { let scope = get_scope_mut_for_index!(self, scope_index); let node = scope.get_focus_node()?; if node.val != Expr::Call { return None }; let children = scope.get_children(node.index)?; let first_child = scope.get(*children.first()?)?; if let Expr::Symbol(symbol_index) = first_child.val { let symbol_value = self.symbols.lookup(symbol_index)?; match symbol_value.as_str() { "builtin/println" => { print_expr(scope, *children.get(1)?, &self.symbols); scope.exhaust_focus(); // Returning none here might not be the right option. // That means we can't meta on a print statement, which seems wrong. // But I also don't currently rewrite it. Need to revisit. return None }, "builtin/add-rule" => { scope.exhaust_focus(); // moves me to the quote. 
scope.make_last_child_focus(); // moves to the content on the quote scope.make_first_child_focus(); let rules = &mut self.rules; // moves to the array rules.make_first_child_focus(); let array = rules.get_focus(); let scope = get_scope_for_index_no_rules!(self, scope_index); // Need to actually add a rule // This means copying some tree into the rules // adding a new child node to the array of rules // and reindexing clauses // focus_index tells me where the stuff ended up. I don't care about that. // Also this is just a weird function. scope.copy_tree_helper(array, scope.get_focus(), Some(array), rules); // Maybe if I created builtin append that would be easier? Maybe? self.set_clause_indexes(); } _ => { if let Some((symbol_index, x, y)) = scope.get_child_nums_binary() { if symbol_index < 4 { // These are implicit right now based on the // order I inserted them in the constructor. let val = match symbol_index { 0 => Expr::Num(x + y), 1 => Expr::Num(x - y), 2 => Expr::Num(x * y), 3 => Expr::Num(x / y), _ => panic!("Not possible because of if above.") }; let meta_original_focus = scope.persistent_change(val, scope.focus)?; let meta_original_root = if scope.focus == scope.root { meta_original_focus } else { scope.root }; scope.clear_children(scope.focus); return Some((meta_original_focus, meta_original_root)) } } } } }; None } pub fn get_scope_ref_from_index(&self, scope_index: Index) -> &Forest<Expr> { if scope_index == self.main_scope_index { &self.main } else if scope_index == self.io_scope_index { &self.io } else if scope_index == self.rules_scope_index { &self.rules } else if self.scopes.contains_key(&scope_index) { self.scopes.get(&scope_index).unwrap() } else { panic!("Scope does not exist"); } } pub fn get_scope_index_for_scope_name(&self, scope_name: &str) -> Index { *self.symbols.get_index(scope_name).unwrap() } pub fn rewrite(&mut self, scope_index: Index) -> Option<()> { let rules = &self.rules; let scope = self.get_scope_ref_from_index(scope_index); let 
original_root = scope.root; let original_sub_expr = scope.focus; let (matching_rule, original_side_effects) = self.find_matching_rules(scope_index, scope.focus, scope); // This is one thing I don't like about rust. I have to make these // variable or else self is now borrowed both mutabily and immutabily. let scope_root = scope.root; let scope_focus = scope.focus; let meta_info = if let Some(MatchingClause{environment: env, rule_index: right, out_scope}) = &matching_rule { let scope = get_scope_mut_for_index_no_rules!(self, scope_index); let meta_original_focus = Program::substitute(scope, rules, *right, env)?; let meta_original_root = if scope.focus == scope.root { meta_original_focus } else { scope.root }; Some((meta_original_focus, meta_original_root)) } else { let result = self.handle_builtin(scope_index); result }; if let Some((meta_original_focus, meta_original_root)) = meta_info { let meta_forest = self.build_meta_forest( scope_index, meta_original_focus, meta_original_root, scope_root, scope_focus, ); let meta_scope_index = self.symbols.get_index("@meta").unwrap(); let (matching_rule, side_effects) = self.find_matching_rules(*meta_scope_index, meta_forest.meta_index_start, &meta_forest); let meta = meta_forest.meta; for effect in &side_effects { let effect_index = effect.out_scope; let symbols = &self.symbols; let rules = &self.rules; let (scope, out_scope) = get_scope_pairs!(self, scope_index, effect_index); let meta_forest = MetaForest::new(meta.clone(), scope, symbols); Program::transfer_and_substitute(&meta_forest, out_scope, rules, effect.rule_index, &effect.environment, None); self.rewrite(effect_index); } // println!("{:?}", matching_rule); } if let Some((meta_original_focus, meta_original_root)) = meta_info { let meta = Meta { original_expr: meta_original_root, original_sub_expr: meta_original_focus, new_expr: scope_root, new_sub_expr: scope_focus, }; for effect in &original_side_effects { let rules = &self.rules; let symbols = &self.symbols; let 
(scope, out_scope) = get_scope_pairs!(self, scope_index, effect.out_scope); let meta_forest = MetaForest::new(meta.clone(), scope, symbols); Program::transfer_and_substitute(&meta_forest, out_scope, rules, effect.rule_index, &effect.environment, None); self.rewrite(effect.out_scope); } } if meta_info.is_none() { let scope = get_scope_mut_for_index!(self, scope_index); // No rules matched, so we exhaust scope.exhaust_focus(); } None } fn step(&mut self) { // println!("step"); let scope = &mut self.main; scope.move_to_next_reducible_expr(); if scope.root_is_exhausted() { return } // rewrite should return old node location. // meta needs to change. self.rewrite(self.get_scope_index_for_scope_name("@main")); } pub fn full_step(&mut self) { // let mut fuel = 0; // println!("Full step"); while !self.main.root_is_exhausted() { // self.pretty_print_main(); // fuel +=1; // if fuel > 100 { // println!("break"); // break; // } self.step(); } // self.pretty_print_scope(&self.rules); // println!("{:?}", self.clause_indexes); } } // I need to do a precompute for exhaustion. // Then I could either exhaust as I parse, or exhaust across the whole tree in linear time. // Can I reduce rules into some form of bytecode? // Am I that far way from a compiler? Can I eliminate these vectors of children a the nodes? // Is there a good model of stack computation to be had here? // Need to create a way to add rules // Need to make scopes lazily evaluated. // Need to add an append builtin // Need to think about how input/repling should work // Need to think about how scoped temporary rules would work.
// Countdown demo: loop while a condition holds, printing each value.
// Rewritten with a reversed inclusive range instead of a `while` loop;
// the observable output is identical.
fn main() {
    // Walk from 8 down to 1 — each iteration corresponds to one pass of
    // the original `while nombre != 0` loop (print, then decrement).
    for nombre in (1..=8).rev() {
        // Print the current number.
        println!("{}", nombre);
    }
    // Final message once the countdown is finished.
    println!("On dégage !");
}
use std::{ fs };

/// Advent of Code 2020, day 5: find the highest seat id among the
/// boarding passes in the input file.
///
/// Reads the puzzle input, decodes every boarding pass into a
/// `(row, column)` pair, and prints the largest seat id
/// (`row * 8 + column`).
///
/// Returns `Some(true)` on success; `None` if the input holds no passes
/// (the `max()?` short-circuits on an empty iterator). Panics if the
/// input file cannot be read.
pub fn main() -> Option<bool> {
    let file_contents = match fs::read_to_string("./inputs/2020-12-05-aoc-01-input.txt") {
        Ok(c) => c,
        Err(e) => panic!("{:?}", e),
    };

    let largest_id = file_contents
        .split('\n')
        // Guard against a trailing newline producing a phantom empty pass.
        .filter(|line| !line.is_empty())
        .map(|line| {
            let (row, column) = get_row_and_column(line);
            row * 8 + column
        })
        .max()?;

    println!("The largest id is: {}", largest_id);
    Some(true)
}

/// Decode a boarding pass into its `(row, column)` seat coordinates.
///
/// Binary space partitioning is just binary: `B`/`R` pick the upper half
/// of the remaining range (bit 1) and `F`/`L` the lower half (bit 0), so
/// each character shifts one bit into the row or column accumulator.
/// This replaces the previous interval-narrowing fold and is equivalent
/// for every well-formed pass (7 row characters followed by 3 column
/// characters). Unrecognized characters (e.g. a stray `'\r'`) are
/// ignored instead of corrupting the state.
fn get_row_and_column(line: &str) -> (usize, usize) {
    line.chars().fold((0, 0), |(row, column), ch| match ch {
        'F' => (row * 2, column),
        'B' => (row * 2 + 1, column),
        'L' => (row, column * 2),
        'R' => (row, column * 2 + 1),
        _ => (row, column),
    })
}

#[test]
fn it_works() {
    assert_eq!(get_row_and_column("FBFBBFFRLR"), (44, 5));
}
/// A man-in-the-middle proxy to only intercept gacha log page
use std::{
    convert::Infallible,
    future::{self, Future, Ready},
    pin::Pin,
    sync::Arc,
    task::{Context, Poll},
};

use futures::future::TryFutureExt;
use hyper::{
    client::{
        connect::{dns::GaiResolver, HttpConnector},
        Client,
    },
    server::{
        conn::{AddrIncoming, AddrStream, Http},
        Server,
    },
    service::{service_fn, Service},
    upgrade, Body, Method, Request, Response, Uri,
};
use hyper_rustls::HttpsConnector;
use reqwest::Url;
use rustls::{Certificate, NoClientAuth, PrivateKey, ServerConfig};
use tokio::{
    io::{copy as async_copy, split as async_split},
    net::TcpStream,
    sync::mpsc,
    task::spawn,
};
use tokio_rustls::TlsAcceptor;

use crate::mitm::{DOMAIN_INTERCEPT, PAGE_INTERCEPT_SUFFIX};

/// Proxy service state, cheaply cloneable: one clone is handed out per
/// accepted connection (see the `MakeService` impl below).
#[derive(Clone)]
pub struct MitmService {
    // Shared upstream HTTPS client used to forward intercepted requests.
    client: Arc<Client<HttpsConnector<HttpConnector<GaiResolver>>, Body>>,
    // TLS server config (our own certificate) used to impersonate
    // intercepted hosts when terminating the client's TLS.
    tls_cfg: Arc<ServerConfig>,
    // Channel on which detected gacha-log URLs are reported to the caller.
    sender: mpsc::Sender<Url>,
}

impl MitmService {
    /// Build the service from the MITM certificate/key pair.
    ///
    /// Returns the receiving end of the URL channel alongside the service;
    /// the caller listens on the receiver for intercepted URLs.
    ///
    /// Panics if the certificate/key pair is rejected by rustls.
    fn new(certificate: Certificate, private_key: PrivateKey) -> (mpsc::Receiver<Url>, Self) {
        // Bounded channel: at most 16 in-flight URL notifications.
        let (sender, receiver) = mpsc::channel(16);
        let mut tls_cfg = ServerConfig::new(NoClientAuth::new());
        tls_cfg
            .set_single_cert(vec![certificate], private_key)
            .unwrap();
        (
            receiver,
            Self {
                client: Arc::new(Client::builder().build(HttpsConnector::with_native_roots())),
                tls_cfg: Arc::new(tls_cfg),
                sender,
            },
        )
    }
}

/// `MitmService` as `MakeService`: hyper calls this once per incoming
/// connection; we answer with a clone of ourselves to serve it.
impl Service<&AddrStream> for MitmService {
    type Response = MitmService;
    type Error = Infallible;
    type Future = Ready<Result<Self::Response, Self::Error>>;

    // Always ready — cloning the service cannot block or fail.
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Poll::Ready(Ok(()))
    }

    fn call(&mut self, _socket: &AddrStream) -> Self::Future {
        future::ready(Ok(self.clone()))
    }
}

/// `MitmService` as `HttpService`: routes each proxied request.
impl Service<Request<Body>> for MitmService {
    type Response = Response<Body>;
    type Error = anyhow::Error;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;

    // Always ready — dispatch below is non-blocking.
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Poll::Ready(Ok(()))
    }

    fn call(&mut self, req: Request<Body>) -> Self::Future {
        if *req.method() == Method::CONNECT {
            // Handle SSL request
            let host = req.uri().authority().map(|a| a.host());
            if DOMAIN_INTERCEPT.iter().any(|&domain| host == Some(domain)) {
                // Target host is one we watch: terminate TLS and inspect.
                Box::pin(self.clone().proxy_intercept(req))
            } else {
                // Unwatched host: blind byte-for-byte TCP tunnel.
                Box::pin(self.clone().proxy_pass_tls(req))
            }
        } else {
            // Plain (non-CONNECT) HTTP: forward untouched.
            Box::pin(self.clone().proxy_pass_http(req))
        }
    }
}

impl MitmService {
    /// Intercept a CONNECT request: answer 200, then upgrade the tunnel,
    /// terminate TLS with our own certificate, and serve the decrypted
    /// HTTP. Any request whose path ends with `PAGE_INTERCEPT_SUFFIX`
    /// has its full URL reported through the channel before being
    /// forwarded upstream.
    async fn proxy_intercept(self, mut req: Request<Body>) -> anyhow::Result<Response<Body>> {
        // The upgrade only completes after we return the empty response
        // below, so the interception work runs in a detached task.
        spawn(async move {
            let tls_cfg = self.tls_cfg;
            let client = self.client;
            let sender = self.sender;
            if let Ok(stream) = upgrade::on(&mut req)
                .map_err(anyhow::Error::from)
                .and_then(|upgraded: upgrade::Upgraded| {
                    // Wrap the upgraded raw stream in our own TLS server.
                    TlsAcceptor::from(tls_cfg)
                        .accept(upgraded)
                        .map_err(anyhow::Error::from)
                })
                .await
            {
                let service = service_fn(move |mut req: Request<Body>| {
                    let client = client.clone();
                    let sender = sender.clone();
                    async move {
                        // Requests inside the tunnel carry origin-form URIs;
                        // rebuild an absolute https URI from the Host header
                        // so the upstream client can route it.
                        let new_uri = Uri::builder()
                            .scheme("https")
                            .authority(
                                req.headers()
                                    .get("host")
                                    .map(|h| h.to_str().unwrap())
                                    .unwrap(),
                            )
                            .path_and_query(
                                req.uri()
                                    .path_and_query()
                                    .map(|pq| pq.as_str())
                                    .unwrap_or("/"),
                            )
                            .build()
                            .unwrap();
                        *req.uri_mut() = new_uri;
                        // The page we are looking for: report its URL.
                        if req
                            .uri()
                            .path_and_query()
                            .map(|pq| pq.path().ends_with(PAGE_INTERCEPT_SUFFIX))
                            == Some(true)
                        {
                            let url = req.uri().to_string().parse().unwrap();
                            sender.send(url).await?;
                        }
                        // Forward to the real upstream either way.
                        Ok::<_, anyhow::Error>(client.request(req).await?)
                    }
                });
                let http = Http::new();
                let server = http.serve_connection(stream, service);
                // Connection errors are ignored: the tunnel is best-effort.
                server.await.ok();
            }
        });
        // Empty 200 tells the client the CONNECT tunnel is established.
        Ok(Response::new(Body::empty()))
    }

    /// Upgrade the connection to TCPStream and pipe it to upstream authority
    /// without inspecting the (still encrypted) bytes.
    async fn proxy_pass_tls(self, mut req: Request<Body>) -> anyhow::Result<Response<Body>> {
        // Dial upstream before answering, so a failed dial aborts the CONNECT.
        let mut remote_stream = Self::acquire_connection(&req).await?;
        spawn(async move {
            if let Ok(upgraded) = upgrade::on(&mut req).await {
                let (mut remote_read, mut remote_write) = remote_stream.split();
                let (mut client_read, mut client_write) = async_split(upgraded);
                // Bidirectional copy; ends when either side closes or errors.
                let client_to_remote = async_copy(&mut client_read, &mut remote_write);
                let remote_to_client = async_copy(&mut remote_read, &mut client_write);
                tokio::try_join!(client_to_remote, remote_to_client).ok();
            }
        });
        Ok(Response::new(Body::empty()))
    }

    /// Pass the proxy request along without tapping.
    async fn proxy_pass_http(self, req: Request<Body>) -> anyhow::Result<Response<Body>> {
        Ok(self.client.request(req).await?)
    }

    /// Acquire a raw tcp connection to the authority of the request.
    /// The "http" scheme and "/" path are placeholders — `HttpConnector`
    /// only uses the authority part to dial.
    async fn acquire_connection(req: &Request<Body>) -> anyhow::Result<TcpStream> {
        let mut connector = HttpConnector::new();
        let uri = hyper::Uri::builder()
            .scheme("http")
            .authority(
                req.uri()
                    .authority()
                    .map(|a| a.as_str())
                    .unwrap_or_default(),
            )
            .path_and_query("/")
            .build()
            .unwrap();
        Ok(connector.call(uri).await?)
    }
}

/// Create a man-in-the-middle proxy server and a receiver to receive the
/// detected url. Binds an ephemeral port on all interfaces ("0.0.0.0:0");
/// query the returned `Server` for the actual address.
pub fn make_mitm_server(
    certificate: Certificate,
    private_key: PrivateKey,
) -> (mpsc::Receiver<Url>, Server<AddrIncoming, MitmService>) {
    let (receiver, service) = MitmService::new(certificate, private_key);
    (
        receiver,
        Server::bind(&"0.0.0.0:0".parse().unwrap()).serve(service),
    )
}
mod deprecated_runner;
mod providers;
mod rpc;
mod session;
mod session_client;
mod state;
mod types;
mod vim;

use std::io::prelude::*;
use std::io::{BufReader, BufWriter};
use std::ops::Deref;
use std::sync::Arc;

use anyhow::Result;
use crossbeam_channel::{Receiver, Sender};
use once_cell::sync::OnceCell;
use serde::Serialize;
use serde_json::json;

use self::providers::{
    dumb_jump,
    filer::{self, FilerSession},
    quickfix, recent_files, BuiltinSession,
};
use self::rpc::{Call, RpcClient};
use self::session::{SessionEvent, SessionManager};
use self::session_client::SessionClient;
use self::state::State;
use self::types::GlobalEnv;

pub use self::deprecated_runner::{run_forever, write_response};
pub use self::rpc::{MethodCall, Notification};

// Process-wide environment, set exactly once elsewhere before `global()`
// is first called (OnceCell enforces the write-once invariant).
static GLOBAL_ENV: OnceCell<GlobalEnv> = OnceCell::new();

/// Ensure GLOBAL_ENV has been initialized before using it.
///
/// Returns a handle dereferencing to the global environment.
/// Panics (debug builds) or hits `unreachable!` (release builds) if the
/// cell was never initialized — both are hard failures; the split only
/// changes the diagnostic message.
pub fn global() -> impl Deref<Target = GlobalEnv> {
    if let Some(x) = GLOBAL_ENV.get() {
        x
    } else if cfg!(debug_assertions) {
        panic!("Uninitalized static: GLOBAL_ENV")
    } else {
        unreachable!("Never forget to intialize before using it!")
    }
}

/// Starts and keeps running the server on top of stdio.
///
/// Wires an RPC client over stdin/stdout to a session client, then blocks
/// forever inside `loop_call` dispatching incoming calls. Only returns
/// (with `Ok(())`) if that loop ever exits.
pub fn start() -> Result<()> {
    // Unbounded channel carrying parsed RPC calls from the reader thread.
    let (call_tx, call_rx) = crossbeam_channel::unbounded();
    let rpc_client = Arc::new(RpcClient::new(
        // Buffer both directions of the stdio transport.
        BufReader::new(std::io::stdin()),
        BufWriter::new(std::io::stdout()),
        call_tx.clone(),
    ));
    let state = State::new(call_tx, rpc_client);
    let session_client = SessionClient::new(state);
    // Blocks, servicing calls as they arrive on the channel.
    session_client.loop_call(&call_rx);
    Ok(())
}
use super::board::{ Board , BOARD_VEC_SIZE }; use std::io::prelude::*; use std::io::Result as IOResult; use std::io::Error as IOError; use std::io::ErrorKind as IOErrorKind; use std::io::{ BufReader }; use std::result::Result; use std::fs::File; use std::path::Path; pub trait LoadBoard { fn new( file_name: String ) -> Self; fn load( &self ) -> IOResult<Board>; } pub struct LoadBoardOneLinerSimpleFormat { file_name: String, } fn load_one_board(line: &String) -> Result< Board, String > { let mut board = Board::empty(); for (idx, ch) in line.chars().into_iter().enumerate() { // check that the maximum number of values on a board match idx { idx if idx < BOARD_VEC_SIZE => { match ch { // only these chars are expected ch if '1' <= ch && ch <= '9' => { let digit: u8 = ch.to_digit( 10 ).unwrap() as u8; //board.value_array[ idx ].value = Some( digit ) board.value_array[ idx ].set_value( digit ); } // recognized empty'es ch if '.' == ch || '0' == ch => { // leave empty cell } // not recognized ch => { //panic!( "unrecognized character in input '{}'; only digits and '.' are supported" , ch ); let mes: String = format!( "unrecognized character in input '{}'; only digits and '.' 
are supported" , ch ); return Err( mes ) } } }, idx => { let mes = format!( "a maximum of {} elements are expected, found {}" , BOARD_VEC_SIZE , idx + 1 ); return Err( mes ) }, } } Ok( board ) } impl LoadBoard for LoadBoardOneLinerSimpleFormat { fn new( file_name: String ) -> LoadBoardOneLinerSimpleFormat { LoadBoardOneLinerSimpleFormat { file_name: file_name } } //#[allow(unused_assignments)] fn load(&self) -> IOResult<Board> { let file_path = Path::new( &self.file_name ); let file = File::open( &file_path )?; let buf_read = BufReader::new( &file ); let mut _line_count = 0; //for (i, line) in buf_read.lines().into_iter().enumerate() { for line in buf_read.lines().into_iter() { _line_count += 1; let line_str: &String = &line?; let new_board = load_one_board( line_str ); match new_board { Ok( nb ) => { return Ok( nb ); } Err( e ) => { return Err( IOError::new( IOErrorKind::Other , e ) ); } } } if _line_count == 0 { return Err( IOError::new( IOErrorKind::Other , "no board description found" ) ) } Err( IOError::new( IOErrorKind::Other , "unspecified error" ) ) } }
fn main() {
    println!("{}", levenshtein_distance("kitten", "sitting"));
    println!("{}", levenshtein_distance("saturday", "sunday"));
    println!("{}", levenshtein_distance("rosettacode", "raisethysword"));
}

/// Compute the Levenshtein (edit) distance between two strings: the
/// minimum number of single-character insertions, deletions, and
/// substitutions required to turn `word1` into `word2`.
///
/// Uses a rolling single-row dynamic program instead of materializing
/// the full matrix; the result is identical.
fn levenshtein_distance(word1: &str, word2: &str) -> usize {
    let source: Vec<char> = word1.chars().collect();
    let target: Vec<char> = word2.chars().collect();

    // prev_row[i] = distance between the first `i` chars of `word1` and
    // the prefix of `word2` consumed so far. The base row is 0..=len,
    // i.e. the cost of deleting every source character.
    let mut prev_row: Vec<usize> = (0..=source.len()).collect();

    for (row_idx, target_ch) in target.iter().enumerate() {
        let mut current_row = Vec::with_capacity(source.len() + 1);
        // First column: turning "" into row_idx + 1 target chars.
        current_row.push(row_idx + 1);

        for (col_idx, source_ch) in source.iter().enumerate() {
            let cell = if source_ch == target_ch {
                // Matching characters cost nothing extra.
                prev_row[col_idx]
            } else {
                // Cheapest of substitution, deletion, insertion.
                1 + prev_row[col_idx]
                    .min(prev_row[col_idx + 1])
                    .min(current_row[col_idx])
            };
            current_row.push(cell);
        }

        prev_row = current_row;
    }

    prev_row[source.len()]
}
// This file is part of Substrate. // Copyright (C) 2020 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! # Society Module //! //! - [`society::Trait`](./trait.Trait.html) //! - [`Call`](./enum.Call.html) //! //! ## Overview //! //! The Society module is an economic game which incentivizes users to participate //! and maintain a membership society. //! //! ### User Types //! //! At any point, a user in the society can be one of a: //! * Bidder - A user who has submitted intention of joining the society. //! * Candidate - A user who will be voted on to join the society. //! * Suspended Candidate - A user who failed to win a vote. //! * Member - A user who is a member of the society. //! * Suspended Member - A member of the society who has accumulated too many strikes //! or failed their membership challenge. //! //! Of the non-suspended members, there is always a: //! * Head - A member who is exempt from suspension. //! * Defender - A member whose membership is under question and voted on again. //! //! Of the non-suspended members of the society, a random set of them are chosen as //! "skeptics". The mechanics of skeptics is explained in the //! [member phase](#member-phase) below. //! //! ### Mechanics //! //! #### Rewards //! //! Members are incentivized to participate in the society through rewards paid //! by the Society treasury. These payments have a maturity period that the user //! 
must wait before they are able to access the funds. //! //! #### Punishments //! //! Members can be punished by slashing the reward payouts that have not been //! collected. Additionally, members can accumulate "strikes", and when they //! reach a max strike limit, they become suspended. //! //! #### Skeptics //! //! During the voting period, a random set of members are selected as "skeptics". //! These skeptics are expected to vote on the current candidates. If they do not vote, //! their skeptic status is treated as a rejection vote, the member is deemed //! "lazy", and are given a strike per missing vote. //! //! #### Membership Challenges //! //! Every challenge rotation period, an existing member will be randomly selected //! to defend their membership into society. Then, other members can vote whether //! this defender should stay in society. A simple majority wins vote will determine //! the outcome of the user. Ties are treated as a failure of the challenge, but //! assuming no one else votes, the defender always get a free vote on their //! own challenge keeping them in the society. The Head member is exempt from the //! negative outcome of a membership challenge. //! //! #### Society Treasury //! //! The membership society is independently funded by a treasury managed by this //! module. Some subset of this treasury is placed in a Society Pot, which is used //! to determine the number of accepted bids. //! //! #### Rate of Growth //! //! The membership society can grow at a rate of 10 accepted candidates per rotation period up //! to the max membership threshold. Once this threshold is met, candidate selections //! are stalled until there is space for new members to join. This can be resolved by //! voting out existing members through the random challenges or by using governance //! to increase the maximum membership count. //! //! ### User Life Cycle //! //! A user can go through the following phases: //! //! ```ignore //! +-------> User <----------+ //! 
| + | //! | | | //! +----------------------------------------------+ //! | | | | | //! | | v | | //! | | Bidder <-----------+ | //! | | + | | //! | | | + | //! | | v Suspended | //! | | Candidate +----> Candidate | //! | | + + | //! | | | | | //! | + | | | //! | Suspended +------>| | | //! | Member | | | //! | ^ | | | //! | | v | | //! | +-------+ Member <----------+ | //! | | //! | | //! +------------------Society---------------------+ //! ``` //! //! #### Initialization //! //! The society is initialized with a single member who is automatically chosen as the Head. //! //! #### Bid Phase //! //! New users must have a bid to join the society. //! //! A user can make a bid by reserving a deposit. Alternatively, an already existing member //! can create a bid on a user's behalf by "vouching" for them. //! //! A bid includes reward information that the user would like to receive for joining //! the society. A vouching bid can additionally request some portion of that reward as a tip //! to the voucher for vouching for the prospective candidate. //! //! Every rotation period, Bids are ordered by reward amount, and the module //! selects as many bids the Society Pot can support for that period. //! //! These selected bids become candidates and move on to the Candidate phase. //! Bids that were not selected stay in the bidder pool until they are selected or //! a user chooses to "unbid". //! //! #### Candidate Phase //! //! Once a bidder becomes a candidate, members vote whether to approve or reject //! that candidate into society. This voting process also happens during a rotation period. //! //! The approval and rejection criteria for candidates are not set on chain, //! and may change for different societies. //! //! At the end of the rotation period, we collect the votes for a candidate //! and randomly select a vote as the final outcome. //! //! ```ignore //! [ a-accept, r-reject, s-skeptic ] //! +----------------------------------+ //! | | //! 
| Member |0|1|2|3|4|5|6|7|8|9| | //! | ----------------------------- | //! | Vote |a|a|a|r|s|r|a|a|s|a| | //! | ----------------------------- | //! | Selected | | | |x| | | | | | | | //! | | //! +----------------------------------+ //! //! Result: Rejected //! ``` //! //! Each member that voted opposite to this randomly selected vote is punished by //! slashing their unclaimed payouts and increasing the number of strikes they have. //! //! These slashed funds are given to a random user who voted the same as the //! selected vote as a reward for participating in the vote. //! //! If the candidate wins the vote, they receive their bid reward as a future payout. //! If the bid was placed by a voucher, they will receive their portion of the reward, //! before the rest is paid to the winning candidate. //! //! One winning candidate is selected as the Head of the members. This is randomly //! chosen, weighted by the number of approvals the winning candidates accumulated. //! //! If the candidate loses the vote, they are suspended and it is up to the Suspension //! Judgement origin to determine if the candidate should go through the bidding process //! again, should be accepted into the membership society, or rejected and their deposit //! slashed. //! //! #### Member Phase //! //! Once a candidate becomes a member, their role is to participate in society. //! //! Regular participation involves voting on candidates who want to join the membership //! society, and by voting in the right way, a member will accumulate future payouts. //! When a payout matures, members are able to claim those payouts. //! //! Members can also vouch for users to join the society, and request a "tip" from //! the fees the new member would collect by joining the society. This vouching //! process is useful in situations where a user may not have enough balance to //! satisfy the bid deposit. A member can only vouch one user at a time. //! //! 
During rotation periods, a random group of members are selected as "skeptics". //! These skeptics are expected to vote on the current candidates. If they do not vote, //! their skeptic status is treated as a rejection vote, the member is deemed //! "lazy", and are given a strike per missing vote. //! //! There is a challenge period in parallel to the rotation period. During a challenge period, //! a random member is selected to defend their membership to the society. Other members //! make a traditional majority-wins vote to determine if the member should stay in the society. //! Ties are treated as a failure of the challenge. //! //! If a member accumulates too many strikes or fails their membership challenge, //! they will become suspended. While a member is suspended, they are unable to //! claim matured payouts. It is up to the Suspension Judgement origin to determine //! if the member should re-enter society or be removed from society with all their //! future payouts slashed. //! //! ## Interface //! //! ### Dispatchable Functions //! //! #### For General Users //! //! * `bid` - A user can make a bid to join the membership society by reserving a deposit. //! * `unbid` - A user can withdraw their bid for entry, the deposit is returned. //! //! #### For Members //! //! * `vouch` - A member can place a bid on behalf of a user to join the membership society. //! * `unvouch` - A member can revoke their vouch for a user. //! * `vote` - A member can vote to approve or reject a candidate's request to join the society. //! * `defender_vote` - A member can vote to approve or reject a defender's continued membership //! to the society. //! * `payout` - A member can claim their first matured payment. //! * `unfound` - Allow the founder to unfound the society when they are the only member. //! //! #### For Super Users //! //! * `found` - The founder origin can initiate this society. Useful for bootstrapping the Society //! pallet on an already running chain. //! 
* `judge_suspended_member` - The suspension judgement origin is able to make //! judgement on a suspended member. //! * `judge_suspended_candidate` - The suspension judgement origin is able to //! make judgement on a suspended candidate. //! * `set_max_membership` - The ROOT origin can update the maximum member count for the society. //! The max membership count must be greater than 1. // Ensure we're `no_std` when compiling for Wasm. #![cfg_attr(not(feature = "std"), no_std)] #[cfg(test)] mod mock; #[cfg(test)] mod tests; use codec::{Decode, Encode}; use frame_support::traits::{ BalanceStatus, ChangeMembers, Currency, EnsureOrigin, ExistenceRequirement::AllowDeath, Get, Imbalance, OnUnbalanced, Randomness, ReservableCurrency, }; use frame_support::weights::Weight; use frame_support::{ decl_error, decl_event, decl_module, decl_storage, dispatch::DispatchResult, ensure, }; use frame_system::{self as system, ensure_root, ensure_signed}; use rand_chacha::{ rand_core::{RngCore, SeedableRng}, ChaChaRng, }; use sp_runtime::{ traits::{ AccountIdConversion, CheckedSub, Hash, IntegerSquareRoot, Saturating, StaticLookup, TrailingZeroInput, Zero, }, ModuleId, Percent, RuntimeDebug, }; use sp_std::prelude::*; type BalanceOf<T, I> = <<T as Trait<I>>::Currency as Currency<<T as system::Trait>::AccountId>>::Balance; type NegativeImbalanceOf<T> = <<T as Trait>::Currency as Currency<<T as frame_system::Trait>::AccountId>>::NegativeImbalance; /// The module's configuration trait. pub trait Trait<I = DefaultInstance>: system::Trait { /// The overarching event type. type Event: From<Event<Self, I>> + Into<<Self as system::Trait>::Event>; /// The societies's module id type ModuleId: Get<ModuleId>; /// The currency type used for bidding. type Currency: ReservableCurrency<Self::AccountId>; /// Something that provides randomness in the runtime. type Randomness: Randomness<Self::Hash>; /// The minimum amount of a deposit required for a bid to be made. 
type CandidateDeposit: Get<BalanceOf<Self, I>>; /// The amount of the unpaid reward that gets deducted in the case that either a skeptic /// doesn't vote or someone votes in the wrong way. type WrongSideDeduction: Get<BalanceOf<Self, I>>; /// The number of times a member may vote the wrong way (or not at all, when they are a skeptic) /// before they become suspended. type MaxStrikes: Get<u32>; /// The amount of incentive paid within each period. Doesn't include VoterTip. type PeriodSpend: Get<BalanceOf<Self, I>>; /// The receiver of the signal for when the members have changed. type MembershipChanged: ChangeMembers<Self::AccountId>; /// The number of blocks between candidate/membership rotation periods. type RotationPeriod: Get<Self::BlockNumber>; /// The maximum duration of the payout lock. type MaxLockDuration: Get<Self::BlockNumber>; /// The origin that is allowed to call `found`. type FounderSetOrigin: EnsureOrigin<Self::Origin>; /// The origin that is allowed to make suspension judgements. type SuspensionJudgementOrigin: EnsureOrigin<Self::Origin>; /// The number of blocks between membership challenges. type ChallengePeriod: Get<Self::BlockNumber>; } /// A vote by a member on a candidate application. #[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] pub enum Vote { /// The member has been chosen to be skeptic and has not yet taken any action. Skeptic, /// The member has rejected the candidate's application. Reject, /// The member approves of the candidate's application. Approve, } /// A judgement by the suspension judgement origin on a suspended candidate. #[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] pub enum Judgement { /// The suspension judgement origin takes no direct judgment /// and places the candidate back into the bid pool. Rebid, /// The suspension judgement origin has rejected the candidate's application. Reject, /// The suspension judgement origin approves of the candidate's application. 
Approve, } /// Details of a payout given as a per-block linear "trickle". #[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, Default)] pub struct Payout<Balance, BlockNumber> { /// Total value of the payout. value: Balance, /// Block number at which the payout begins. begin: BlockNumber, /// Total number of blocks over which the payout is spread. duration: BlockNumber, /// Total value paid out so far. paid: Balance, } /// Status of a vouching member. #[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] pub enum VouchingStatus { /// Member is currently vouching for a user. Vouching, /// Member is banned from vouching for other members. Banned, } /// Number of strikes that a member has against them. pub type StrikeCount = u32; /// A bid for entry into society. #[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] pub struct Bid<AccountId, Balance> { /// The bidder/candidate trying to enter society who: AccountId, /// The kind of bid placed for this bidder/candidate. See `BidKind`. kind: BidKind<AccountId, Balance>, /// The reward that the bidder has requested for successfully joining the society. value: Balance, } /// A vote by a member on a candidate application. #[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] pub enum BidKind<AccountId, Balance> { /// The CandidateDeposit was paid for this bid. Deposit(Balance), /// A member vouched for this bid. The account should be reinstated into `Members` once the /// bid is successful (or if it is rescinded prior to launch). Vouch(AccountId, Balance), } impl<AccountId: PartialEq, Balance> BidKind<AccountId, Balance> { fn check_voucher(&self, v: &AccountId) -> DispatchResult { if let BidKind::Vouch(ref a, _) = self { if a == v { Ok(()) } else { Err("incorrect identity")? } } else { Err("not vouched")? } } } // This module's storage items. decl_storage! { trait Store for Module<T: Trait<I>, I: Instance=DefaultInstance> as Society { /// The first member. 
pub Founder get(fn founder) build(|config: &GenesisConfig<T, I>| config.members.first().cloned()): Option<T::AccountId>; /// A hash of the rules of this society concerning membership. Can only be set once and /// only by the founder. pub Rules get(fn rules): Option<T::Hash>; /// The current set of candidates; bidders that are attempting to become members. pub Candidates get(fn candidates): Vec<Bid<T::AccountId, BalanceOf<T, I>>>; /// The set of suspended candidates. pub SuspendedCandidates get(fn suspended_candidate): map hasher(twox_64_concat) T::AccountId => Option<(BalanceOf<T, I>, BidKind<T::AccountId, BalanceOf<T, I>>)>; /// Amount of our account balance that is specifically for the next round's bid(s). pub Pot get(fn pot) config(): BalanceOf<T, I>; /// The most primary from the most recently approved members. pub Head get(fn head) build(|config: &GenesisConfig<T, I>| config.members.first().cloned()): Option<T::AccountId>; /// The current set of members, ordered. pub Members get(fn members) build(|config: &GenesisConfig<T, I>| { let mut m = config.members.clone(); m.sort(); m }): Vec<T::AccountId>; /// The set of suspended members. pub SuspendedMembers get(fn suspended_member): map hasher(twox_64_concat) T::AccountId => bool; /// The current bids, stored ordered by the value of the bid. Bids: Vec<Bid<T::AccountId, BalanceOf<T, I>>>; /// Members currently vouching or banned from vouching again Vouching get(fn vouching): map hasher(twox_64_concat) T::AccountId => Option<VouchingStatus>; /// Pending payouts; ordered by block number, with the amount that should be paid out. Payouts: map hasher(twox_64_concat) T::AccountId => Vec<(T::BlockNumber, BalanceOf<T, I>)>; /// The ongoing number of losing votes cast by the member. Strikes: map hasher(twox_64_concat) T::AccountId => StrikeCount; /// Double map from Candidate -> Voter -> (Maybe) Vote. 
Votes: double_map hasher(twox_64_concat) T::AccountId, hasher(twox_64_concat) T::AccountId => Option<Vote>; /// The defending member currently being challenged. Defender get(fn defender): Option<T::AccountId>; /// Votes for the defender. DefenderVotes: map hasher(twox_64_concat) T::AccountId => Option<Vote>; /// The max number of members for the society at one time. MaxMembers get(fn max_members) config(): u32; } add_extra_genesis { config(members): Vec<T::AccountId>; } } // The module's dispatchable functions. decl_module! { /// The module declaration. pub struct Module<T: Trait<I>, I: Instance=DefaultInstance> for enum Call where origin: T::Origin { type Error = Error<T, I>; /// The minimum amount of a deposit required for a bid to be made. const CandidateDeposit: BalanceOf<T, I> = T::CandidateDeposit::get(); /// The amount of the unpaid reward that gets deducted in the case that either a skeptic /// doesn't vote or someone votes in the wrong way. const WrongSideDeduction: BalanceOf<T, I> = T::WrongSideDeduction::get(); /// The number of times a member may vote the wrong way (or not at all, when they are a skeptic) /// before they become suspended. const MaxStrikes: u32 = T::MaxStrikes::get(); /// The amount of incentive paid within each period. Doesn't include VoterTip. const PeriodSpend: BalanceOf<T, I> = T::PeriodSpend::get(); /// The number of blocks between candidate/membership rotation periods. const RotationPeriod: T::BlockNumber = T::RotationPeriod::get(); /// The number of blocks between membership challenges. const ChallengePeriod: T::BlockNumber = T::ChallengePeriod::get(); /// The societies's module id const ModuleId: ModuleId = T::ModuleId::get(); // Used for handling module events. fn deposit_event() = default; /// A user outside of the society can make a bid for entry. /// /// Payment: `CandidateDeposit` will be reserved for making a bid. It is returned /// when the bid becomes a member, or if the bid calls `unbid`. 
/// /// The dispatch origin for this call must be _Signed_. /// /// Parameters: /// - `value`: A one time payment the bid would like to receive when joining the society. /// /// # <weight> /// Key: B (len of bids), C (len of candidates), M (len of members), X (balance reserve) /// - Storage Reads: /// - One storage read to check for suspended candidate. O(1) /// - One storage read to check for suspended member. O(1) /// - One storage read to retrieve all current bids. O(B) /// - One storage read to retrieve all current candidates. O(C) /// - One storage read to retrieve all members. O(M) /// - Storage Writes: /// - One storage mutate to add a new bid to the vector O(B) (TODO: possible optimization w/ read) /// - Up to one storage removal if bid.len() > MAX_BID_COUNT. O(1) /// - Notable Computation: /// - O(B + C + log M) search to check user is not already a part of society. /// - O(log B) search to insert the new bid sorted. /// - External Module Operations: /// - One balance reserve operation. O(X) /// - Up to one balance unreserve operation if bids.len() > MAX_BID_COUNT. /// - Events: /// - One event for new bid. /// - Up to one event for AutoUnbid if bid.len() > MAX_BID_COUNT. 
/// /// Total Complexity: O(M + B + C + logM + logB + X) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] pub fn bid(origin, value: BalanceOf<T, I>) -> DispatchResult { let who = ensure_signed(origin)?; ensure!(!<SuspendedCandidates<T, I>>::contains_key(&who), Error::<T, I>::Suspended); ensure!(!<SuspendedMembers<T, I>>::contains_key(&who), Error::<T, I>::Suspended); let bids = <Bids<T, I>>::get(); ensure!(!Self::is_bid(&bids, &who), Error::<T, I>::AlreadyBid); let candidates = <Candidates<T, I>>::get(); ensure!(!Self::is_candidate(&candidates, &who), Error::<T, I>::AlreadyCandidate); let members = <Members<T, I>>::get(); ensure!(!Self::is_member(&members ,&who), Error::<T, I>::AlreadyMember); let deposit = T::CandidateDeposit::get(); T::Currency::reserve(&who, deposit)?; Self::put_bid(bids, &who, value.clone(), BidKind::Deposit(deposit)); Self::deposit_event(RawEvent::Bid(who, value)); Ok(()) } /// A bidder can remove their bid for entry into society. /// By doing so, they will have their candidate deposit returned or /// they will unvouch their voucher. /// /// Payment: The bid deposit is unreserved if the user made a bid. /// /// The dispatch origin for this call must be _Signed_ and a bidder. /// /// Parameters: /// - `pos`: Position in the `Bids` vector of the bid who wants to unbid. /// /// # <weight> /// Key: B (len of bids), X (balance unreserve) /// - One storage read and write to retrieve and update the bids. O(B) /// - Either one unreserve balance action O(X) or one vouching storage removal. O(1) /// - One event. /// /// Total Complexity: O(B + X) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] pub fn unbid(origin, pos: u32) -> DispatchResult { let who = ensure_signed(origin)?; let pos = pos as usize; <Bids<T, I>>::mutate(|b| if pos < b.len() && b[pos].who == who { // Either unreserve the deposit or free up the vouching member. 
// In neither case can we do much if the action isn't completable, but there's // no reason that either should fail. match b.remove(pos).kind { BidKind::Deposit(deposit) => { let _ = T::Currency::unreserve(&who, deposit); } BidKind::Vouch(voucher, _) => { <Vouching<T, I>>::remove(&voucher); } } Self::deposit_event(RawEvent::Unbid(who)); Ok(()) } else { Err(Error::<T, I>::BadPosition)? } ) } /// As a member, vouch for someone to join society by placing a bid on their behalf. /// /// There is no deposit required to vouch for a new bid, but a member can only vouch for /// one bid at a time. If the bid becomes a suspended candidate and ultimately rejected by /// the suspension judgement origin, the member will be banned from vouching again. /// /// As a vouching member, you can claim a tip if the candidate is accepted. This tip will /// be paid as a portion of the reward the member will receive for joining the society. /// /// The dispatch origin for this call must be _Signed_ and a member. /// /// Parameters: /// - `who`: The user who you would like to vouch for. /// - `value`: The total reward to be paid between you and the candidate if they become /// a member in the society. /// - `tip`: Your cut of the total `value` payout when the candidate is inducted into /// the society. Tips larger than `value` will be saturated upon payout. /// /// # <weight> /// Key: B (len of bids), C (len of candidates), M (len of members) /// - Storage Reads: /// - One storage read to retrieve all members. O(M) /// - One storage read to check member is not already vouching. O(1) /// - One storage read to check for suspended candidate. O(1) /// - One storage read to check for suspended member. O(1) /// - One storage read to retrieve all current bids. O(B) /// - One storage read to retrieve all current candidates. O(C) /// - Storage Writes: /// - One storage write to insert vouching status to the member. 
O(1) /// - One storage mutate to add a new bid to the vector O(B) (TODO: possible optimization w/ read) /// - Up to one storage removal if bid.len() > MAX_BID_COUNT. O(1) /// - Notable Computation: /// - O(log M) search to check sender is a member. /// - O(B + C + log M) search to check user is not already a part of society. /// - O(log B) search to insert the new bid sorted. /// - External Module Operations: /// - One balance reserve operation. O(X) /// - Up to one balance unreserve operation if bids.len() > MAX_BID_COUNT. /// - Events: /// - One event for vouch. /// - Up to one event for AutoUnbid if bid.len() > MAX_BID_COUNT. /// /// Total Complexity: O(M + B + C + logM + logB + X) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] pub fn vouch(origin, who: T::AccountId, value: BalanceOf<T, I>, tip: BalanceOf<T, I>) -> DispatchResult { let voucher = ensure_signed(origin)?; // Check user is not suspended. ensure!(!<SuspendedCandidates<T, I>>::contains_key(&who), Error::<T, I>::Suspended); ensure!(!<SuspendedMembers<T, I>>::contains_key(&who), Error::<T, I>::Suspended); // Check user is not a bid or candidate. let bids = <Bids<T, I>>::get(); ensure!(!Self::is_bid(&bids, &who), Error::<T, I>::AlreadyBid); let candidates = <Candidates<T, I>>::get(); ensure!(!Self::is_candidate(&candidates, &who), Error::<T, I>::AlreadyCandidate); // Check user is not already a member. let members = <Members<T, I>>::get(); ensure!(!Self::is_member(&members, &who), Error::<T, I>::AlreadyMember); // Check sender can vouch. ensure!(Self::is_member(&members, &voucher), Error::<T, I>::NotMember); ensure!(!<Vouching<T, I>>::contains_key(&voucher), Error::<T, I>::AlreadyVouching); <Vouching<T, I>>::insert(&voucher, VouchingStatus::Vouching); Self::put_bid(bids, &who, value.clone(), BidKind::Vouch(voucher.clone(), tip)); Self::deposit_event(RawEvent::Vouch(who, value, voucher)); Ok(()) } /// As a vouching member, unvouch a bid. 
This only works while the vouched user is
/// /// Total Complexity: O(M + logM + C) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] pub fn vote(origin, candidate: <T::Lookup as StaticLookup>::Source, approve: bool) { let voter = ensure_signed(origin)?; let candidate = T::Lookup::lookup(candidate)?; let candidates = <Candidates<T, I>>::get(); ensure!(Self::is_candidate(&candidates, &candidate), Error::<T, I>::NotCandidate); let members = <Members<T, I>>::get(); ensure!(Self::is_member(&members, &voter), Error::<T, I>::NotMember); let vote = if approve { Vote::Approve } else { Vote::Reject }; <Votes<T, I>>::insert(&candidate, &voter, vote); Self::deposit_event(RawEvent::Vote(candidate, voter, approve)); } /// As a member, vote on the defender. /// /// The dispatch origin for this call must be _Signed_ and a member. /// /// Parameters: /// - `approve`: A boolean which says if the candidate should be /// approved (`true`) or rejected (`false`). /// /// # <weight> /// - Key: M (len of members) /// - One storage read O(M) and O(log M) search to check user is a member. /// - One storage write to add vote to votes. O(1) /// - One event. /// /// Total Complexity: O(M + logM) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] pub fn defender_vote(origin, approve: bool) { let voter = ensure_signed(origin)?; let members = <Members<T, I>>::get(); ensure!(Self::is_member(&members, &voter), Error::<T, I>::NotMember); let vote = if approve { Vote::Approve } else { Vote::Reject }; <DefenderVotes<T, I>>::insert(&voter, vote); Self::deposit_event(RawEvent::DefenderVote(voter, approve)); } /// Transfer the first matured payout for the sender and remove it from the records. /// /// NOTE: This extrinsic needs to be called multiple times to claim multiple matured payouts. /// /// Payment: The member will receive a payment equal to their first matured /// payout to their free balance. /// /// The dispatch origin for this call must be _Signed_ and a member with /// payouts remaining. 
/// /// # <weight> /// Key: M (len of members), P (number of payouts for a particular member) /// - One storage read O(M) and O(log M) search to check signer is a member. /// - One storage read O(P) to get all payouts for a member. /// - One storage read O(1) to get the current block number. /// - One currency transfer call. O(X) /// - One storage write or removal to update the member's payouts. O(P) /// /// Total Complexity: O(M + logM + P + X) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] pub fn payout(origin) { let who = ensure_signed(origin)?; let members = <Members<T, I>>::get(); ensure!(Self::is_member(&members, &who), Error::<T, I>::NotMember); let mut payouts = <Payouts<T, I>>::get(&who); if let Some((when, amount)) = payouts.first() { if when <= &<system::Module<T>>::block_number() { T::Currency::transfer(&Self::payouts(), &who, *amount, AllowDeath)?; payouts.remove(0); if payouts.is_empty() { <Payouts<T, I>>::remove(&who); } else { <Payouts<T, I>>::insert(&who, payouts); } return Ok(()) } } Err(Error::<T, I>::NoPayout)? } /// Found the society. /// /// This is done as a discrete action in order to allow for the /// module to be included into a running chain and can only be done once. /// /// The dispatch origin for this call must be from the _FounderSetOrigin_. /// /// Parameters: /// - `founder` - The first member and head of the newly founded society. /// - `max_members` - The initial max number of members for the society. /// - `rules` - The rules of this society concerning membership. /// /// # <weight> /// - Two storage mutates to set `Head` and `Founder`. O(1) /// - One storage write to add the first member to society. O(1) /// - One event. 
/// /// Total Complexity: O(1) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] fn found(origin, founder: T::AccountId, max_members: u32, rules: Vec<u8>) { T::FounderSetOrigin::ensure_origin(origin)?; ensure!(!<Head<T, I>>::exists(), Error::<T, I>::AlreadyFounded); ensure!(max_members > 1, Error::<T, I>::MaxMembers); // This should never fail in the context of this function... <MaxMembers<I>>::put(max_members); Self::add_member(&founder)?; <Head<T, I>>::put(&founder); <Founder<T, I>>::put(&founder); Rules::<T, I>::put(T::Hashing::hash(&rules)); Self::deposit_event(RawEvent::Founded(founder)); } /// Annul the founding of the society. /// /// The dispatch origin for this call must be Signed, and the signing account must be both /// the `Founder` and the `Head`. This implies that it may only be done when there is one /// member. /// /// # <weight> /// - Two storage reads O(1). /// - Four storage removals O(1). /// - One event. /// /// Total Complexity: O(1) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] fn unfound(origin) { let founder = ensure_signed(origin)?; ensure!(Founder::<T, I>::get() == Some(founder.clone()), Error::<T, I>::NotFounder); ensure!(Head::<T, I>::get() == Some(founder.clone()), Error::<T, I>::NotHead); Members::<T, I>::kill(); Head::<T, I>::kill(); Founder::<T, I>::kill(); Rules::<T, I>::kill(); Candidates::<T, I>::kill(); SuspendedCandidates::<T, I>::remove_all(); Self::deposit_event(RawEvent::Unfounded(founder)); } /// Allow suspension judgement origin to make judgement on a suspended member. /// /// If a suspended member is forgiven, we simply add them back as a member, not affecting /// any of the existing storage items for that member. /// /// If a suspended member is rejected, remove all associated storage items, including /// their payouts, and remove any vouched bids they currently have. /// /// The dispatch origin for this call must be from the _SuspensionJudgementOrigin_. 
/// /// Parameters: /// - `who` - The suspended member to be judged. /// - `forgive` - A boolean representing whether the suspension judgement origin /// forgives (`true`) or rejects (`false`) a suspended member. /// /// # <weight> /// Key: B (len of bids), M (len of members) /// - One storage read to check `who` is a suspended member. O(1) /// - Up to one storage write O(M) with O(log M) binary search to add a member back to society. /// - Up to 3 storage removals O(1) to clean up a removed member. /// - Up to one storage write O(B) with O(B) search to remove vouched bid from bids. /// - Up to one additional event if unvouch takes place. /// - One storage removal. O(1) /// - One event for the judgement. /// /// Total Complexity: O(M + logM + B) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] fn judge_suspended_member(origin, who: T::AccountId, forgive: bool) { T::SuspensionJudgementOrigin::ensure_origin(origin)?; ensure!(<SuspendedMembers<T, I>>::contains_key(&who), Error::<T, I>::NotSuspended); if forgive { // Try to add member back to society. Can fail with `MaxMembers` limit. Self::add_member(&who)?; } else { // Cancel a suspended member's membership, remove their payouts. <Payouts<T, I>>::remove(&who); <Strikes<T, I>>::remove(&who); // Remove their vouching status, potentially unbanning them in the future. if <Vouching<T, I>>::take(&who) == Some(VouchingStatus::Vouching) { // Try to remove their bid if they are vouching. // If their vouch is already a candidate, do nothing. <Bids<T, I>>::mutate(|bids| // Try to find the matching bid if let Some(pos) = bids.iter().position(|b| b.kind.check_voucher(&who).is_ok()) { // Remove the bid, and emit an event let vouched = bids.remove(pos).who; Self::deposit_event(RawEvent::Unvouch(vouched)); } ); } } <SuspendedMembers<T, I>>::remove(&who); Self::deposit_event(RawEvent::SuspendedMemberJudgement(who, forgive)); } /// Allow suspended judgement origin to make judgement on a suspended candidate. 
/// /// If the judgement is `Approve`, we add them to society as a member with the appropriate /// payment for joining society. /// /// If the judgement is `Reject`, we either slash the deposit of the bid, giving it back /// to the society treasury, or we ban the voucher from vouching again. /// /// If the judgement is `Rebid`, we put the candidate back in the bid pool and let them go /// through the induction process again. /// /// The dispatch origin for this call must be from the _SuspensionJudgementOrigin_. /// /// Parameters: /// - `who` - The suspended candidate to be judged. /// - `judgement` - `Approve`, `Reject`, or `Rebid`. /// /// # <weight> /// Key: B (len of bids), M (len of members), X (balance action) /// - One storage read to check `who` is a suspended candidate. /// - One storage removal of the suspended candidate. /// - Approve Logic /// - One storage read to get the available pot to pay users with. O(1) /// - One storage write to update the available pot. O(1) /// - One storage read to get the current block number. O(1) /// - One storage read to get all members. O(M) /// - Up to one unreserve currency action. /// - Up to two new storage writes to payouts. /// - Up to one storage write with O(log M) binary search to add a member to society. /// - Reject Logic /// - Up to one repatriate reserved currency action. O(X) /// - Up to one storage write to ban the vouching member from vouching again. /// - Rebid Logic /// - Storage mutate with O(log B) binary search to place the user back into bids. /// - Up to one additional event if unvouch takes place. /// - One storage removal. /// - One event for the judgement. 
/// /// Total Complexity: O(M + logM + B + X) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] fn judge_suspended_candidate(origin, who: T::AccountId, judgement: Judgement) { T::SuspensionJudgementOrigin::ensure_origin(origin)?; if let Some((value, kind)) = <SuspendedCandidates<T, I>>::get(&who) { match judgement { Judgement::Approve => { // Suspension Judgement origin has approved this candidate // Make sure we can pay them let pot = Self::pot(); ensure!(pot >= value, Error::<T, I>::InsufficientPot); // Try to add user as a member! Can fail with `MaxMember` limit. Self::add_member(&who)?; // Reduce next pot by payout <Pot<T, I>>::put(pot - value); // Add payout for new candidate let maturity = <system::Module<T>>::block_number() + Self::lock_duration(Self::members().len() as u32); Self::pay_accepted_candidate(&who, value, kind, maturity); } Judgement::Reject => { // Founder has rejected this candidate match kind { BidKind::Deposit(deposit) => { // Slash deposit and move it to the society account let _ = T::Currency::repatriate_reserved(&who, &Self::account_id(), deposit, BalanceStatus::Free); } BidKind::Vouch(voucher, _) => { // Ban the voucher from vouching again <Vouching<T, I>>::insert(&voucher, VouchingStatus::Banned); } } } Judgement::Rebid => { // Founder has taken no judgement, and candidate is placed back into the pool. let bids = <Bids<T, I>>::get(); Self::put_bid(bids, &who, value, kind); } } // Remove suspended candidate <SuspendedCandidates<T, I>>::remove(who); } else { Err(Error::<T, I>::NotSuspended)? } } /// Allows root origin to change the maximum number of members in society. /// Max membership count must be greater than 1. /// /// The dispatch origin for this call must be from _ROOT_. /// /// Parameters: /// - `max` - The maximum number of members for the society. /// /// # <weight> /// - One storage write to update the max. O(1) /// - One event. 
/// /// Total Complexity: O(1) /// # </weight> #[weight = T::MaximumBlockWeight::get() / 10] fn set_max_members(origin, max: u32) { ensure_root(origin)?; ensure!(max > 1, Error::<T, I>::MaxMembers); MaxMembers::<I>::put(max); Self::deposit_event(RawEvent::NewMaxMembers(max)); } fn on_initialize(n: T::BlockNumber) -> Weight { let mut members = vec![]; let mut weight = 0; // Run a candidate/membership rotation if (n % T::RotationPeriod::get()).is_zero() { members = <Members<T, I>>::get(); Self::rotate_period(&mut members); weight += T::MaximumBlockWeight::get() / 20; } // Run a challenge rotation if (n % T::ChallengePeriod::get()).is_zero() { // Only read members if not already read. if members.is_empty() { members = <Members<T, I>>::get(); } Self::rotate_challenge(&mut members); weight += T::MaximumBlockWeight::get() / 20; } weight } } } decl_error! { /// Errors for this module. pub enum Error for Module<T: Trait<I>, I: Instance> { /// An incorrect position was provided. BadPosition, /// User is not a member. NotMember, /// User is already a member. AlreadyMember, /// User is suspended. Suspended, /// User is not suspended. NotSuspended, /// Nothing to payout. NoPayout, /// Society already founded. AlreadyFounded, /// Not enough in pot to accept candidate. InsufficientPot, /// Member is already vouching or banned from vouching again. AlreadyVouching, /// Member is not vouching. NotVouching, /// Cannot remove the head of the chain. Head, /// Cannot remove the founder. Founder, /// User has already made a bid. AlreadyBid, /// User is already a candidate. AlreadyCandidate, /// User is not a candidate. NotCandidate, /// Too many members in the society. MaxMembers, /// The caller is not the founder. NotFounder, /// The caller is not the head. NotHead, } } decl_event! { /// Events for this module. pub enum Event<T, I=DefaultInstance> where AccountId = <T as system::Trait>::AccountId, Balance = BalanceOf<T, I> { /// The society is founded by the given identity. 
\[founder\] Founded(AccountId), /// A membership bid just happened. The given account is the candidate's ID and their offer /// is the second. \[candidate_id, offer\] Bid(AccountId, Balance), /// A membership bid just happened by vouching. The given account is the candidate's ID and /// their offer is the second. The vouching party is the third. \[candidate_id, offer, vouching\] Vouch(AccountId, Balance, AccountId), /// A \[candidate\] was dropped (due to an excess of bids in the system). AutoUnbid(AccountId), /// A \[candidate\] was dropped (by their request). Unbid(AccountId), /// A \[candidate\] was dropped (by request of who vouched for them). Unvouch(AccountId), /// A group of candidates have been inducted. The batch's primary is the first value, the /// batch in full is the second. \[primary, candidates\] Inducted(AccountId, Vec<AccountId>), /// A suspended member has been judged. \[who, judged\] SuspendedMemberJudgement(AccountId, bool), /// A \[candidate\] has been suspended CandidateSuspended(AccountId), /// A \[member\] has been suspended MemberSuspended(AccountId), /// A \[member\] has been challenged Challenged(AccountId), /// A vote has been placed \[candidate, voter, vote\] Vote(AccountId, AccountId, bool), /// A vote has been placed for a defending member \[voter, vote\] DefenderVote(AccountId, bool), /// A new \[max\] member count has been set NewMaxMembers(u32), /// Society is unfounded. \[founder\] Unfounded(AccountId), /// Some funds were deposited into the society account. \[value\] Deposit(Balance), } } /// Simple ensure origin struct to filter for the founder account. 
pub struct EnsureFounder<T>(sp_std::marker::PhantomData<T>);
impl<T: Trait> EnsureOrigin<T::Origin> for EnsureFounder<T> {
	type Success = T::AccountId;
	fn try_origin(o: T::Origin) -> Result<Self::Success, T::Origin> {
		// Accept only a signed origin whose account equals the stored `Founder`.
		o.into().and_then(|o| match (o, Founder::<T>::get()) {
			(system::RawOrigin::Signed(ref who), Some(ref f)) if who == f => Ok(who.clone()),
			// Anything else (unsigned, root, wrong signer, or no founder set) is
			// handed back unchanged so other origin filters may still try it.
			(r, _) => Err(T::Origin::from(r)),
		})
	}

	#[cfg(feature = "runtime-benchmarks")]
	fn successful_origin() -> T::Origin {
		let founder = Founder::<T>::get().expect("society founder should exist");
		T::Origin::from(system::RawOrigin::Signed(founder))
	}
}

/// Pick an item at pseudo-random from the slice, given the `rng`. `None` iff the slice is empty.
fn pick_item<'a, R: RngCore, T>(rng: &mut R, items: &'a [T]) -> Option<&'a T> {
	if items.is_empty() {
		None
	} else {
		Some(&items[pick_usize(rng, items.len() - 1)])
	}
}

/// Pick a new PRN, in the range [0, `max`] (inclusive).
fn pick_usize<'a, R: RngCore>(rng: &mut R, max: usize) -> usize {
	// NOTE: the `%` reduction introduces modulo bias when `max + 1` does not
	// divide 2^32; accepted here.
	(rng.next_u32() % (max as u32 + 1)) as usize
}

impl<T: Trait<I>, I: Instance> Module<T, I> {
	/// Puts a bid into storage ordered by smallest to largest value.
	/// Allows a maximum of 1000 bids in queue, removing largest value people first.
	fn put_bid(
		mut bids: Vec<Bid<T::AccountId, BalanceOf<T, I>>>,
		who: &T::AccountId,
		value: BalanceOf<T, I>,
		bid_kind: BidKind<T::AccountId, BalanceOf<T, I>>,
	) {
		const MAX_BID_COUNT: usize = 1000;

		match bids.binary_search_by(|bid| bid.value.cmp(&value)) {
			// Insert new elements after the existing ones. This ensures new bids
			// with the same bid value are further down the list than existing ones.
			Ok(pos) => {
				let different_bid = bids
					.iter()
					// Easily extract the index we are on
					.enumerate()
					// Skip ahead to the suggested position
					.skip(pos)
					// Keep skipping ahead until the position changes
					.skip_while(|(_, x)| x.value <= bids[pos].value)
					// Get the element when things changed
					.next();
				// If the element is not at the end of the list, insert the new element
				// in the spot.
				if let Some((p, _)) = different_bid {
					bids.insert(p, Bid { value, who: who.clone(), kind: bid_kind });
				// If the element is at the end of the list, push the element on the end.
				} else {
					bids.push(Bid { value, who: who.clone(), kind: bid_kind });
				}
			},
			Err(pos) => bids.insert(pos, Bid { value, who: who.clone(), kind: bid_kind }),
		}
		// Keep it reasonably small.
		if bids.len() > MAX_BID_COUNT {
			// Drop the largest bid (last element) and release whatever backed it:
			// a reserved deposit is unreserved, a vouch is cleared.
			let Bid { who: popped, kind, .. } = bids.pop().expect("b.len() > 1000; qed");
			match kind {
				BidKind::Deposit(deposit) => {
					let _ = T::Currency::unreserve(&popped, deposit);
				},
				BidKind::Vouch(voucher, _) => {
					<Vouching<T, I>>::remove(&voucher);
				},
			}
			Self::deposit_event(RawEvent::AutoUnbid(popped));
		}
		<Bids<T, I>>::put(bids);
	}

	/// Check a user is a bid.
	fn is_bid(bids: &Vec<Bid<T::AccountId, BalanceOf<T, I>>>, who: &T::AccountId) -> bool {
		// Bids are ordered by `value`, so we cannot binary search for a user.
		bids.iter().find(|bid| bid.who == *who).is_some()
	}

	/// Check a user is a candidate.
	fn is_candidate(
		candidates: &Vec<Bid<T::AccountId, BalanceOf<T, I>>>,
		who: &T::AccountId,
	) -> bool {
		// Looking up a candidate is the same as looking up a bid
		Self::is_bid(candidates, who)
	}

	/// Check a user is a member.
	fn is_member(members: &Vec<T::AccountId>, who: &T::AccountId) -> bool {
		members.binary_search(who).is_ok()
	}

	/// Add a member to the sorted members list. If the user is already a member, do nothing.
	/// Can fail when `MaxMember` limit is reached, but has no side-effects.
fn add_member(who: &T::AccountId) -> DispatchResult {
	let mut members = <Members<T, I>>::get();
	// Respect the configured membership cap before touching anything.
	ensure!(members.len() < MaxMembers::<I>::get() as usize, Error::<T, I>::MaxMembers);
	match members.binary_search(who) {
		// Add the new member
		Err(i) => {
			members.insert(i, who.clone());
			T::MembershipChanged::change_members_sorted(&[who.clone()], &[], &members);
			<Members<T, I>>::put(members);
			Ok(())
		},
		// User is already a member, do nothing.
		Ok(_) => Ok(()),
	}
}

/// Remove a member from the members list, except the Head.
///
/// NOTE: This does not correctly clean up a member from storage. It simply
/// removes them from the Members storage item.
pub fn remove_member(m: &T::AccountId) -> DispatchResult {
	// Head and Founder are protected accounts and can never be removed this way.
	ensure!(Self::head() != Some(m.clone()), Error::<T, I>::Head);
	ensure!(Self::founder() != Some(m.clone()), Error::<T, I>::Founder);

	let mut members = <Members<T, I>>::get();
	match members.binary_search(&m) {
		Err(_) => Err(Error::<T, I>::NotMember)?,
		Ok(i) => {
			members.remove(i);
			T::MembershipChanged::change_members_sorted(&[], &[m.clone()], &members[..]);
			<Members<T, I>>::put(members);
			Ok(())
		},
	}
}

/// End the current period and begin a new one.
fn rotate_period(members: &mut Vec<T::AccountId>) {
	let phrase = b"society_rotation";

	let mut pot = <Pot<T, I>>::get();

	// we'll need a random seed here.
	let seed = T::Randomness::random(phrase);
	// seed needs to be guaranteed to be 32 bytes.
	let seed = <[u8; 32]>::decode(&mut TrailingZeroInput::new(seed.as_ref()))
		.expect("input is padded with zeroes; qed");
	let mut rng = ChaChaRng::from_seed(seed);

	// we assume there's at least one member or this logic won't work.
	if !members.is_empty() {
		let candidates = <Candidates<T, I>>::take();
		// NOTE: This may cause member length to surpass `MaxMembers`, but results in no
		// consensus critical issues or side-effects. This is auto-correcting as members fall
		// out of society.
		members.reserve(candidates.len());

		// Payouts of this intake all mature at the same block, scaled by society size.
		let maturity = <system::Module<T>>::block_number()
			+ Self::lock_duration(members.len() as u32);

		let mut rewardees = Vec::new();
		let mut total_approvals = 0;
		let mut total_slash = <BalanceOf<T, I>>::zero();
		let mut total_payouts = <BalanceOf<T, I>>::zero();

		let accepted = candidates
			.into_iter()
			.filter_map(|Bid { value, who: candidate, kind }| {
				let mut approval_count = 0;

				// Creates a vector of (vote, member) for the given candidate
				// and tallies total number of approve votes for that candidate.
				let votes = members
					.iter()
					.filter_map(|m| <Votes<T, I>>::take(&candidate, m).map(|v| (v, m)))
					.inspect(|&(v, _)| {
						if v == Vote::Approve { approval_count += 1 }
					})
					.collect::<Vec<_>>();

				// Select one of the votes at random.
				// Note that `Vote::Skeptical` and `Vote::Reject` both reject the candidate.
				let is_accepted = pick_item(&mut rng, &votes).map(|x| x.0) == Some(Vote::Approve);

				let matching_vote = if is_accepted { Vote::Approve } else { Vote::Reject };

				let bad_vote = |m: &T::AccountId| {
					// Voter voted wrong way (or was just a lazy skeptic) then reduce their
					// payout and increase their strikes. after MaxStrikes then they go into
					// suspension.
					let amount = Self::slash_payout(m, T::WrongSideDeduction::get());

					let strikes = <Strikes<T, I>>::mutate(m, |s| {
						*s += 1;
						*s
					});
					if strikes >= T::MaxStrikes::get() {
						Self::suspend_member(m);
					}

					amount
				};

				// Collect the voters who had a matching vote.
				rewardees.extend(
					votes
						.into_iter()
						.filter_map(|(v, m)| {
							if v == matching_vote {
								Some(m)
							} else {
								// Mismatched voters are slashed; accumulate what was taken.
								total_slash += bad_vote(m);
								None
							}
						})
						.cloned(),
				);

				if is_accepted {
					total_approvals += approval_count;
					total_payouts += value;
					members.push(candidate.clone());

					Self::pay_accepted_candidate(&candidate, value, kind, maturity);

					// We track here the total_approvals so that every candidate has a unique
					// range of numbers from 0 to `total_approvals` with length `approval_count`
					// so each candidate is proportionally represented when selecting a
					// "primary" below.
					Some((candidate, total_approvals, value))
				} else {
					// Suspend Candidate
					<SuspendedCandidates<T, I>>::insert(&candidate, (value, kind));
					Self::deposit_event(RawEvent::CandidateSuspended(candidate));
					None
				}
			})
			.collect::<Vec<_>>();

		// Clean up all votes.
		<Votes<T, I>>::remove_all();

		// Reward one of the voters who voted the right way.
		if !total_slash.is_zero() {
			if let Some(winner) = pick_item(&mut rng, &rewardees) {
				// If we can't reward them, not much that can be done.
				Self::bump_payout(winner, maturity, total_slash);
			} else {
				// Move the slashed amount back from payouts account to local treasury.
				let _ = T::Currency::transfer(
					&Self::payouts(),
					&Self::account_id(),
					total_slash,
					AllowDeath,
				);
			}
		}

		// Fund the total payouts from the local treasury.
		if !total_payouts.is_zero() {
			// remove payout from pot and shift needed funds to the payout account.
			pot = pot.saturating_sub(total_payouts);

			// this should never fail since we ensure we can afford the payouts in a previous
			// block, but there's not much we can do to recover if it fails anyway.
			let _ = T::Currency::transfer(
				&Self::account_id(),
				&Self::payouts(),
				total_payouts,
				AllowDeath,
			);
		}

		// if at least one candidate was accepted...
		if !accepted.is_empty() {
			// select one as primary, randomly chosen from the accepted, weighted by approvals.
			// Choose a random number between 0 and `total_approvals`
			let primary_point = pick_usize(&mut rng, total_approvals - 1);
			// Find the zero bid or the user who falls on that point
			let primary = accepted
				.iter()
				.find(|e| e.2.is_zero() || e.1 > primary_point)
				.expect(
					"e.1 of final item == total_approvals; \
					worst case find will always return that item; qed",
				)
				.0
				.clone();

			let accounts = accepted.into_iter().map(|x| x.0).collect::<Vec<_>>();

			// Then write everything back out, signal the changed membership and leave an event.
			members.sort();
			// NOTE: This may cause member length to surpass `MaxMembers`, but results in no
			// consensus critical issues or side-effects. This is auto-correcting as members
			// fall out of society.
			<Members<T, I>>::put(&members[..]);
			<Head<T, I>>::put(&primary);

			T::MembershipChanged::change_members_sorted(&accounts, &[], &members);
			Self::deposit_event(RawEvent::Inducted(primary, accounts));
		}

		// Bump the pot by at most PeriodSpend, but less if there's not very much left in our
		// account.
		let unaccounted = T::Currency::free_balance(&Self::account_id()).saturating_sub(pot);
		pot += T::PeriodSpend::get().min(unaccounted / 2u8.into());

		<Pot<T, I>>::put(&pot);
	}

	// Setup the candidates for the new intake
	let candidates = Self::take_selected(members.len(), pot);
	<Candidates<T, I>>::put(&candidates);

	// Select sqrt(n) random members from the society and make them skeptics.
	// NOTE(review): the same member may be picked more than once here — presumably
	// acceptable since the later insert just overwrites; confirm intended.
	let pick_member = |_| pick_item(&mut rng, &members[..]).expect("exited if members empty; qed");
	for skeptic in (0..members.len().integer_sqrt()).map(pick_member) {
		for Bid { who: c, .. } in candidates.iter() {
			<Votes<T, I>>::insert(c, skeptic, Vote::Skeptic);
		}
	}
}

/// Attempt to slash the payout of some member. Return the total amount that was deducted.
fn slash_payout(who: &T::AccountId, value: BalanceOf<T, I>) -> BalanceOf<T, I> {
	let mut rest = value;
	let mut payouts = <Payouts<T, I>>::get(who);
	if !payouts.is_empty() {
		// Consume scheduled payouts front-to-back until `value` is covered.
		let mut dropped = 0;
		for (_, amount) in payouts.iter_mut() {
			if let Some(new_rest) = rest.checked_sub(&amount) {
				// not yet totally slashed after this one; drop it completely.
				rest = new_rest;
				dropped += 1;
			} else {
				// whole slash is accounted for.
				*amount -= rest;
				rest = Zero::zero();
				break
			}
		}
		<Payouts<T, I>>::insert(who, &payouts[dropped..]);
	}
	// Total actually deducted (may be less than `value` if payouts ran out).
	value - rest
}

/// Bump the payout amount of `who`, to be unlocked at the given block number.
fn bump_payout(who: &T::AccountId, when: T::BlockNumber, value: BalanceOf<T, I>) {
	if !value.is_zero() {
		<Payouts<T, I>>::mutate(who, |payouts| {
			match payouts.binary_search_by_key(&when, |x| x.0) {
				// A payout already matures at this exact block: add to it.
				Ok(index) => payouts[index].1 += value,
				// Otherwise insert, keeping the schedule sorted by unlock block.
				Err(index) => payouts.insert(index, (when, value)),
			}
		});
	}
}

/// Suspend a user, removing them from the member list.
fn suspend_member(who: &T::AccountId) {
	// Only mark as suspended if removal succeeded (i.e. an actual member and
	// neither Head nor Founder).
	if Self::remove_member(&who).is_ok() {
		<SuspendedMembers<T, I>>::insert(who, true);
		// Suspension clears the accumulated strikes.
		<Strikes<T, I>>::remove(who);
		Self::deposit_event(RawEvent::MemberSuspended(who.clone()));
	}
}

/// Pay an accepted candidate their bid value.
fn pay_accepted_candidate(
	candidate: &T::AccountId,
	value: BalanceOf<T, I>,
	kind: BidKind<T::AccountId, BalanceOf<T, I>>,
	maturity: T::BlockNumber,
) {
	let value = match kind {
		BidKind::Deposit(deposit) => {
			// In the case that a normal deposit bid is accepted we unreserve
			// the deposit.
			let _ = T::Currency::unreserve(candidate, deposit);
			value
		},
		BidKind::Vouch(voucher, tip) => {
			// Check that the voucher is still vouching, else some other logic may have removed
			// their status.
			if <Vouching<T, I>>::take(&voucher) == Some(VouchingStatus::Vouching) {
				// In the case that a vouched-for bid is accepted we unset the
				// vouching status and transfer the tip over to the voucher.
				// The tip is capped at `value`; the candidate keeps the remainder.
				Self::bump_payout(&voucher, maturity, tip.min(value));
				value.saturating_sub(tip)
			} else {
				value
			}
		},
	};

	Self::bump_payout(candidate, maturity, value);
}

/// End the current challenge period and start a new one.
fn rotate_challenge(members: &mut Vec<T::AccountId>) {
	// Assume there are members, else don't run this logic.
	if !members.is_empty() {
		// End current defender rotation
		if let Some(defender) = Self::defender() {
			let mut approval_count = 0;
			let mut rejection_count = 0;
			// Tallies total number of approve and reject votes for the defender.
			members.iter().filter_map(|m| <DefenderVotes<T, I>>::take(m)).for_each(
				|v| match v {
					Vote::Approve => approval_count += 1,
					_ => rejection_count += 1,
				},
			);

			// A tie (`<=`) counts as failing the challenge.
			if approval_count <= rejection_count {
				// User has failed the challenge
				Self::suspend_member(&defender);
				*members = Self::members();
			}

			// Clean up all votes.
			<DefenderVotes<T, I>>::remove_all();
		}

		// Avoid challenging if there's only two members since we never challenge the Head or
		// the Founder.
		if members.len() > 2 {
			// Start a new defender rotation
			let phrase = b"society_challenge";
			// we'll need a random seed here.
			let seed = T::Randomness::random(phrase);
			// seed needs to be guaranteed to be 32 bytes.
			let seed = <[u8; 32]>::decode(&mut TrailingZeroInput::new(seed.as_ref()))
				.expect("input is padded with zeroes; qed");
			let mut rng = ChaChaRng::from_seed(seed);
			// NOTE(review): excluding the first and last entries of the sorted list
			// presumably corresponds to the never-challenged Head/Founder — confirm,
			// since the list is sorted by account id, not by role.
			let chosen = pick_item(&mut rng, &members[1..members.len() - 1])
				.expect("exited if members empty; qed");
			<Defender<T, I>>::put(&chosen);
			Self::deposit_event(RawEvent::Challenged(chosen.clone()));
		} else {
			<Defender<T, I>>::kill();
		}
	}
}

/// The account ID of the treasury pot.
///
/// This actually does computation. If you need to keep using it, then make sure you cache the
/// value and only call this once.
pub fn account_id() -> T::AccountId {
	T::ModuleId::get().into_account()
}

/// The account ID of the payouts pot. This is where payouts are made from.
///
/// This actually does computation. If you need to keep using it, then make sure you cache the
/// value and only call this once.
pub fn payouts() -> T::AccountId {
	T::ModuleId::get().into_sub_account(b"payouts")
}

/// Return the duration of the lock, in blocks, with the given number of members.
///
/// This is a rather opaque calculation based on the formula here:
/// https://www.desmos.com/calculator/9itkal1tce
fn lock_duration(x: u32) -> T::BlockNumber {
	// `lock_pc` is in [0, 100): grows towards 100% of MaxLockDuration as `x` grows.
	let lock_pc = 100 - 50_000 / (x + 500);
	Percent::from_percent(lock_pc as u8) * T::MaxLockDuration::get()
}

/// Get a selection of bidding accounts such that the total bids is no greater than `Pot` and
/// the number of bids would not surpass `MaxMembers` if all were accepted.
///
/// May be empty.
pub fn take_selected(
	members_len: usize,
	pot: BalanceOf<T, I>,
) -> Vec<Bid<T::AccountId, BalanceOf<T, I>>> {
	let max_members = MaxMembers::<I>::get() as usize;
	// No more than 10 will be returned.
	let mut max_selections: usize = 10.min(max_members.saturating_sub(members_len));

	if max_selections > 0 {
		// Get the number of left-most bidders whose bids add up to less than `pot`.
		let mut bids = <Bids<T, I>>::get();

		// The list of selected candidates
		let mut selected = Vec::new();

		if bids.len() > 0 {
			// Can only select at most the length of bids
			max_selections = max_selections.min(bids.len());
			// Number of selected bids so far
			let mut count = 0;
			// Check if we have already selected a candidate with zero bid
			let mut zero_selected = false;
			// A running total of the cost to onboard these bids
			let mut total_cost: BalanceOf<T, I> = Zero::zero();

			// `retain` both filters the selected bids out of storage and keeps the rest.
			bids.retain(|bid| {
				if count < max_selections {
					// Handle zero bids. We only want one of them.
					if bid.value.is_zero() {
						// Select only the first zero bid
						if !zero_selected {
							selected.push(bid.clone());
							zero_selected = true;
							count += 1;
							return false
						}
					} else {
						total_cost += bid.value;
						// Select only as many users as the pot can support.
						if total_cost <= pot {
							selected.push(bid.clone());
							count += 1;
							return false
						}
					}
				}
				true
			});

			// No need to reset Bids if we're not taking anything.
			if count > 0 {
				<Bids<T, I>>::put(bids);
			}
		}
		selected
	} else {
		vec![]
	}
}
}

// Route incoming unbalanced funds (e.g. slashes/donations) into the society account.
impl<T: Trait> OnUnbalanced<NegativeImbalanceOf<T>> for Module<T> {
	fn on_nonzero_unbalanced(amount: NegativeImbalanceOf<T>) {
		let numeric_amount = amount.peek();

		// Must resolve into existing but better to be safe.
		let _ = T::Currency::resolve_creating(&Self::account_id(), amount);

		Self::deposit_event(RawEvent::Deposit(numeric_amount));
	}
}
/// Small demonstration of variable bindings: `y` keeps its initial value for the
/// whole program, while `x` is given a second value before the final print.
fn main() {
    // Immutable binding: never reassigned.
    let y = 30;
    // First value of `x`.
    let x = 45;

    println!("La valeur de y est {} ", y);
    println!("La valeur de x est {} ", x);

    // Rebind `x` by shadowing rather than mutating a `mut` binding —
    // the printed output is identical.
    let x = 60;

    println!("La valeur de y ne peut pas être changée");
    println!("La valeur de x est {} ", x);
}
use crate::bond::deposit_farm_share;
use crate::contract::{handle, init, query};
use crate::mock_querier::{mock_dependencies, WasmMockQuerier};
use crate::state::{pool_info_read, pool_info_store, read_config};
use cosmwasm_std::testing::{mock_env, MockApi, MockStorage, MOCK_CONTRACT_ADDR};
use cosmwasm_std::{
    from_binary, to_binary, CosmosMsg, Decimal, Extern, HumanAddr, Uint128, WasmMsg,
};
use cw20::{Cw20HandleMsg, Cw20ReceiveMsg};
use pylon_token::gov::HandleMsg as PylonGovHandleMsg;
use pylon_token::staking::HandleMsg as PylonStakingHandleMsg;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use spectrum_protocol::gov::HandleMsg as GovHandleMsg;
use spectrum_protocol::pylon_farm::{
    ConfigInfo, Cw20HookMsg, HandleMsg, PoolItem, PoolsResponse, QueryMsg, StateInfo, PoolItem, PoolsResponse, QueryMsg, StateInfo,
};
use std::fmt::Debug;

// Well-known mock addresses used throughout the tests.
const SPEC_GOV: &str = "spec_gov";
const SPEC_TOKEN: &str = "spec_token";
const MINE_GOV: &str = "mine_gov";
const MINE_TOKEN: &str = "mine_token";
const MINE_STAKING: &str = "mine_staking";
const TERRA_SWAP: &str = "terra_swap";
const TEST_CREATOR: &str = "creator";
const USER1: &str = "user1";
const USER2: &str = "user2";
const MINE_LP: &str = "mine_lp";
const SPY_TOKEN: &str = "spy_token";
const SPY_LP: &str = "spy_lp";

// Local mirror of the contract's reward-info query response so the test can
// deserialize the binary answer and compare it with `assert_eq!`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct RewardInfoResponse {
    pub staker_addr: HumanAddr,
    pub reward_infos: Vec<RewardInfoResponseItem>,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct RewardInfoResponseItem {
    pub asset_token: HumanAddr,
    pub farm_share_index: Decimal,
    pub auto_spec_share_index: Decimal,
    pub stake_spec_share_index: Decimal,
    pub bond_amount: Uint128,
    pub auto_bond_amount: Uint128,
    pub stake_bond_amount: Uint128,
    pub farm_share: Uint128,
    pub spec_share: Uint128,
    pub auto_bond_share: Uint128,
    pub stake_bond_share: Uint128,
    pub pending_farm_reward: Uint128,
    pub pending_spec_reward: Uint128,
    pub accum_spec_share: Uint128,
    pub locked_spec_share: Uint128,
    pub locked_spec_reward: Uint128,
}

// Single entry point: the sub-tests share contract state and must run in this order.
#[test]
fn test() {
    let mut deps = mock_dependencies(20, &[]);
    deps.querier.with_balance_percent(100);

    let _ = test_config(&mut deps);
    test_register_asset(&mut deps);
    test_bond(&mut deps);
}

// Initializes the contract, verifies config/state queries, and checks that only
// the current owner may update the config. Returns the (updated) config for reuse.
fn test_config(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) -> ConfigInfo {
    // test init & read config & read state
    let env = mock_env(TEST_CREATOR, &[]);
    let mut config = ConfigInfo {
        owner: HumanAddr::from(TEST_CREATOR),
        spectrum_gov: HumanAddr::from(SPEC_GOV),
        spectrum_token: HumanAddr::from(SPEC_TOKEN),
        pylon_gov: HumanAddr::from(MINE_GOV),
        pylon_token: HumanAddr::from(MINE_TOKEN),
        pylon_staking: HumanAddr::from(MINE_STAKING),
        terraswap_factory: HumanAddr::from(TERRA_SWAP),
        platform: Option::None,
        controller: Option::None,
        base_denom: "uusd".to_string(),
        community_fee: Decimal::zero(),
        platform_fee: Decimal::zero(),
        controller_fee: Decimal::zero(),
        deposit_fee: Decimal::zero(),
        lock_start: 0u64,
        lock_end: 0u64,
    };

    // success init
    let res = init(deps, env.clone(), config.clone());
    assert!(res.is_ok());

    // read config
    let msg = QueryMsg::config {};
    let res: ConfigInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(res, config.clone());

    // read state
    let msg = QueryMsg::state {};
    let res: StateInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res,
        StateInfo {
            previous_spec_share: Uint128::zero(),
            total_farm_share: Uint128::zero(),
            total_weight: 0u32,
            spec_share_index: Decimal::zero(),
        }
    );

    // alter config, validate owner
    let env = mock_env(SPEC_GOV, &[]);
    let msg = HandleMsg::update_config {
        owner: Some(HumanAddr::from(SPEC_GOV)),
        platform: None,
        controller: None,
        community_fee: None,
        platform_fee: None,
        controller_fee: None,
        deposit_fee: None,
        lock_start: None,
        lock_end: None,
    };
    // Sent by SPEC_GOV, but the owner is still TEST_CREATOR — must fail.
    let res = handle(deps, env.clone(), msg.clone());
    assert!(res.is_err());

    // success
    let env = mock_env(TEST_CREATOR, &[]);
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());

    let msg = QueryMsg::config {};
    let res: ConfigInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
    config.owner = HumanAddr::from(SPEC_GOV);
    assert_eq!(res, config.clone());

    config
}

// Registers the MINE pool (owner-only), checks the stored pool item, and
// verifies that a second registration attempt is rejected.
fn test_register_asset(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) {
    // no permission
    let env = mock_env(TEST_CREATOR, &[]);
    let msg = HandleMsg::register_asset {
        asset_token: HumanAddr::from(MINE_TOKEN),
        staking_token: HumanAddr::from(MINE_LP),
        weight: 1u32,
        auto_compound: true,
    };
    let res = handle(deps, env.clone(), msg.clone());
    assert!(res.is_err());

    // success
    let env = mock_env(SPEC_GOV, &[]);
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());

    // query pool info
    let msg = QueryMsg::pools {};
    let res: PoolsResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res,
        PoolsResponse {
            pools: vec![PoolItem {
                asset_token: HumanAddr::from(MINE_TOKEN),
                staking_token: HumanAddr::from(MINE_LP),
                weight: 1u32,
                auto_compound: true,
                farm_share: Uint128::zero(),
                state_spec_share_index: Decimal::zero(),
                stake_spec_share_index: Decimal::zero(),
                auto_spec_share_index: Decimal::zero(),
                farm_share_index: Decimal::zero(),
                total_stake_bond_amount: Uint128::zero(),
                total_stake_bond_share: Uint128::zero(),
                total_auto_bond_share: Uint128::zero(),
                reinvest_allowance: Uint128::zero(),
            }]
        }
    );

    // register again should fail
    let msg = HandleMsg::register_asset {
        asset_token: HumanAddr::from(SPY_TOKEN),
        staking_token: HumanAddr::from(SPY_LP),
        weight: 1u32,
        auto_compound: true,
    };
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_err());

    // read state
    let msg = QueryMsg::state {};
    let res: StateInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(res.total_weight, 1u32);
}

// Exercises the bond/unbond/withdraw flow for two users and checks the exact
// reward accounting (golden values explained in the block comment below).
fn test_bond(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) {
    // bond err
    let env = mock_env(TEST_CREATOR, &[]);
    let msg = HandleMsg::receive(Cw20ReceiveMsg {
        sender: HumanAddr::from(USER1),
        amount: Uint128::from(10000u128),
        msg: Some(
            to_binary(&Cw20HookMsg::bond {
                staker_addr: None,
                asset_token: HumanAddr::from(MINE_TOKEN),
                compound_rate: Some(Decimal::percent(60)),
            })
            .unwrap(),
        ),
    });
    // Only the LP token contract may trigger the bond hook — must fail here.
    let res = handle(deps, env.clone(), msg.clone());
    assert!(res.is_err());

    // bond success user1 1000 MINE-LP
    let env = mock_env(MINE_LP, &[]);
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());

    let config = read_config(&deps.storage).unwrap();
    let mut pool_info = pool_info_read(&deps.storage)
        .load(config.pylon_token.as_slice())
        .unwrap();
    // Simulate 500 farm share earned by the pool, then persist it.
    deposit_farm_share(deps, &mut pool_info, &config, Uint128::from(500u128)).unwrap();
    pool_info_store(&mut deps.storage)
        .save(config.pylon_token.as_slice(), &pool_info)
        .unwrap();
    deps.querier.with_token_balances(&[
        (
            &HumanAddr::from(MINE_STAKING),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(10000u128),
            )],
        ),
        (
            &HumanAddr::from(MINE_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(1000u128),
            )],
        ),
        (
            &HumanAddr::from(SPEC_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(2700u128),
            )],
        ),
    ]);

    // query balance for user1
    // 60% compound_rate: 6000 of the 10000 bond are auto, 4000 are stake.
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER1),
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![RewardInfoResponseItem {
            asset_token: HumanAddr::from(MINE_TOKEN),
            pending_farm_reward: Uint128::from(1000u128),
            pending_spec_reward: Uint128::from(2700u128),
            bond_amount: Uint128::from(10000u128),
            auto_bond_amount: Uint128::from(6000u128),
            stake_bond_amount: Uint128::from(4000u128),
            accum_spec_share: Uint128::from(2700u128),
            farm_share_index: Decimal::zero(),
            auto_spec_share_index: Decimal::zero(),
            stake_spec_share_index: Decimal::zero(),
            farm_share: Uint128::from(500u128),
            spec_share: Uint128::from(2700u128),
            auto_bond_share: Uint128::from(6000u128),
            stake_bond_share: Uint128::from(4000u128),
            locked_spec_share: Uint128::zero(),
            locked_spec_reward: Uint128::zero(),
        },]
    );

    // unbond 3000 MINE-LP
    let env = mock_env(USER1, &[]);
    let msg = HandleMsg::unbond {
        asset_token: HumanAddr::from(MINE_TOKEN),
        amount: Uint128::from(3000u128),
    };
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());
    // Expect: unbond from the staking contract, then return LP tokens to the user.
    assert_eq!(
        res.unwrap().messages,
        [
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MINE_STAKING),
                send: vec![],
                msg: to_binary(&PylonStakingHandleMsg::Unbond {
                    amount: Uint128::from(3000u128),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MINE_LP),
                send: vec![],
                msg: to_binary(&Cw20HandleMsg::Transfer {
                    recipient: HumanAddr::from(USER1),
                    amount: Uint128::from(3000u128),
                })
                .unwrap(),
            }),
        ]
    );

    // withdraw rewards
    let msg = HandleMsg::withdraw { asset_token: None };
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());
    // Expect both SPEC (gov withdraw + transfer) and MINE (gov withdraw + transfer).
    assert_eq!(
        res.unwrap().messages,
        vec![
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(SPEC_GOV),
                send: vec![],
                msg: to_binary(&GovHandleMsg::withdraw {
                    amount: Some(Uint128::from(2700u128)),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(SPEC_TOKEN),
                send: vec![],
                msg: to_binary(&Cw20HandleMsg::Transfer {
                    recipient: HumanAddr::from(USER1),
                    amount: Uint128::from(2700u128),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MINE_GOV),
                send: vec![],
                msg: to_binary(&PylonGovHandleMsg::WithdrawVotingTokens {
                    amount: Some(Uint128::from(1000u128)),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MINE_TOKEN),
                send: vec![],
                msg: to_binary(&Cw20HandleMsg::Transfer {
                    recipient: HumanAddr::from(USER1),
                    amount: Uint128::from(1000u128),
                })
                .unwrap(),
            }),
        ]
    );

    // After withdraw: staking balance reflects the 3000 unbond, gov balances drained.
    deps.querier.with_token_balances(&[
        (
            &HumanAddr::from(MINE_STAKING),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(7000u128),
            )],
        ),
        (
            &HumanAddr::from(MINE_GOV),
            &[(&HumanAddr::from(MOCK_CONTRACT_ADDR), &Uint128::from(0u128))],
        ),
        (
            &HumanAddr::from(SPEC_GOV),
            &[(&HumanAddr::from(MOCK_CONTRACT_ADDR), &Uint128::from(0u128))],
        ),
    ]);

    // query balance for user2
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER2),
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    // user2 has not bonded yet.
    assert_eq!(res.reward_infos, vec![]);

    // query balance for user1
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER1),
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![RewardInfoResponseItem {
            asset_token: HumanAddr::from(MINE_TOKEN),
            pending_farm_reward: Uint128::from(0u128),
            pending_spec_reward: Uint128::from(0u128),
            bond_amount: Uint128::from(7000u128),
            auto_bond_amount: Uint128::from(4200u128),
            stake_bond_amount: Uint128::from(2800u128),
            accum_spec_share: Uint128::from(2700u128),
            farm_share_index: Decimal::from_ratio(125u128, 1000u128),
            auto_spec_share_index: Decimal::from_ratio(270u128, 1000u128),
            stake_spec_share_index: Decimal::from_ratio(270u128, 1000u128),
            farm_share: Uint128::from(0u128),
            spec_share: Uint128::from(0u128),
            auto_bond_share: Uint128::from(4200u128),
            stake_bond_share: Uint128::from(2800u128),
            locked_spec_share: Uint128::zero(),
            locked_spec_reward: Uint128::zero(),
        },]
    );

    // bond user2 5000 MINE-LP auto-stake
    let env = mock_env(MINE_LP, &[]);
    let msg = HandleMsg::receive(Cw20ReceiveMsg {
        sender: HumanAddr::from(USER2),
        amount: Uint128::from(5000u128),
        msg: Some(
            to_binary(&Cw20HookMsg::bond {
                staker_addr: None,
                asset_token: HumanAddr::from(MINE_TOKEN),
                compound_rate: None,
            })
            .unwrap(),
        ),
    });
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());

    let mut pool_info = pool_info_read(&deps.storage)
        .load(config.pylon_token.as_slice())
        .unwrap();
    deposit_farm_share(deps, &mut pool_info, &config, Uint128::from(10000u128)).unwrap();
    pool_info_store(&mut deps.storage)
        .save(config.pylon_token.as_slice(), &pool_info)
        .unwrap();
    deps.querier.with_token_balances(&[
        (
            &HumanAddr::from(MINE_STAKING),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(12000u128),
            )],
        ),
        (
            &HumanAddr::from(MINE_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(5000u128),
            )],
        ),
        (
            &HumanAddr::from(SPEC_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(1000u128),
            )],
        ),
    ]);

    /*
        USER1 7000 (auto 4200, stake 2800)
        USER2 5000 (auto 0, stake 5000)
        Total lp 12000
        Total farm share 7800
        Farm share +10000
        USER1 Farm share = 28/78 * 10000 = 3589
        USER2 Farm share = 50/78 * 10000 = 6410
        Farm reward 5000
        USER1 Farm reward = 28/78 * 5000 = 1794
        USER2 Farm reward = 50/78 * 5000 = 3205
        SPEC reward +1000
        USER1 SPEC reward ~ 582
        USER2 SPEC reward ~ 416
    */

    // query balance for user1
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER1),
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![RewardInfoResponseItem {
            asset_token: HumanAddr::from(MINE_TOKEN),
            pending_farm_reward: Uint128::from(1794u128),
            pending_spec_reward: Uint128::from(582u128),
            bond_amount: Uint128::from(7000u128),
            auto_bond_amount: Uint128::from(4200u128),
            stake_bond_amount: Uint128::from(2800u128),
            accum_spec_share: Uint128::from(3282u128),
            farm_share_index: Decimal::from_ratio(125u128, 1000u128),
            auto_spec_share_index: Decimal::from_ratio(270u128, 1000u128),
            stake_spec_share_index: Decimal::from_ratio(270u128, 1000u128),
            farm_share: Uint128::from(3589u128),
            spec_share: Uint128::from(582u128),
            auto_bond_share: Uint128::from(4200u128),
            stake_bond_share: Uint128::from(2800u128),
            locked_spec_share: Uint128::zero(),
            locked_spec_reward: Uint128::zero(),
        },]
    );

    // query balance for user2
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER2),
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![RewardInfoResponseItem {
            asset_token: HumanAddr::from(MINE_TOKEN),
            pending_farm_reward: Uint128::from(3205u128),
            pending_spec_reward: Uint128::from(416u128),
            bond_amount: Uint128::from(5000u128),
            auto_bond_amount: Uint128::from(0u128),
            stake_bond_amount: Uint128::from(5000u128),
            accum_spec_share: Uint128::from(416u128),
            farm_share_index: Decimal::from_ratio(125u128, 1000u128),
            auto_spec_share_index: Decimal::from_ratio(270u128, 1000u128),
            stake_spec_share_index: Decimal::from_ratio(270u128, 1000u128),
            farm_share: Uint128::from(6410u128),
            spec_share: Uint128::from(416u128),
            auto_bond_share: Uint128::from(0u128),
            stake_bond_share: Uint128::from(5000u128),
            locked_spec_share: Uint128::zero(),
            locked_spec_reward: Uint128::zero(),
        },]
    );
}
#[doc = "Reader of register CH8_DBG_TCR"]
// Binds the crate's generic register-reader wrapper to this 32-bit register type.
pub type R = crate::R<u32, super::CH8_DBG_TCR>;
// Intentionally empty: no typed field accessors are defined for this reader here.
impl R {}
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![warn(clippy::all)] #![allow(clippy::blacklisted_name, unused_assignments)] struct Foo(u32); fn array() { let mut foo = [1, 2]; let temp = foo[0]; foo[0] = foo[1]; foo[1] = temp; foo.swap(0, 1); } fn slice() { let foo = &mut [1, 2]; let temp = foo[0]; foo[0] = foo[1]; foo[1] = temp; foo.swap(0, 1); } fn vec() { let mut foo = vec![1, 2]; let temp = foo[0]; foo[0] = foo[1]; foo[1] = temp; foo.swap(0, 1); } #[rustfmt::skip] fn main() { array(); slice(); vec(); let mut a = 42; let mut b = 1337; a = b; b = a; ; let t = a; a = b; b = t; let mut c = Foo(42); c.0 = a; a = c.0; ; let t = c.0; c.0 = a; a = t; }
use failure::{Error, ResultExt}; use std::str; use std::fs::File; use std::io::prelude::*; use std::time::{Duration, SystemTime}; use error::ServiceError; const UPTIME_FILE: &'static str = "/proc/uptime"; const NET_DEV_FILE: &'static str = "/proc/net/dev"; #[derive(Debug)] pub struct Interface { pub name: String, pub received_bytes: u64, pub received_packets: u64, pub received_errs: u64, pub received_drop: u64, pub received_fifo: u64, pub received_frame: u64, pub received_compressed: u64, pub received_multicast: u64, pub transmit_bytes: u64, pub transmit_packets: u64, pub transmit_errs: u64, pub transmit_drop: u64, pub transmit_fifo: u64, pub transmit_colls: u64, pub transmit_carrier: u64, pub transmit_compressed: u64, } pub fn get_startup_time() -> Result<SystemTime, Error> { let mut file = File::open(UPTIME_FILE).context(ServiceError::MissingFileError(String::from(UPTIME_FILE)))?; let mut buffer = Vec::new(); file.read_to_end(&mut buffer) .context(ServiceError::MissingFileError(String::from(UPTIME_FILE)))?; let file_content = str::from_utf8(&buffer); let uptime_sec = file_content .unwrap() .split_whitespace() .collect::<Vec<&str>>()[0] .parse::<f64>() .unwrap(); Ok(SystemTime::now() - Duration::from_secs(uptime_sec as u64)) } pub fn read_interfaces() -> Result<Vec<Interface>, Error> { let mut file = File::open(NET_DEV_FILE) .context(ServiceError::MissingFileError(String::from(NET_DEV_FILE)))?; let mut buffer = Vec::new(); file.read_to_end(&mut buffer) .context(ServiceError::MissingFileError(String::from(NET_DEV_FILE)))?; let file_content = str::from_utf8(&buffer); let split = file_content.unwrap().split("\n"); let mut vec = split.collect::<Vec<&str>>(); vec.drain(0..2); let mut interfaces: Vec<Interface> = Vec::new(); for s in vec { if s.len() == 0 { continue; } let vars = s.split_whitespace().collect::<Vec<&str>>(); let interface = Interface { name: vars[0].to_string().replace(":", ""), received_bytes: vars[1].parse::<u64>().unwrap(), received_packets: 
vars[2].parse::<u64>().unwrap(), received_errs: vars[3].parse::<u64>().unwrap(), received_drop: vars[4].parse::<u64>().unwrap(), received_fifo: vars[5].parse::<u64>().unwrap(), received_frame: vars[6].parse::<u64>().unwrap(), received_compressed: vars[7].parse::<u64>().unwrap(), received_multicast: vars[8].parse::<u64>().unwrap(), transmit_bytes: vars[9].parse::<u64>().unwrap(), transmit_packets: vars[10].parse::<u64>().unwrap(), transmit_errs: vars[11].parse::<u64>().unwrap(), transmit_drop: vars[12].parse::<u64>().unwrap(), transmit_fifo: vars[13].parse::<u64>().unwrap(), transmit_colls: vars[14].parse::<u64>().unwrap(), transmit_carrier: vars[15].parse::<u64>().unwrap(), transmit_compressed: vars[16].parse::<u64>().unwrap(), }; interfaces.push(interface); } Ok(interfaces) }
extern crate connect_4;

use connect_4::*;
use std::io;

/// Runs an interactive two-player Connect 4 game on stdin/stdout,
/// alternating turns until the board reports a win.
fn main() {
    let mut game = Connect4::new();
    let mut player = Player::Red;

    while game.state() != State::Won {
        print!("{}", game.to_string());

        match take_input(player) {
            // Board columns are 1-based; `..=` replaces the deprecated
            // `...` inclusive-range pattern syntax.
            column @ 1..=7 => {
                if let Err(e) = game.play(player, column) {
                    println!("{}", e);
                    continue;
                }
                // Hand the turn over only after a legal move.
                player = match player {
                    Player::Red => Player::Yellow,
                    Player::Yellow => Player::Red,
                }
            },
            _ => println!("Can't play there."),
        }
    }

    println!("\nYou win, {:?}.{}", game.winner.unwrap(), game.to_string());
}

/// Prompts `player` for a column and re-prompts until the input parses as
/// a `usize`. A loop replaces the previous unbounded recursion (each bad
/// input used to add a stack frame). Range checking is left to the caller.
fn take_input(player: Player) -> usize {
    loop {
        println!("Where would you like to play, {}?", player.pretty_print());

        let mut input = String::new();
        io::stdin()
            .read_line(&mut input)
            .expect("Failure to read line.");

        if let Ok(column) = input.trim().parse() {
            return column;
        }
    }
}
#![no_std]
#![no_main]

extern crate alloc;

// NOTE(review): several imports and constants below are unused by `call()`;
// presumably shared with sibling test contracts — confirm before pruning.
use core::convert::TryFrom;

use alloc::{
    string::{String, ToString},
    vec::Vec,
};

use casper_contract::{
    contract_api::{runtime, storage},
    unwrap_or_revert::UnwrapOrRevert,
};
use casper_types::{
    account::AccountHash,
    contracts::{
        EntryPoint, EntryPointAccess, EntryPointType, EntryPoints, NamedKeys,
        CONTRACT_INITIAL_VERSION,
    },
    runtime_args, BlockTime, CLType, ContractHash, ContractPackageHash, ContractVersion, Key,
    RuntimeArgs, U256,
};

const PACKAGE_HASH_KEY: &str = "package_hash_key";
const PACKAGE_ACCESS_KEY: &str = "package_access_key";
const CONTRACT_HASH_KEY: &str = "contract_hash_key";
const CONTRACT_CODE: &str = "contract_code_test";
const SESSION_CODE: &str = "session_code_test";
const NEW_KEY: &str = "new_key";
const NAMED_KEY: &str = "contract_named_key";
const CONTRACT_VERSION: &str = "contract_version";

// Session entry point: stores a stringified U256 and the current block time
// under named keys, then exercises `Key` parsing/conversion.
#[no_mangle]
pub extern "C" fn call() {
    // Session contract
    let a: U256 = U256::one();
    let value: String = a.to_string();
    let actual_block_time: BlockTime = runtime::get_blocktime();
    let b: u64 = actual_block_time.into();
    // Persist both values as fresh URefs in the caller's named keys.
    runtime::put_key("stringvalue", storage::new_uref(value).into());
    runtime::put_key("blocktimevalue", storage::new_uref(b).into());
    // NOTE(review): "hash-xxxx" does not look like a valid formatted key
    // string, so this `unwrap()` likely reverts at runtime — confirm intent.
    let a = Key::from_formatted_str("hash-xxxx").unwrap();
    // Identity conversion (`From<Key> for Key`); `c` is unused.
    let b = Key::from(a);
    let c = b;
}
use std::fmt;

/// Visitor over the file-system element hierarchy (GoF Visitor pattern).
/// `Box<&Visitor>` double indirection replaced by a plain `&dyn Visitor`,
/// and bare (pre-2018) trait objects upgraded to `dyn Trait`.
trait Visitor {
    fn visit_file(&self, file: &File, cur_dir: String);
    fn visit_directory(&self, directory: &Directory, cur_dir: String);
}

/// Anything that can accept a visitor.
trait Element {
    fn accept(&self, v: &dyn Visitor, cur_dir: String);
}

/// A directory entry: anything with a name and a (possibly recursive) size.
trait Entry: Element {
    fn get_name(&self) -> String;
    fn get_size(&self) -> u32;
}

/// Leaf node: a plain file with a fixed size.
struct File {
    name: String,
    size: u32,
}

impl File {
    fn new(name: String, size: u32) -> File {
        File { name, size }
    }
}

impl Element for File {
    fn accept(&self, v: &dyn Visitor, cur_dir: String) {
        v.visit_file(self, cur_dir);
    }
}

impl Entry for File {
    fn get_name(&self) -> String {
        self.name.clone()
    }

    fn get_size(&self) -> u32 {
        self.size
    }
}

impl fmt::Display for File {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} ({})", self.get_name(), self.get_size())
    }
}

/// Composite node: a directory holding heterogeneous entries.
struct Directory {
    name: String,
    dir: Vec<Box<dyn Entry>>,
}

impl Directory {
    fn new(name: String) -> Directory {
        Directory {
            name,
            dir: Vec::new(),
        }
    }

    fn add(&mut self, entry: Box<dyn Entry>) {
        self.dir.push(entry);
    }
}

impl Element for Directory {
    fn accept(&self, v: &dyn Visitor, cur_dir: String) {
        v.visit_directory(self, cur_dir);
    }
}

impl Entry for Directory {
    fn get_name(&self) -> String {
        self.name.clone()
    }

    /// A directory's size is the recursive sum of its entries' sizes.
    fn get_size(&self) -> u32 {
        self.dir.iter().map(|entry| entry.get_size()).sum()
    }
}

impl fmt::Display for Directory {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} ({})", self.get_name(), self.get_size())
    }
}

/// Visitor that prints every entry with its full path.
struct ListVisitor {}

impl ListVisitor {
    fn new() -> ListVisitor {
        ListVisitor {}
    }
}

impl Visitor for ListVisitor {
    fn visit_file(&self, file: &File, cur_dir: String) {
        println!("{}/{}", cur_dir, file);
    }

    fn visit_directory(&self, directory: &Directory, cur_dir: String) {
        println!("{}/{}", cur_dir, directory);
        // Root is passed in as "" so paths render as "/root/...".
        // (Comparing against "" directly avoids the old `"".to_string()`
        // allocation on every visit.)
        let current_dir = if cur_dir == "" {
            format!("/{}", directory.get_name())
        } else {
            format!("{}/{}", cur_dir, directory.get_name())
        };
        for entry in &directory.dir {
            entry.accept(self, current_dir.clone());
        }
    }
}

fn main() {
    println!("Making root entries...");
    let mut root_dir = Box::new(Directory::new("root".to_string()));
    let mut bin_dir = Box::new(Directory::new("bin".to_string()));
    let tmp_dir = Box::new(Directory::new("tmp".to_string()));
    let mut usr_dir = Box::new(Directory::new("usr".to_string()));
    bin_dir.add(Box::new(File::new("vi".to_string(), 10000)));
    bin_dir.add(Box::new(File::new("latex".to_string(), 20000)));
    root_dir.add(bin_dir);
    root_dir.add(tmp_dir);
    root_dir.accept(&ListVisitor::new(), "".to_string());

    println!();
    println!("Making user entries...");
    let mut yuki = Box::new(Directory::new("yuki".to_string()));
    yuki.add(Box::new(File::new("diary.html".to_string(), 100)));
    yuki.add(Box::new(File::new("Composite.java".to_string(), 200)));
    let mut hanako = Box::new(Directory::new("hanako".to_string()));
    hanako.add(Box::new(File::new("memo.tex".to_string(), 300)));
    let mut tomura = Box::new(Directory::new("tomura".to_string()));
    tomura.add(Box::new(File::new("game.doc".to_string(), 400)));
    tomura.add(Box::new(File::new("jumk.mail".to_string(), 500)));
    usr_dir.add(yuki);
    usr_dir.add(hanako);
    usr_dir.add(tomura);
    root_dir.add(usr_dir);
    root_dir.accept(&ListVisitor::new(), "".to_string());
}
use bonuses; /// An object which tracks natural armor enhancement bonus values. pub struct NaturalArmorEnhancementBonus { tracker: bonuses::NonStackingTracker, } impl NaturalArmorEnhancementBonus { /// Create an instance of NaturalArmorEnhancementBonus. pub fn new() -> NaturalArmorEnhancementBonus { NaturalArmorEnhancementBonus { tracker: bonuses::NonStackingTracker::new() } } } impl bonuses::BonusTracker for NaturalArmorEnhancementBonus { /// Returns the total bonus. fn total(&self) -> u8 { return self.tracker.total(); } /// Adds a value. fn add(&mut self, amt: u8) -> &mut NaturalArmorEnhancementBonus { self.tracker.add(amt); return self; } /// Removes a value. fn remove(&mut self, amt: u8) -> &mut NaturalArmorEnhancementBonus { self.tracker.remove(amt); return self; } }
#[derive(Debug)]
enum KeypadDirective {
    Up,
    Right,
    Down,
    Left,
}

/// A cell is `Some(label)`, or `None` for the cut corners of the pad.
type Button = Option<char>;

/// Diamond-shaped 5x5 keypad; tracks a cursor as (row, column) and
/// refuses moves that would leave the labelled area.
#[derive(Debug)]
pub struct Keypad {
    buttons: [[Button; 5]; 5],
    position: (usize, usize),
}

impl Keypad {
    /// Creates a keypad with its cursor at `position` (row, column).
    pub fn new(position: (usize, usize)) -> Keypad {
        Keypad {
            buttons: [
                [None, None, Some('1'), None, None],
                [None, Some('2'), Some('3'), Some('4'), None],
                [Some('5'), Some('6'), Some('7'), Some('8'), Some('9')],
                [None, Some('A'), Some('B'), Some('C'), None],
                [None, None, Some('D'), None, None],
            ],
            position,
        }
    }

    /// Returns the keypad resulting from one move character
    /// ('U', 'R', 'D', 'L'). Moves off the 5x5 grid are clamped, and
    /// moves landing on an unlabelled cell leave the position unchanged.
    pub fn adjust(&self, directive: char) -> Keypad {
        let (row, col) = self.position;
        let target = match self.keypad_directive_from(directive) {
            KeypadDirective::Up => (row.saturating_sub(1), col),
            KeypadDirective::Down => (if row == 4 { 4 } else { row + 1 }, col),
            KeypadDirective::Left => (row, col.saturating_sub(1)),
            KeypadDirective::Right => (row, if col == 4 { 4 } else { col + 1 }),
        };
        let candidate = Keypad::new(target);
        if candidate.current_button().is_some() {
            candidate
        } else {
            Keypad::new(self.position)
        }
    }

    /// The button under the cursor, or `None` on an unlabelled cell.
    pub fn current_button(&self) -> Button {
        self.buttons[self.position.0][self.position.1]
    }

    /// Parses a raw move character; panics on anything unrecognized.
    fn keypad_directive_from(&self, raw_directive: char) -> KeypadDirective {
        match raw_directive {
            'U' => KeypadDirective::Up,
            'R' => KeypadDirective::Right,
            'D' => KeypadDirective::Down,
            'L' => KeypadDirective::Left,
            _ => panic!("BAD INPUT"),
        }
    }
}
// svd2rust-generated register API for the FDCAN TXESC register;
// annotated for review only — regenerate rather than hand-edit.
#[doc = "Register `TXESC` reader"]
pub type R = crate::R<TXESC_SPEC>;
#[doc = "Register `TXESC` writer"]
pub type W = crate::W<TXESC_SPEC>;
#[doc = "Field `TBDS` reader - Tx Buffer Data Field Size:"]
pub type TBDS_R = crate::FieldReader;
#[doc = "Field `TBDS` writer - Tx Buffer Data Field Size:"]
pub type TBDS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
impl R {
    #[doc = "Bits 0:2 - Tx Buffer Data Field Size:"]
    #[inline(always)]
    pub fn tbds(&self) -> TBDS_R {
        // The TBDS field occupies the low three bits of the register.
        TBDS_R::new((self.bits & 7) as u8)
    }
}
impl W {
    #[doc = "Bits 0:2 - Tx Buffer Data Field Size:"]
    #[inline(always)]
    #[must_use]
    pub fn tbds(&mut self) -> TBDS_W<TXESC_SPEC, 0> {
        TBDS_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "FDCAN Tx Buffer Element Size Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`txesc::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`txesc::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TXESC_SPEC;
impl crate::RegisterSpec for TXESC_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`txesc::R`](R) reader structure"]
impl crate::Readable for TXESC_SPEC {}
#[doc = "`write(|w| ..)` method takes [`txesc::W`](W) writer structure"]
impl crate::Writable for TXESC_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets TXESC to value 0"]
impl crate::Resettable for TXESC_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use crate::{NumBytes, Read, Write};

/// Newtype around a raw 64-byte (512-bit) checksum value.
/// TODO Read, Write, `NumBytes` needs a custom implementation based on `fixed_bytes`
#[derive(Read, Write, NumBytes, Clone, Copy)]
#[eosio_core_root_path = "crate"]
pub struct Checksum512([u8; 64]);

impl Checksum512 {
    /// Borrows the raw digest bytes.
    pub fn as_bytes(&self) -> &[u8] {
        &self.0
    }

    /// Copies the digest out as an owned 64-byte array.
    pub const fn to_bytes(&self) -> [u8; 64] {
        self.0
    }
}

impl From<[u8; 64]> for Checksum512 {
    #[inline]
    fn from(value: [u8; 64]) -> Self {
        Self(value)
    }
}

impl From<Checksum512> for [u8; 64] {
    #[inline]
    fn from(value: Checksum512) -> Self {
        value.0
    }
}
use super::handler::Handler;
use super::user::User;
use crate::errors::SmileError;
use crate::graphql::schema::Context;
use crate::schema::blog_info;
use async_std::task;
use diesel::{prelude::*, update};

/// Blog metadata row (name + description), keyed by `name`, exposed as a
/// GraphQL object and mapped to the `blog_info` table.
#[derive(
    juniper::GraphQLObject,
    Debug,
    Queryable,
    AsChangeset,
    Identifiable,
    Serialize,
    Deserialize,
    PartialEq,
    Associations,
)]
#[primary_key(name)]
#[table_name = "blog_info"]
pub struct InfoSchema {
    pub name: String,
    pub description: String,
}

impl InfoSchema {
    /// Fetches the first `blog_info` row (presumably the table holds a
    /// single row — confirm).
    pub fn get(conn: &MysqlConnection) -> Result<Self, SmileError> {
        blog_info::table.first::<InfoSchema>(conn).map_err(SmileError::from)
    }

    /// Overwrites the blog info with `values`, but only when the
    /// requesting user is an admin; returns `Ok(true)` on success and
    /// `Err(SmileError::Unauthorized)` otherwise.
    pub fn set(context: &Context, values: InfoSchema) -> Result<bool, SmileError> {
        let conn: &MysqlConnection = &context.conn;
        // NOTE(review): the async block contains no awaits, so `block_on`
        // looks unnecessary here — confirm whether it can be dropped.
        task::block_on(async {
            // NOTE(review): both unwraps panic when `user_id` is absent or
            // the lookup fails — consider propagating as SmileError instead.
            let user: Box<User> =
                User::find_by_id(&context.user_id.to_owned().unwrap(), conn).unwrap();
            return if user.isAdmin.unwrap() {
                update(blog_info::table)
                    .set(values)
                    .execute(conn)
                    .map(|_| true)
                    .map_err(SmileError::from)
            } else {
                Err(SmileError::Unauthorized)
            };
        })
    }
}
/// Demonstrates string slices: each sub-slice borrows from the same
/// heap-allocated `String` buffer rather than copying it.
fn main() {
    // Slices borrow heap data; fixed-size arrays live on the stack.
    let message = String::from("Hola mundo, desde el curso de Rust!");

    // `[..4]` is shorthand for `[0..4]` — [start..end].
    let greeting = &message[..4];
    let remainder = &message[4..];
    let full_message = &message[..];

    println!("El mensaje es: {}", message);
    println!("El slice es: {}", greeting);
    println!("El slice es: {}", remainder);
    println!("El slice es: {}", full_message);
}
use std::path::PathBuf;

use crate::cli::{CmdError, CmdHandler, CmdResult};

/// The `init` subcommand. The `with_git` flag presumably toggles git
/// repository initialisation — confirm once `exec` is implemented.
pub struct InitCommand {
    with_git: bool,
}

impl InitCommand {
    /// Builds the command, storing the `with_git` flag for later use.
    pub fn new(with_git: bool) -> InitCommand {
        InitCommand { with_git }
    }
}

impl CmdHandler for InitCommand {
    // NOTE(review): stub — ignores `_path` (and `with_git`) and always
    // fails with `CmdError::UnknownError`.
    fn exec(&self, _path: PathBuf) -> CmdResult {
        Err(CmdError::UnknownError.into())
    }
}
/// Connection settings for a single client endpoint.
struct Client {
    /// Target address string to connect to.
    addr: String,
    /// Optional password; `None` means no authentication is configured.
    password: Option<String>,
    /// Upper bound on idle connections kept around.
    max_idle_conns: usize,
}

impl Client {
    /// Default cap on idle connections (replaces the former magic `4`).
    const DEFAULT_MAX_IDLE_CONNS: usize = 4;

    /// Creates a client for `addr` with the default idle-connection cap.
    /// (Field-init shorthand replaces the redundant `addr: addr` forms.)
    fn new(addr: String, password: Option<String>) -> Client {
        Client {
            addr,
            password,
            max_idle_conns: Self::DEFAULT_MAX_IDLE_CONNS,
        }
    }
}
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
//
// NOTE(review): generated GLib enum bindings for vte — the comments below
// are review annotations only; regenerate with gir instead of hand-editing.

use glib::error::ErrorDomain;
use glib::translate::*;
use glib::value::FromValue;
use glib::value::FromValueOptional;
use glib::value::SetValue;
use glib::value::Value;
use glib::Quark;
use glib::StaticType;
use glib::Type;
use gobject_sys;
use std::fmt;
use vte_sys;

// --- CursorBlinkMode: maps VteCursorBlinkMode to/from a Rust enum. ---
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum CursorBlinkMode {
    System,
    On,
    Off,
    #[doc(hidden)]
    __Unknown(i32),
}

impl fmt::Display for CursorBlinkMode {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "CursorBlinkMode::{}", match *self {
            CursorBlinkMode::System => "System",
            CursorBlinkMode::On => "On",
            CursorBlinkMode::Off => "Off",
            _ => "Unknown",
        })
    }
}

#[doc(hidden)]
impl ToGlib for CursorBlinkMode {
    type GlibType = vte_sys::VteCursorBlinkMode;

    fn to_glib(&self) -> vte_sys::VteCursorBlinkMode {
        match *self {
            CursorBlinkMode::System => vte_sys::VTE_CURSOR_BLINK_SYSTEM,
            CursorBlinkMode::On => vte_sys::VTE_CURSOR_BLINK_ON,
            CursorBlinkMode::Off => vte_sys::VTE_CURSOR_BLINK_OFF,
            CursorBlinkMode::__Unknown(value) => value
        }
    }
}

#[doc(hidden)]
impl FromGlib<vte_sys::VteCursorBlinkMode> for CursorBlinkMode {
    fn from_glib(value: vte_sys::VteCursorBlinkMode) -> Self {
        skip_assert_initialized!();
        match value {
            0 => CursorBlinkMode::System,
            1 => CursorBlinkMode::On,
            2 => CursorBlinkMode::Off,
            value => CursorBlinkMode::__Unknown(value),
        }
    }
}

impl StaticType for CursorBlinkMode {
    fn static_type() -> Type {
        unsafe { from_glib(vte_sys::vte_cursor_blink_mode_get_type()) }
    }
}

impl<'a> FromValueOptional<'a> for CursorBlinkMode {
    unsafe fn from_value_optional(value: &Value) -> Option<Self> {
        Some(FromValue::from_value(value))
    }
}

impl<'a> FromValue<'a> for CursorBlinkMode {
    unsafe fn from_value(value: &Value) -> Self {
        from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
    }
}

impl SetValue for CursorBlinkMode {
    unsafe fn set_value(value: &mut Value, this: &Self) {
        gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib())
    }
}

// --- CursorShape: maps VteCursorShape. ---
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum CursorShape {
    Block,
    Ibeam,
    Underline,
    #[doc(hidden)]
    __Unknown(i32),
}

impl fmt::Display for CursorShape {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "CursorShape::{}", match *self {
            CursorShape::Block => "Block",
            CursorShape::Ibeam => "Ibeam",
            CursorShape::Underline => "Underline",
            _ => "Unknown",
        })
    }
}

#[doc(hidden)]
impl ToGlib for CursorShape {
    type GlibType = vte_sys::VteCursorShape;

    fn to_glib(&self) -> vte_sys::VteCursorShape {
        match *self {
            CursorShape::Block => vte_sys::VTE_CURSOR_SHAPE_BLOCK,
            CursorShape::Ibeam => vte_sys::VTE_CURSOR_SHAPE_IBEAM,
            CursorShape::Underline => vte_sys::VTE_CURSOR_SHAPE_UNDERLINE,
            CursorShape::__Unknown(value) => value
        }
    }
}

#[doc(hidden)]
impl FromGlib<vte_sys::VteCursorShape> for CursorShape {
    fn from_glib(value: vte_sys::VteCursorShape) -> Self {
        skip_assert_initialized!();
        match value {
            0 => CursorShape::Block,
            1 => CursorShape::Ibeam,
            2 => CursorShape::Underline,
            value => CursorShape::__Unknown(value),
        }
    }
}

impl StaticType for CursorShape {
    fn static_type() -> Type {
        unsafe { from_glib(vte_sys::vte_cursor_shape_get_type()) }
    }
}

impl<'a> FromValueOptional<'a> for CursorShape {
    unsafe fn from_value_optional(value: &Value) -> Option<Self> {
        Some(FromValue::from_value(value))
    }
}

impl<'a> FromValue<'a> for CursorShape {
    unsafe fn from_value(value: &Value) -> Self {
        from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
    }
}

impl SetValue for CursorShape {
    unsafe fn set_value(value: &mut Value, this: &Self) {
        gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib())
    }
}

// --- EraseBinding: maps VteEraseBinding. ---
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum EraseBinding {
    Auto,
    AsciiBackspace,
    AsciiDelete,
    DeleteSequence,
    Tty,
    #[doc(hidden)]
    __Unknown(i32),
}

impl fmt::Display for EraseBinding {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "EraseBinding::{}", match *self {
            EraseBinding::Auto => "Auto",
            EraseBinding::AsciiBackspace => "AsciiBackspace",
            EraseBinding::AsciiDelete => "AsciiDelete",
            EraseBinding::DeleteSequence => "DeleteSequence",
            EraseBinding::Tty => "Tty",
            _ => "Unknown",
        })
    }
}

#[doc(hidden)]
impl ToGlib for EraseBinding {
    type GlibType = vte_sys::VteEraseBinding;

    fn to_glib(&self) -> vte_sys::VteEraseBinding {
        match *self {
            EraseBinding::Auto => vte_sys::VTE_ERASE_AUTO,
            EraseBinding::AsciiBackspace => vte_sys::VTE_ERASE_ASCII_BACKSPACE,
            EraseBinding::AsciiDelete => vte_sys::VTE_ERASE_ASCII_DELETE,
            EraseBinding::DeleteSequence => vte_sys::VTE_ERASE_DELETE_SEQUENCE,
            EraseBinding::Tty => vte_sys::VTE_ERASE_TTY,
            EraseBinding::__Unknown(value) => value
        }
    }
}

#[doc(hidden)]
impl FromGlib<vte_sys::VteEraseBinding> for EraseBinding {
    fn from_glib(value: vte_sys::VteEraseBinding) -> Self {
        skip_assert_initialized!();
        match value {
            0 => EraseBinding::Auto,
            1 => EraseBinding::AsciiBackspace,
            2 => EraseBinding::AsciiDelete,
            3 => EraseBinding::DeleteSequence,
            4 => EraseBinding::Tty,
            value => EraseBinding::__Unknown(value),
        }
    }
}

impl StaticType for EraseBinding {
    fn static_type() -> Type {
        unsafe { from_glib(vte_sys::vte_erase_binding_get_type()) }
    }
}

impl<'a> FromValueOptional<'a> for EraseBinding {
    unsafe fn from_value_optional(value: &Value) -> Option<Self> {
        Some(FromValue::from_value(value))
    }
}

impl<'a> FromValue<'a> for EraseBinding {
    unsafe fn from_value(value: &Value) -> Self {
        from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
    }
}

impl SetValue for EraseBinding {
    unsafe fn set_value(value: &mut Value, this: &Self) {
        gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib())
    }
}

// --- Format (vte >= 0.50 only): maps VteFormat; note from_glib starts at 1. ---
#[cfg(any(feature = "v0_50", feature = "dox"))]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum Format {
    Text,
    Html,
    #[doc(hidden)]
    __Unknown(i32),
}

#[cfg(any(feature = "v0_50", feature = "dox"))]
impl fmt::Display for Format {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Format::{}", match *self {
            Format::Text => "Text",
            Format::Html => "Html",
            _ => "Unknown",
        })
    }
}

#[cfg(any(feature = "v0_50", feature = "dox"))]
#[doc(hidden)]
impl ToGlib for Format {
    type GlibType = vte_sys::VteFormat;

    fn to_glib(&self) -> vte_sys::VteFormat {
        match *self {
            Format::Text => vte_sys::VTE_FORMAT_TEXT,
            Format::Html => vte_sys::VTE_FORMAT_HTML,
            Format::__Unknown(value) => value
        }
    }
}

#[cfg(any(feature = "v0_50", feature = "dox"))]
#[doc(hidden)]
impl FromGlib<vte_sys::VteFormat> for Format {
    fn from_glib(value: vte_sys::VteFormat) -> Self {
        skip_assert_initialized!();
        match value {
            1 => Format::Text,
            2 => Format::Html,
            value => Format::__Unknown(value),
        }
    }
}

#[cfg(any(feature = "v0_50", feature = "dox"))]
impl StaticType for Format {
    fn static_type() -> Type {
        unsafe { from_glib(vte_sys::vte_format_get_type()) }
    }
}

#[cfg(any(feature = "v0_50", feature = "dox"))]
impl<'a> FromValueOptional<'a> for Format {
    unsafe fn from_value_optional(value: &Value) -> Option<Self> {
        Some(FromValue::from_value(value))
    }
}

#[cfg(any(feature = "v0_50", feature = "dox"))]
impl<'a> FromValue<'a> for Format {
    unsafe fn from_value(value: &Value) -> Self {
        from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
    }
}

#[cfg(any(feature = "v0_50", feature = "dox"))]
impl SetValue for Format {
    unsafe fn set_value(value: &mut Value, this: &Self) {
        gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib())
    }
}

// --- PtyError: maps VtePtyError; also a GError domain. ---
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum PtyError {
    PtyHelperFailed,
    Pty98Failed,
    #[doc(hidden)]
    __Unknown(i32),
}

impl fmt::Display for PtyError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "PtyError::{}", match *self {
            PtyError::PtyHelperFailed => "PtyHelperFailed",
            PtyError::Pty98Failed => "Pty98Failed",
            _ => "Unknown",
        })
    }
}

#[doc(hidden)]
impl ToGlib for PtyError {
    type GlibType = vte_sys::VtePtyError;

    fn to_glib(&self) -> vte_sys::VtePtyError {
        match *self {
            PtyError::PtyHelperFailed => vte_sys::VTE_PTY_ERROR_PTY_HELPER_FAILED,
            PtyError::Pty98Failed => vte_sys::VTE_PTY_ERROR_PTY98_FAILED,
            PtyError::__Unknown(value) => value
        }
    }
}

#[doc(hidden)]
impl FromGlib<vte_sys::VtePtyError> for PtyError {
    fn from_glib(value: vte_sys::VtePtyError) -> Self {
        skip_assert_initialized!();
        match value {
            0 => PtyError::PtyHelperFailed,
            1 => PtyError::Pty98Failed,
            value => PtyError::__Unknown(value),
        }
    }
}

impl ErrorDomain for PtyError {
    fn domain() -> Quark {
        skip_assert_initialized!();

        unsafe { from_glib(vte_sys::vte_pty_error_quark()) }
    }

    fn code(self) -> i32 {
        self.to_glib()
    }

    fn from(code: i32) -> Option<Self> {
        skip_assert_initialized!();
        match code {
            0 => Some(PtyError::PtyHelperFailed),
            1 => Some(PtyError::Pty98Failed),
            value => Some(PtyError::__Unknown(value)),
        }
    }
}

impl StaticType for PtyError {
    fn static_type() -> Type {
        unsafe { from_glib(vte_sys::vte_pty_error_get_type()) }
    }
}

impl<'a> FromValueOptional<'a> for PtyError {
    unsafe fn from_value_optional(value: &Value) -> Option<Self> {
        Some(FromValue::from_value(value))
    }
}

impl<'a> FromValue<'a> for PtyError {
    unsafe fn from_value(value: &Value) -> Self {
        from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
    }
}

impl SetValue for PtyError {
    unsafe fn set_value(value: &mut Value, this: &Self) {
        gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib())
    }
}

// --- TextBlinkMode (vte >= 0.52 only): maps VteTextBlinkMode. ---
#[cfg(any(feature = "v0_52", feature = "dox"))]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum TextBlinkMode {
    Never,
    Focused,
    Unfocused,
    Always,
    #[doc(hidden)]
    __Unknown(i32),
}

#[cfg(any(feature = "v0_52", feature = "dox"))]
impl fmt::Display for TextBlinkMode {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TextBlinkMode::{}", match *self {
            TextBlinkMode::Never => "Never",
            TextBlinkMode::Focused => "Focused",
            TextBlinkMode::Unfocused => "Unfocused",
            TextBlinkMode::Always => "Always",
            _ => "Unknown",
        })
    }
}

#[cfg(any(feature = "v0_52", feature = "dox"))]
#[doc(hidden)]
impl ToGlib for TextBlinkMode {
    type GlibType = vte_sys::VteTextBlinkMode;

    fn to_glib(&self) -> vte_sys::VteTextBlinkMode {
        match *self {
            TextBlinkMode::Never => vte_sys::VTE_TEXT_BLINK_NEVER,
            TextBlinkMode::Focused => vte_sys::VTE_TEXT_BLINK_FOCUSED,
            TextBlinkMode::Unfocused => vte_sys::VTE_TEXT_BLINK_UNFOCUSED,
            TextBlinkMode::Always => vte_sys::VTE_TEXT_BLINK_ALWAYS,
            TextBlinkMode::__Unknown(value) => value
        }
    }
}

#[cfg(any(feature = "v0_52", feature = "dox"))]
#[doc(hidden)]
impl FromGlib<vte_sys::VteTextBlinkMode> for TextBlinkMode {
    fn from_glib(value: vte_sys::VteTextBlinkMode) -> Self {
        skip_assert_initialized!();
        match value {
            0 => TextBlinkMode::Never,
            1 => TextBlinkMode::Focused,
            2 => TextBlinkMode::Unfocused,
            3 => TextBlinkMode::Always,
            value => TextBlinkMode::__Unknown(value),
        }
    }
}

#[cfg(any(feature = "v0_52", feature = "dox"))]
impl StaticType for TextBlinkMode {
    fn static_type() -> Type {
        unsafe { from_glib(vte_sys::vte_text_blink_mode_get_type()) }
    }
}

#[cfg(any(feature = "v0_52", feature = "dox"))]
impl<'a> FromValueOptional<'a> for TextBlinkMode {
    unsafe fn from_value_optional(value: &Value) -> Option<Self> {
        Some(FromValue::from_value(value))
    }
}

#[cfg(any(feature = "v0_52", feature = "dox"))]
impl<'a> FromValue<'a> for TextBlinkMode {
    unsafe fn from_value(value: &Value) -> Self {
        from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
    }
}

#[cfg(any(feature = "v0_52", feature = "dox"))]
impl SetValue for TextBlinkMode {
    unsafe fn set_value(value: &mut Value, this: &Self) {
        gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib())
    }
}

// --- WriteFlags: maps VteWriteFlags (single Default variant). ---
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Clone, Copy)]
#[non_exhaustive]
pub enum WriteFlags {
    Default,
    #[doc(hidden)]
    __Unknown(i32),
}

impl fmt::Display for WriteFlags {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "WriteFlags::{}", match *self {
            WriteFlags::Default => "Default",
            _ => "Unknown",
        })
    }
}

#[doc(hidden)]
impl ToGlib for WriteFlags {
    type GlibType = vte_sys::VteWriteFlags;

    fn to_glib(&self) -> vte_sys::VteWriteFlags {
        match *self {
            WriteFlags::Default => vte_sys::VTE_WRITE_DEFAULT,
            WriteFlags::__Unknown(value) => value
        }
    }
}

#[doc(hidden)]
impl FromGlib<vte_sys::VteWriteFlags> for WriteFlags {
    fn from_glib(value: vte_sys::VteWriteFlags) -> Self {
        skip_assert_initialized!();
        match value {
            0 => WriteFlags::Default,
            value => WriteFlags::__Unknown(value),
        }
    }
}

impl StaticType for WriteFlags {
    fn static_type() -> Type {
        unsafe { from_glib(vte_sys::vte_write_flags_get_type()) }
    }
}

impl<'a> FromValueOptional<'a> for WriteFlags {
    unsafe fn from_value_optional(value: &Value) -> Option<Self> {
        Some(FromValue::from_value(value))
    }
}

impl<'a> FromValue<'a> for WriteFlags {
    unsafe fn from_value(value: &Value) -> Self {
        from_glib(gobject_sys::g_value_get_enum(value.to_glib_none().0))
    }
}

impl SetValue for WriteFlags {
    unsafe fn set_value(value: &mut Value, this: &Self) {
        gobject_sys::g_value_set_enum(value.to_glib_none_mut().0, this.to_glib())
    }
}
use std::mem; use utils::vec3::Vec3; use utils::ray::Ray; use utils::hitable::{Hitable, HitRecord}; #[derive(Clone, Default, Debug)] pub struct AABB { min: Vec3, max: Vec3, } impl AABB { pub fn new(a: Vec3, b: Vec3) -> Self { Self { min: a, max: b } } pub fn min(self) -> Vec3 { self.min } pub fn max(self) -> Vec3 { self.max } } impl Hitable for AABB { fn box_clone(&self) -> Box<Hitable> { Box::new((*self).clone()) } fn hit(&self, r: &Ray, tmin: f32, tmax: f32, _rec: &mut HitRecord) -> bool { for a in 0..3 { // let t0 = ffmin((self.min.e[a] - r.origin().e[a]) / r.direction().e[a], // (self.max.e[a] - r.origin().e[a]) / r.direction().e[a]); // let t1 = ffmax((self.min.e[a] - r.origin().e[a]) / r.direction().e[a], // (self.max.e[a] - r.origin().e[a]) / r.direction().e[a]); // let tmin = ffmax(t0, tmin); // let tmax = ffmax(t1, tmax); let inv_d = 1. / r.direction().e[a]; let mut t0 = (self.clone().min().e[a] - r.origin().e[a]) * inv_d; let mut t1 = (self.clone().max().e[a] - r.origin().e[a]) * inv_d; if inv_d < 0. { mem::swap(&mut t0, &mut t1); } let tmin = if t0 > tmin { t0 } else { tmin }; let tmax = if t1 < tmax { t1 } else { tmax }; if tmax <= tmin { return false; } } true } fn bounding_box(&self, _t0: f32, _t1: f32, _vox: &mut AABB) -> bool { false } } #[allow(dead_code)] fn ffmin(a: f32, b: f32) -> f32 { if a < b { a } else { b } } #[allow(dead_code)] fn ffmax(a: f32, b: f32) -> f32 { if a > b { a } else { b } } pub fn surrounding_box(b0: AABB, b1: AABB) -> AABB { let min = Vec3::new(b0.clone().min().x().min(b1.clone().min().x()), b0.clone().min().y().min(b1.clone().min().y()), b0.clone().min().z().min(b1.clone().min().z())); let max = Vec3::new(b0.clone().max().x().max(b1.clone().max().x()), b0.clone().max().y().max(b1.clone().max().y()), b0.max().z().max(b1.max().z())); AABB { min: min, max: max, } }
use sdl2::EventPump;
use std::collections::hash_map::HashMap;
use std::vec::Vec;

extern crate sdl2;

use sdl2::image::LoadTexture;
use sdl2::render::TextureCreator;
use sdl2::video::WindowContext;
use sdl2::{event::Event, image::InitFlag, keyboard::Keycode, render::Canvas, video::Window};

use crate::engine::basic_types::*;
use crate::engine::*;

/// Cache of loaded SDL textures keyed by file path. The textures borrow from
/// the `TextureCreator` for lifetime `'a`, so the creator must outlive the cache.
pub struct TexturesCache<'a> {
    textures: HashMap<String, sdl2::render::Texture<'a>>,
}

impl<'a> TexturesCache<'a> {
    pub fn new() -> Self {
        TexturesCache {
            textures: HashMap::new(),
        }
    }

    /// Load the image at `path` through `creator` and cache it under that path.
    pub fn load_texture(
        &mut self,
        path: String,
        creator: &'a TextureCreator<WindowContext>,
    ) -> Result<(), String> {
        let texture = creator.load_texture(path.clone())?;
        self.textures.insert(String::from(path), texture);
        Ok(())
    }

    /// Look up a previously loaded texture; error message includes the path.
    pub fn get(&self, path: &String) -> Result<&sdl2::render::Texture<'a>, String> {
        match self.textures.get(path) {
            Some(t) => Ok(t),
            None => Err(err::TEXTURE_NOT_FOUND.to_string() + " " + path),
        }
    }
}

/// Owns the SDL window canvas and the event pump created at init time.
pub struct SdlContext {
    pub canvas: Canvas<Window>,
    events: EventPump,
}

impl SdlContext {
    /// Initialise SDL (video + PNG image support) and build a centred,
    /// hardware-accelerated window canvas.
    pub fn new(title: &'static str, width: u32, height: u32) -> Result<Self, String> {
        let sdl = sdl2::init()?;
        let _img = sdl2::image::init(InitFlag::PNG)?;
        let vid_s = sdl.video()?;
        let events = sdl.event_pump()?;
        let window = vid_s
            .window(title, width, height)
            .position_centered()
            .build()
            .map_err(|e| e.to_string())?;
        let canvas = window
            .into_canvas()
            .accelerated()
            .build()
            .map_err(|e| e.to_string())?;
        Ok(SdlContext {
            canvas: canvas,
            events: events,
        })
    }
}

/// Bridges the engine's `DirectMedia`/`Renderer` traits onto an `SdlContext`
/// plus a texture cache.
pub struct SdlHandler<'a, 'b> {
    ctx: &'a mut SdlContext,
    cache: &'a mut TexturesCache<'b>,
}

impl<'a, 'b> SdlHandler<'a, 'b> {
    // NOTE(review): `_fps_limit` is accepted but currently ignored.
    pub fn new(
        ctx: &'a mut SdlContext,
        cache: &'a mut TexturesCache<'b>,
        _fps_limit: u32,
    ) -> SdlHandler<'a, 'b> {
        SdlHandler {
            ctx: ctx,
            cache: cache,
        }
    }
}

impl<'a, 'b> DirectMedia for SdlHandler<'a, 'b> {
    fn init(&mut self) -> Result<(), String> {
        Ok(())
    }

    fn clean_canvas(&mut self) {
        self.ctx.canvas.clear();
    }

    /// Let the drawable render itself via this handler's `Renderer` impl.
    fn draw_elements(&mut self, obj: &dyn Drawable) -> Result<(), String> {
        obj.draw(self)?;
        Ok(())
    }

    /// Drain pending SDL events, mapping arrow keys/space to engine events.
    /// Window close or Escape aborts the loop with `err::USER_EXIT`.
    fn process_events(&mut self) -> Result<Vec<basic_types::Event>, String> {
        let mut result: Vec<basic_types::Event> = Vec::new();
        for event in self.ctx.events.poll_iter() {
            match event {
                Event::Quit { .. }
                | Event::KeyDown {
                    keycode: Some(Keycode::Escape),
                    ..
                } => return Err(err::USER_EXIT.to_string()),
                Event::KeyDown {
                    keycode: Some(Keycode::Left),
                    ..
                } => result.push(basic_types::Event::Left),
                Event::KeyDown {
                    keycode: Some(Keycode::Right),
                    ..
                } => result.push(basic_types::Event::Right),
                Event::KeyDown {
                    keycode: Some(Keycode::Up),
                    ..
                } => result.push(basic_types::Event::Up),
                Event::KeyDown {
                    keycode: Some(Keycode::Down),
                    ..
                } => result.push(basic_types::Event::Down),
                Event::KeyDown {
                    keycode: Some(Keycode::Space),
                    ..
                } => result.push(basic_types::Event::Action),
                // All other events are ignored.
                _ => {}
            }
        }
        Ok(result)
    }

    fn present(&mut self) {
        self.ctx.canvas.present();
    }
}

impl<'a, 'b> Renderer for SdlHandler<'a, 'b> {
    fn clear(&mut self) {
        let black = sdl2::pixels::Color::RGB(0, 0, 0);
        self.ctx.canvas.set_draw_color(black);
        self.ctx.canvas.clear();
    }

    /// Blit the cached texture for `texture_path` into `rect`, rotated by
    /// `rotation` degrees about the rect's own centre.
    fn copy(
        &mut self,
        texture_path: &String,
        rect: basic_types::Rect,
        rotation: f64,
    ) -> Result<(), String> {
        let texture: &sdl2::render::Texture<'a> = self.cache.get(texture_path)?;
        let rotation_point =
            sdl2::rect::Point::new((rect.width / 2) as i32, (rect.height / 2) as i32);
        self.ctx
            .canvas
            .copy_ex(texture, None, rect, rotation, rotation_point, false, false)?;
        Ok(())
    }

    fn present(&mut self) {
        self.ctx.canvas.present();
    }
}

// Lets an engine `Rect` be passed directly where SDL expects an optional rect
// (e.g. the `dst` argument of `copy_ex` above).
impl Into<Option<sdl2::rect::Rect>> for basic_types::Rect {
    fn into(self) -> Option<sdl2::rect::Rect> {
        Some(sdl2::rect::Rect::new(
            self.x as i32,
            self.y as i32,
            self.width as u32,
            self.height as u32,
        ))
    }
}
use std::io::{self, BufReader};
use std::io::prelude::*;
use std::fs::File;

/// Read one mass per line from `input.txt` and print the total fuel
/// requirement, counting the fuel needed to carry the fuel itself
/// (Advent of Code 2019, day 1 part 2).
///
/// Previously malformed lines caused a panic via `unwrap()`; I/O and parse
/// failures are now propagated as `io::Error`.
fn main() -> io::Result<()> {
    let filename = "input.txt";
    let f = BufReader::new(File::open(filename)?);
    let mut result: i64 = 0;
    for line in f.lines() {
        let line = line?;
        let number: i64 = line
            .trim()
            .parse()
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        result += meta_fuel(number);
    }
    println!("{}", result);
    Ok(())
}

/// Total fuel for a module of mass `x`, iterating until the extra fuel's own
/// requirement drops to zero or below.
fn meta_fuel(x: i64) -> i64 {
    let mut total = 0;
    let mut additional_fuel = fuel(x);
    while additional_fuel > 0 {
        total += additional_fuel;
        additional_fuel = fuel(additional_fuel);
    }
    total
}

/// Base fuel for mass `x`: floor(x / 3) - 2. May be negative for tiny masses;
/// `meta_fuel` clamps that to zero.
fn fuel(x: i64) -> i64 {
    x / 3 - 2
}
mod utils;

use criterion::{criterion_group, criterion_main, Criterion};
use suffix_array::SuffixArray;
use utils::*;

/// Benchmark suffix-array construction for every sample file found under
/// `benches/data`, skipping samples that fail to load.
fn sa_construct(crit: &mut Criterion) {
    let dir = env!("CARGO_MANIFEST_DIR").to_owned() + "/benches/data";
    eprintln!("preparing data in {}...", dir);
    let (samples, _) = make_data(dir.as_ref()).unwrap();
    for sname in samples.into_iter() {
        eprint!("loading sample {}...", sname);
        let sdata;
        if let Ok(tmp) = load_data(dir.as_ref(), sname) {
            sdata = tmp;
            eprintln!("yes");
        } else {
            // Unreadable/missing sample: report and continue with the rest.
            eprintln!("pass");
            continue;
        }
        let bench_name = format!("saca {}", sname);
        // Fewer criterion samples for larger inputs, to keep total run time bounded.
        set_criterion_samples(crit, calc_samples(sdata.len()));
        crit.bench_function(bench_name.as_ref(), move |b| {
            b.iter(|| SuffixArray::new(&sdata[..]));
        });
    }
}

/// Criterion sample count by input size: 100 up to 4 KiB, 10 up to 1 MiB,
/// 3 up to 16 MiB, otherwise 2.
fn calc_samples(slen: usize) -> usize {
    if slen <= 4096 {
        100
    } else if slen <= 1024 * 1024 {
        10
    } else if slen <= 16 * 1024 * 1024 {
        3
    } else {
        2
    }
}

criterion_group!(sa_construct_benches, sa_construct);
criterion_main!(sa_construct_benches);
#[macro_use]
mod util;

pub mod block;
pub mod component;
pub mod resource;
pub mod user;

#[allow(unused_imports)]
use util::prelude::*;

pub use util::{Pack, PackDepth};

// Register every block/component/resource/user type with the arena.
// (The `arena!` macro comes from the `util` module above.)
arena! {
    pub block::Boxblock;
    pub block::CanvasTexture;
    pub block::Character;
    pub block::Chat;
    pub block::ChatChannel;
    pub block::ChatMessage;
    pub block::Craftboard;
    pub block::LayerGroup;
    pub block::Property;
    pub block::Scene;
    pub block::Table;
    pub block::TerranTexture;
    pub block::Terran;
    pub block::Textboard;
    pub block::World;
    pub component::BoxblockComponent;
    pub component::CraftboardComponent;
    pub component::TextboardComponent;
    pub resource::ImageData;
    pub resource::BlockTexture;
    pub user::Player;
}
// Crate-level docs come straight from the README.
#![doc = include_str!("../README.md")]

pub mod generate;
pub mod highlight;
pub mod logger;
pub mod parse;
pub mod playground;
pub mod query;
pub mod query_testing;
pub mod tags;
pub mod test;
pub mod test_highlight;
pub mod test_tags;
pub mod util;
pub mod wasm;

#[cfg(test)]
mod tests;

// To run compile fail tests
// The same module is declared again under `cfg(doctest)`; the two cfgs are
// set by different build modes, so the declarations do not clash.
#[cfg(doctest)]
mod tests;
//! We use a local worker implementation that does not produce a JoinHandle for spawn_pinned.
//! This avoids the cost to acquire a JoinHandle.
//!
//! See: [tokio-rs/tokio#4819](https://github.com/tokio-rs/tokio/issues/4819)
//!
//! We will not be able to produce a meaningful JoinHandle until WebAssembly targets support
//! unwinding.

use std::cell::RefCell;
use std::future::Future;
use std::marker::PhantomData;
use std::sync::Arc;
use std::{io, thread};

static DEFAULT_WORKER_NAME: &str = "yew-runtime-worker";

use std::sync::atomic::{AtomicUsize, Ordering};

use futures::channel::mpsc::UnboundedSender;
use futures::stream::StreamExt;
use tokio::task::{spawn_local, LocalSet};

// A spawn request shipped from any thread to the worker thread.
type SpawnTask = Box<dyn Send + FnOnce()>;

thread_local! {
    // Set once by the worker thread on startup; lets code running on that
    // thread rediscover its task counter via `LocalHandle::try_current`.
    static TASK_COUNT: RefCell<Option<Arc<AtomicUsize>>> = RefCell::new(None);
    // Per-thread LocalSet that all pinned (!Send) tasks are spawned onto.
    static LOCAL_SET: LocalSet = LocalSet::new();
}

/// A dedicated worker thread that runs futures pinned to it.
pub(crate) struct LocalWorker {
    task_count: Arc<AtomicUsize>,
    tx: UnboundedSender<SpawnTask>,
}

impl LocalWorker {
    /// Spawn the worker thread with its own single-threaded tokio runtime and
    /// an unbounded channel used to ship spawn requests to it.
    pub fn new() -> io::Result<Self> {
        let (tx, mut rx) = futures::channel::mpsc::unbounded::<SpawnTask>();
        let task_count: Arc<AtomicUsize> = Arc::default();
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;
        {
            let task_count = task_count.clone();
            thread::Builder::new()
                .name(DEFAULT_WORKER_NAME.into())
                .spawn(move || {
                    // Publish the counter for `LocalHandle::try_current`.
                    TASK_COUNT.with(move |m| {
                        *m.borrow_mut() = Some(task_count);
                    });
                    LOCAL_SET.with(|local_set| {
                        // Drive spawn requests until the sender side is dropped.
                        local_set.block_on(&rt, async move {
                            while let Some(m) = rx.next().await {
                                m();
                            }
                        });
                    });
                })?;
        }
        Ok(Self { task_count, tx })
    }

    /// Number of tasks currently tracked on this worker.
    pub fn task_count(&self) -> usize {
        self.task_count.load(Ordering::Acquire)
    }

    /// Ship `f` to the worker thread and spawn the future it produces there.
    pub fn spawn_pinned<F, Fut>(&self, f: F)
    where
        F: 'static + Send + FnOnce() -> Fut,
        Fut: 'static + Future<Output = ()>,
    {
        // Counter is incremented here; the guard decrements it when the
        // spawned task finishes (guard is moved into the task).
        let guard = LocalJobCountGuard::new(self.task_count.clone());

        // We ignore the result upon a failure, this can never happen unless the runtime is
        // exiting which all instances of Runtime will be dropped at that time and hence cannot
        // spawn pinned tasks.
        let _ = self.tx.unbounded_send(Box::new(move || {
            spawn_local(async move {
                let _guard = guard;

                f().await;
            });
        }));
    }
}

/// RAII guard: increments the task counter on creation, decrements on drop.
pub struct LocalJobCountGuard(Arc<AtomicUsize>);

impl LocalJobCountGuard {
    fn new(inner: Arc<AtomicUsize>) -> Self {
        inner.fetch_add(1, Ordering::AcqRel);

        LocalJobCountGuard(inner)
    }
}

impl Drop for LocalJobCountGuard {
    fn drop(&mut self) {
        self.0.fetch_sub(1, Ordering::AcqRel);
    }
}

/// Handle to the current worker thread; only obtainable on a worker thread.
#[derive(Debug, Clone)]
pub(crate) struct LocalHandle {
    // This type is not send or sync.
    _marker: PhantomData<*const ()>,

    task_count: Arc<AtomicUsize>,
}

impl LocalHandle {
    pub fn try_current() -> Option<Self> {
        // We cache the handle to prevent borrowing RefCell.
        thread_local! {
            static LOCAL_HANDLE: Option<LocalHandle> = TASK_COUNT
                .with(|m| m.borrow().clone())
                .map(|task_count| LocalHandle { task_count, _marker: PhantomData });
        }
        LOCAL_HANDLE.with(|m| m.clone())
    }

    /// Like `try_current` but panics outside a worker thread.
    pub fn current() -> Self {
        Self::try_current().expect("outside of Yew runtime.")
    }

    /// Spawn a `!Send` future onto this worker's LocalSet, tracked by the
    /// same task counter.
    pub fn spawn_local<F>(&self, f: F)
    where
        F: Future<Output = ()> + 'static,
    {
        let guard = LocalJobCountGuard::new(self.task_count.clone());
        LOCAL_SET.with(move |local_set| {
            local_set.spawn_local(async move {
                let _guard = guard;

                f.await;
            })
        });
    }
}

#[cfg(test)]
mod tests {
    use std::time::Duration;

    use futures::channel::oneshot;
    use tokio::test;
    use tokio::time::timeout;
    use yew::platform::Runtime;

    use super::*;

    // A handle must exist inside a pinned task, and not on the test thread.
    #[test]
    async fn test_local_handle_exists() {
        assert!(LocalHandle::try_current().is_none());

        let runtime = Runtime::default();
        let (tx, rx) = oneshot::channel();

        runtime.spawn_pinned(move || async move {
            tx.send(LocalHandle::try_current().is_some())
                .expect("failed to send");
        });

        timeout(Duration::from_secs(5), rx)
            .await
            .expect("task timed out")
            .expect("failed to receive");
    }

    // `LocalHandle::spawn_local` must schedule onto the same OS thread as the
    // task that created the handle.
    #[test]
    async fn test_local_handle_spawns_on_same_worker() {
        assert!(LocalHandle::try_current().is_none());

        let runtime = Runtime::default();
        let (tx1, rx1) = oneshot::channel();
        let (tx2, rx2) = oneshot::channel();

        runtime.spawn_pinned(move || async move {
            let handle = LocalHandle::current();

            tx1.send(std::thread::current().id())
                .expect("failed to send");

            handle.spawn_local(async move {
                tx2.send(std::thread::current().id())
                    .expect("failed to send");
            })
        });

        let result1 = timeout(Duration::from_secs(5), rx1)
            .await
            .expect("task timed out")
            .expect("failed to receive");
        let result2 = timeout(Duration::from_secs(5), rx2)
            .await
            .expect("task timed out")
            .expect("failed to receive");

        assert_eq!(result1, result2);
    }
}
pub mod data_extraction; pub mod fetch; pub mod jobkorea; use crate::posting::mastodon::Mastodon; use std::error::Error; /// 함수를 호출한 시점에서 가장 최근의 채용 공고 id를 가져온다. pub async fn init(http_client: &reqwest::Client) -> Result<u32, Box<dyn Error>> { let jobkorea_document = fetch::fetch(&http_client).await?; let data = data_extraction::data_extract(&jobkorea_document); Ok(data[0].id) } /// 채용 공고를 가져오고 업로드한다. /// 가장 최근의 채용 공고 id를 반환한다. pub async fn cycle( latest_id: u32, http_client: &reqwest::Client, mstdn: &Option<Mastodon>, ) -> Result<u32, Box<dyn Error>> { // 웹 사이트에서 목록을 가져옴 let jobkorea_document = fetch::fetch(&http_client).await?; let data = data_extraction::data_extract(&jobkorea_document); let fetch_latest_id = data[0].id; for i in data.iter() { if i.id > latest_id { println!("[jobkorea/{}] {}", i.id, i.title); match mstdn { Some(m) => { m.posting(&http_client, i).await?; } None => {} } } else { break; } } if fetch_latest_id > latest_id { Ok(fetch_latest_id) } else { Ok(latest_id) } }
// Copyright 2020 IOTA Stiftung
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.

use tiny_keccak::{Hasher, Keccak};

use bee_ternary::{
    bigint::{
        common::{BigEndian, U8Repr},
        I384, T242, T243,
    },
    Btrit, Trits, T1B1,
};

use crate::Sponge;

// Kerl operates on 243-trit blocks.
const HASH_LEN: usize = 243;

/// The Kerl sponge: Keccak-384 bridged to balanced ternary via the
/// ternary <-> 384-bit big-integer conversions below.
#[derive(Clone)]
pub struct Kerl {
    /// Actual keccak hash function.
    keccak: Keccak,
    /// Binary working buffer: the 384-bit big-endian integer fed to Keccak.
    binary_buffer: I384<BigEndian, U8Repr>,
    /// Ternary working buffer: 243 balanced trits.
    ternary_buffer: T243<Btrit>,
}

impl Kerl {
    pub fn new() -> Self {
        Self {
            keccak: Keccak::v384(),
            binary_buffer: I384::<BigEndian, U8Repr>::default(),
            ternary_buffer: T243::<Btrit>::default(),
        }
    }
}

impl Default for Kerl {
    fn default() -> Self {
        Kerl::new()
    }
}

#[derive(Debug)]
pub enum Error {
    /// Input/output length must be a multiple of `HASH_LEN` (243 trits).
    NotMultipleOfHashLength,
    /// Wrapped conversion error from the ternary big-integer layer.
    TernaryBinaryConversion(bee_ternary::bigint::common::Error),
}

impl From<bee_ternary::bigint::common::Error> for Error {
    fn from(error: bee_ternary::bigint::common::Error) -> Self {
        Error::TernaryBinaryConversion(error)
    }
}

impl Sponge for Kerl {
    const IN_LEN: usize = HASH_LEN;
    const OUT_LEN: usize = HASH_LEN;

    type Error = Error;

    /// Absorb `input` into the sponge by copying `HASH_LEN` chunks of it into its internal
    /// state and transforming the state before moving on to the next chunk.
    ///
    /// If `input` is not a multiple of `HASH_LEN` with the last chunk having `n < HASH_LEN` trits,
    /// the last chunk will be copied to the first `n` slots of the internal state. The remaining
    /// data in the internal state is then just the result of the last transformation before the
    /// data was copied, and will be reused for the next transformation.
    fn absorb(&mut self, input: &Trits) -> Result<(), Self::Error> {
        if input.len() % Self::IN_LEN != 0 {
            return Err(Error::NotMultipleOfHashLength);
        }

        for trits_chunk in input.chunks(Self::IN_LEN) {
            self.ternary_buffer.inner_mut().copy_from(&trits_chunk);
            // Unwrapping is ok because this cannot fail.
            //
            // TODO: Replace with a dedicated `TryFrom` implementation with `Error = !`.
            //
            // TODO: Convert to `t242` without cloning.
            //
            // TODO: Convert to binary without cloning.
            //
            // 243 trits are truncated to 242 (dropping the most significant
            // trit) before conversion to the 384-bit integer fed to Keccak.
            self.binary_buffer = self.ternary_buffer.clone().into_t242().into();
            self.keccak.update(self.binary_buffer.inner_ref());
        }

        Ok(())
    }

    /// Reset the internal state by overwriting it with zeros.
    fn reset(&mut self) {
        // TODO: Overwrite the internal buffer directly rather then setting it to a new Keccak
        // object. This requires using `KeccakState::reset` via a new method `Keccak::method`
        // calling its internal state.
        self.keccak = Keccak::v384();
    }

    /// Squeeze the sponge by copying the calculated hash into the provided `buf`. This will fill
    /// the buffer in chunks of `HASH_LEN` at a time.
    ///
    /// If the last chunk is smaller than `HASH_LEN`, then only the fraction that fits is written
    /// into it.
    fn squeeze_into(&mut self, buf: &mut Trits<T1B1>) -> Result<(), Self::Error> {
        if buf.len() % Self::OUT_LEN != 0 {
            return Err(Error::NotMultipleOfHashLength);
        }

        for trit_chunk in buf.chunks_mut(Self::OUT_LEN) {
            // Create a new Keccak in lieu of resetting the internal one
            let mut keccak = Keccak::v384();

            // Swap out the internal one and the new one
            std::mem::swap(&mut self.keccak, &mut keccak);

            keccak.finalize(&mut self.binary_buffer.inner_mut()[..]);
            let ternary_value = T242::from_i384_ignoring_mst(self.binary_buffer).into_t243();

            trit_chunk.copy_from(&ternary_value.inner_ref());
            // Flip every bit of the binary state and re-absorb it, so a
            // subsequent squeeze produces the next chunk of output.
            self.binary_buffer.not_inplace();
            self.keccak.update(self.binary_buffer.inner_ref());
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {

    use super::*;

    use bee_ternary::{T1B1Buf, T3B1Buf, TritBuf, TryteBuf};

    // Each case absorbs `input` trytes and asserts the squeezed hash equals
    // `output`. The second macro arm expands a comma-separated list of
    // `name: input => output` cases into individual #[test] functions.
    macro_rules! test_kerl {
        ($test_name:ident, $input_trytes:expr, $output_trytes:expr) => {
            #[test]
            fn $test_name() {
                let input = $input_trytes;
                let output = $output_trytes;
                let mut kerl = Kerl::new();
                let input_trytes = TryteBuf::try_from_str(input);
                assert!(input_trytes.is_ok());
                let input_trytes = input_trytes.unwrap();
                let input_trit_buf = input_trytes.as_trits().encode::<T1B1Buf>();
                let expected_hash = TryteBuf::try_from_str(output);
                assert!(expected_hash.is_ok());
                let expected_hash = expected_hash.unwrap();
                assert!(kerl.absorb(input_trit_buf.as_slice()).is_ok());

                let output_len = expected_hash
                    .as_trits()
                    .len();
                let mut calculated_hash = TritBuf::<T1B1Buf>::zeros(output_len);
                assert!(kerl.squeeze_into(&mut calculated_hash.as_slice_mut()).is_ok());

                let calculated_hash = calculated_hash.encode::<T3B1Buf>();

                assert_eq!(calculated_hash.as_slice(), expected_hash.as_trits());
            }
        };

        ( $( $test_name:ident: $input_trytes:expr => $output_trytes:expr ),+ $(,)?) => {
            $(
                test_kerl!($test_name, $input_trytes, $output_trytes);
            )+
        }
    }

    test_kerl!(
        from_iota_go_normal_trytes_1:
            "HHPELNTNJIOKLYDUW9NDULWPHCWFRPTDIUWLYUHQWWJVPAKKGKOAZFJPQJBLNDPALCVXGJLRBFSHATF9C"
            => "DMJWZTDJTASXZTHZFXFZXWMNFHRTKWFUPCQJXEBJCLRZOM9LPVJSTCLFLTQTDGMLVUHOVJHBBUYFD9AXX",
        from_iota_go_normal_trytes_2:
            "QAUGQZQKRAW9GKEFIBUD9BMJQOABXBTFELCT9GVSZCPTZOSFBSHPQRWJLLWURPXKNAOWCSVWUBNDSWMPW"
            => "HOVOHFEPCIGTOFEAZVXAHQRFFRTPQEEKANKFKIHUKSGRICVADWDMBINDYKRCCIWBEOPXXIKMLNSOHEAQZ",
        from_iota_go_normal_trytes_3:
            "MWBLYBSRKEKLDHUSRDSDYZRNV9DDCPN9KENGXIYTLDWPJPKBHQBOALSDH9LEJVACJAKJYPCFTJEROARRW"
            => "KXBKXQUZBYZFSYSPDPCNILVUSXOEHQWWWFKZPFCQ9ABGIIQBNLSWLPIMV9LYNQDDYUS9L9GNUIYKYAGVZ",
        from_iota_go_output_with_non_zero_243rd_trit:
            "GYOMKVTSNHVJNCNFBBAH9AAMXLPLLLROQY99QN9DLSJUHDPBLCFFAIQXZA9BKMBJCYSFHFPXAHDWZFEIZ"
            => "OXJCNFHUNAHWDLKKPELTBFUCVW9KLXKOGWERKTJXQMXTKFKNWNNXYD9DMJJABSEIONOSJTTEVKVDQEWTW",
        from_iota_go_input_with_243_trits:
            "EMIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJFGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH"
            => "EJEAOOZYSAWFPZQESYDHZCGYNSTWXUMVJOVDWUNZJXDGWCLUFGIMZRMGCAZGKNPLBRLGUNYWKLJTYEAQX",
        from_iota_go_output_with_more_than_243_trits:
            "9MIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJFGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH"
            => "G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJBVBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA",
        from_iota_go_input_and_output_with_more_than_243_trits:
            "G9JYBOMPUXHYHKSNRNMMSSZCSHOFYOYNZRSZMAAYWDYEIMVVOGKPJBVBM9TDPULSFUNMTVXRKFIDOHUXXVYDLFSZYZTWQYTE9SPYYWYTXJYQ9IFGYOLZXWZBKWZN9QOOTBQMWMUBLEWUEEASRHRTNIQWJQNDWRYLCA"
            => "LUCKQVACOGBFYSPPVSSOXJEKNSQQRQKPZC9NXFSMQNRQCGGUL9OHVVKBDSKEQEBKXRNUJSRXYVHJTXBPDWQGNSCDCBAIRHAQCOWZEBSNHIJIGPZQITIBJQ9LNTDIBTCQ9EUWKHFLGFUVGGUWJONK9GBCDUIMAYMMQX",
        negative_byte_input:
            "DJ9WGAKRZOMH9KVRCHGCDCREXZVDKY9FXAXVSLELYADXHQCQQSMQYAEEBTEIWTQDUZIOFSFLBQQA9RUPX"
            => "XRZCRWFXU9UYRKFQRKWROIRGEVGTUGUBKDYGPWDTUXXOFVXWRTQBRRGGUSIEMPAISTUEYEZJXXEPUTY9D",
    );
}
use crate::client::*;
use tokio::{
    net::{TcpListener, TcpStream},
    prelude::*,
};

// NOTE(review): only the unsecured listener below is wired up; the TLS port
// and thread-count constants are currently unused.
const SECURE_TCP_PORT: u32 = 8883;
const UNSECURE_TCP_PORT: u32 = 1883;
const NUM_THREADS: u32 = 4;

pub struct MqttServer {}

impl MqttServer {
    /// Wrap an accepted TCP stream in a `Client`.
    async fn client_spawner(stream: TcpStream) -> Client {
        println!("Spawning a client");
        Client::new(stream)
    }

    /// Bind the unsecured MQTT port (1883) and accept clients forever.
    /// Only returns early on a bind/accept error.
    pub async fn start() -> Result<(), Box<dyn std::error::Error>> {
        let bind_addr = String::from("0.0.0.0:") + &UNSECURE_TCP_PORT.to_string();
        let mut unsecure_listener = TcpListener::bind(bind_addr.clone()).await?;
        println!("Listening on {}", bind_addr);
        loop {
            // Asynchronously wait for an inbound socket.
            let (socket, addr) = unsecure_listener.accept().await?;
            println!("Got a new socket from addr: {:?}", addr);

            // And this is where much of the magic of this server happens. We
            // crucially want all clients to make progress concurrently, rather than
            // blocking one on completion of another. To achieve this we use the
            // `tokio::spawn` function to execute the work in the background.
            //
            // Essentially here we're executing a new task to run concurrently,
            // which will allow all of our clients to be processed concurrently.
            let client = MqttServer::client_spawner(socket).await;
            tokio::spawn(client.run());
        }
    }
}
// svd2rust-generated read-only accessors for the UCPD RX_ORDSETR register.
#[doc = "Register `RX_ORDSETR` reader"]
pub type R = crate::R<RX_ORDSETR_SPEC>;
#[doc = "Field `RXORDSET` reader - Rx ordered set code detected"]
pub type RXORDSET_R = crate::FieldReader;
#[doc = "Field `RXSOP3OF4` reader - The bit indicates the number of correct K‑codes. For debug purposes only."]
pub type RXSOP3OF4_R = crate::BitReader;
#[doc = "Field `RXSOPKINVALID` reader - The bitfield is for debug purposes only. Others: Invalid"]
pub type RXSOPKINVALID_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:2 - Rx ordered set code detected"]
    #[inline(always)]
    pub fn rxordset(&self) -> RXORDSET_R {
        // Mask out the low 3 bits.
        RXORDSET_R::new((self.bits & 7) as u8)
    }
    #[doc = "Bit 3 - The bit indicates the number of correct K‑codes. For debug purposes only."]
    #[inline(always)]
    pub fn rxsop3of4(&self) -> RXSOP3OF4_R {
        // Single flag at bit 3.
        RXSOP3OF4_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bits 4:6 - The bitfield is for debug purposes only. Others: Invalid"]
    #[inline(always)]
    pub fn rxsopkinvalid(&self) -> RXSOPKINVALID_R {
        // 3-bit field at bits 4..=6.
        RXSOPKINVALID_R::new(((self.bits >> 4) & 7) as u8)
    }
}
#[doc = "UCPD Rx ordered set register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rx_ordsetr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RX_ORDSETR_SPEC;
impl crate::RegisterSpec for RX_ORDSETR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rx_ordsetr::R`](R) reader structure"]
impl crate::Readable for RX_ORDSETR_SPEC {}
#[doc = "`reset()` method sets RX_ORDSETR to value 0"]
impl crate::Resettable for RX_ORDSETR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use console;
use std::fmt::Write;

/// Severity of a reported error.
#[derive(Debug)]
pub enum ErrorType {
    Warning,
    Fatal,
}

/// A single recorded error: category, message and severity.
#[derive(Debug)]
pub struct FileError {
    pub kind: String,
    pub message: String,
    error_type: ErrorType,
}

/// Collects `FileError`s so they can be reported in one batch.
#[derive(Debug)]
pub struct Accumulator(pub Vec<FileError>);

impl Accumulator {
    pub fn init() -> Self {
        Accumulator(Vec::new())
    }

    /// Print every accumulated error, coloured by its own severity.
    ///
    /// BUG FIX: the old implementation pushed each error's severity label into
    /// a `Vec` but always displayed `tag.iter().next()` (the first entry), so
    /// every error after the first was printed with the first error's label.
    /// The label is now derived per error. The unused `error_tage` styling
    /// string was removed as dead code.
    pub fn flush(&mut self) {
        for err in self.0.iter() {
            // Pick label and message colour from this error's severity.
            let (label, styled) = match err.error_type {
                ErrorType::Fatal => ("Fatal", console::Style::new().bright().red()),
                ErrorType::Warning => ("Warning", console::Style::new().bright().yellow()),
            };
            let msg_to_display = styled.apply_to(&err.message).to_string();
            let full_tag = console::style(format!("{} Error", label)).on_red().bright();
            let mut buffer = String::new();
            writeln!(buffer, "{} {}", full_tag, msg_to_display)
                .expect("Cannot write Error to the display!");
            println!("{}", buffer);
        }
    }
}

impl FileError {
    /// Fresh error with kind "file", empty message, fatal severity.
    pub fn new() -> Self {
        FileError {
            kind: String::from("file"),
            message: String::new(),
            error_type: ErrorType::Fatal,
        }
    }

    /// Builder: set the severity.
    pub fn set_type(mut self, e: ErrorType) -> Self {
        self.error_type = e;
        self
    }

    /// Builder: replace the error category.
    pub fn describe(mut self, kind: &str) -> Self {
        self.kind = kind.to_string();
        self
    }

    /// Builder: append to the message.
    pub fn set_message(mut self, msg: &str) -> Self {
        self.message.push_str(msg);
        self
    }

    /// Print this error immediately and terminate the process.
    pub fn panic(self) {
        let setting = console::Style::new();
        let error_tag = setting
            .apply_to("Error")
            .bold()
            .white()
            .bg(console::Color::Yellow)
            .to_string();
        let styled_msg = setting.bright().red().apply_to(self.message).to_string();
        print!("{}: {}", error_tag, styled_msg);
        std::process::exit(1)
    }
}

impl From<std::io::Error> for FileError {
    fn from(error: std::io::Error) -> Self {
        FileError::new()
            .describe("io")
            .set_message(&error.to_string())
    }
}

#[test]
fn test_err_handler() {
    let mut handler = Accumulator::init();
    for _ in 0..2 {
        let newmsg = FileError::new()
            .set_message("bro wtf")
            .set_type(ErrorType::Warning)
            .describe("file");
        handler.0.push(newmsg);
    }
    handler.flush();
}
use iterators::*;
use std::alloc::{GlobalAlloc, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
use std::time::Instant;

/// Global allocator wrapper that counts every byte handed out by `System`.
struct Counter;

// Cumulative byte counter. `dealloc` deliberately does not subtract, so this
// tracks total bytes requested over time, not currently-live bytes.
static ALLOCATED: AtomicUsize = AtomicUsize::new(0);

unsafe impl GlobalAlloc for Counter {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let ret = System.alloc(layout);
        // Only count allocations that actually succeeded.
        if !ret.is_null() {
            ALLOCATED.fetch_add(layout.size(), SeqCst);
        }
        return ret;
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout);
    }
}

#[global_allocator]
static A: Counter = Counter;

/// Compare several iterator/loop implementations (from the `iterators` crate)
/// by wall-clock time and by bytes allocated.
fn main() {
    let nums: Vec<u64> = (0..100_000).collect();

    let multiple_time = time_function(&nums, filter_map_filter_callback);
    let single_time = time_function(&nums, fold_callback);
    let single_loop_time = time_function(&nums, for_loop_callback);
    let multiple_inline_time = time_function(&nums, filter_map_filter_inline);
    let single_inline_time = time_function(&nums, fold_inline);
    let single_loop_inline_time = time_function(&nums, for_loop_inline);

    println!("Times (sec):");
    println!("{0: <20}{1:}", "Multiple:", multiple_time);
    println!("{0: <20}{1:}", "Single:", single_time);
    println!("{0: <20}{1:}", "Loop:", single_loop_time);
    println!("{0: <20}{1:}", "Multiple Inline:", multiple_inline_time);
    println!("{0: <20}{1:}", "Single Inline:", single_inline_time);
    println!("{0: <20}{1:}", "Loop Inline:", single_loop_inline_time);

    let multiple_weight = weigh_function(&nums, filter_map_filter_callback);
    let single_weight = weigh_function(&nums, fold_callback);
    let single_loop_weight = weigh_function(&nums, for_loop_callback);
    let multiple_inline_weight = weigh_function(&nums, filter_map_filter_inline);
    let single_inline_weight = weigh_function(&nums, fold_inline);
    let single_loop_inline_weight = weigh_function(&nums, for_loop_inline);

    println!("\nWeights (bytes):");
    println!("{0: <20}{1:}", "Multiple:", multiple_weight);
    println!("{0: <20}{1:}", "Single:", single_weight);
    println!("{0: <20}{1:}", "Loop:", single_loop_weight);
    println!("{0: <20}{1:}", "Multiple Inline:", multiple_inline_weight);
    println!("{0: <20}{1:}", "Single Inline:", single_inline_weight);
    println!("{0: <20}{1:}", "Loop Inline:", single_loop_inline_weight);
}

// All implementations under test share this shape.
type Func = fn(args: &[u64]) -> Vec<u64>;

/// Time a single call of `f` in seconds, after 1 000 warm-up iterations.
/// The output is printed so the call cannot be optimised away.
fn time_function(nums: &[u64], f: Func) -> f64 {
    for _ in 0..1_000 {
        let _ = f(nums);
    }
    let start = Instant::now();
    let output = f(nums);
    let end = start.elapsed().as_micros() as f64;
    println!("Ignore this {}", output[0]);
    // Convert to seconds
    end / 1_000_000.0
}

/// Bytes allocated by one call of `f`, measured by diffing the cumulative
/// allocation counter before and after the call.
fn weigh_function(nums: &[u64], f: Func) -> usize {
    let start = ALLOCATED.load(SeqCst);
    let output = f(nums);
    let end = ALLOCATED.load(SeqCst);
    println!("Ignore this {}", output[0]);
    end - start
}
// Internal value/data-model modules; commonly used items are re-exported below.
pub(crate) mod base;
pub(crate) mod command;
pub(crate) mod config;
pub(crate) mod dict;
pub(crate) mod files;
pub(crate) mod into;
pub(crate) mod meta;
pub(crate) mod types;

// Crate-wide shorthand re-exports.
pub(crate) use base::{Primitive, Value};
pub(crate) use command::command_dict;
pub(crate) use dict::{Dictionary, TaggedDictBuilder, TaggedListBuilder};
pub(crate) use files::dir_entry_dict;
use crate::common::{ Value, Table, Array }; use crate::vm::{ VM, env::{ Env, aux::* }, RuntimeError }; use crate::{ expect, optional }; pub fn load(env: &mut Env) { let tbl = Table::new(); tbl_builtin(&tbl, "upper", &str_upper); tbl_builtin(&tbl, "lower", &str_lower); tbl_builtin(&tbl, "split", &str_split); tbl_builtin(&tbl, "trim", &str_trim); tbl_builtin(&tbl, "byte", &str_byte); tbl_builtin(&tbl, "sub", &str_sub); env.set_global(Value::String("string".into()), Value::Table(tbl)) } fn str_upper(vm: &mut VM) -> Result<Value, RuntimeError> { let str = expect!(String, vm)?; Ok(Value::String(str.to_uppercase())) } fn str_lower(vm: &mut VM) -> Result<Value, RuntimeError> { let str = expect!(String, vm)?; Ok(Value::String(str.to_lowercase())) } fn str_split(vm: &mut VM) -> Result<Value, RuntimeError> { let str = expect!(String, vm)?; let pat = optional!(String, " ".to_string(), vm); let mut vals = Vec::new(); for found in str.split(&pat) { vals.push(Value::String(found.to_string())) } Ok(Value::Array(Array::new(vals))) } fn str_trim(vm: &mut VM) -> Result<Value, RuntimeError> { let str = expect!(String, vm)?; Ok(Value::String(str.trim().to_string())) } fn str_byte(vm: &mut VM) -> Result<Value, RuntimeError> { let str = expect!(String, vm)?; let pos = optional!(Number, 0.0, vm) as usize; Ok(Value::Number(str.bytes().nth(pos).unwrap_or(b'\0').into())) } fn str_sub(vm: &mut VM) -> Result<Value, RuntimeError> { let str = expect!(String, vm)?; let len = str.len(); let mut start = expect!(Number, vm)? as usize; let mut end = optional!(Number, len as f64, vm) as usize; start = start.clamp(0, len); end = end.clamp(0, len); start = start.clamp(0, end); Ok(Value::String(str[start..end].to_string())) }
/* * AVL tree list test (Rust) * * Copyright (c) 2022 Project Nayuki. (MIT License) * https://www.nayuki.io/page/avl-tree-list * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * - The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * - The Software is provided "as is", without warranty of any kind, express or * implied, including but not limited to the warranties of merchantability, * fitness for a particular purpose and noninfringement. In no event shall the * authors or copyright holders be liable for any claim, damages or other * liability, whether in an action of contract, tort or otherwise, arising from, * out of or in connection with the Software or the use or other dealings in the * Software. 
*/ extern crate rand; use rand::distributions::IndependentSample; use rand::distributions::range::Range; mod avltreelist; use avltreelist::AvlTreeList; fn main() { test_add(); test_add_list(); test_set(); test_insert_at_beginning(); test_insert_at_end(); test_insert_at_middle(); test_insert_list(); test_insert_many_beginning(); test_insert_many_end(); test_insert_many_everywhere(); test_remove(); test_clear(); test_move_iterator(); test_ref_iterator(); test_against_rust_vec_randomly(); println!("Test passed"); } fn test_add() { let mut list = AvlTreeList::<&'static str>::new(); list.push("January"); list.push("February"); list.push("March"); list.push("April"); list.push("May"); list.push("June"); list.check_structure(); assert_eq!(list.len(), 6); assert_eq!(list[0], "January" ); assert_eq!(list[1], "February"); assert_eq!(list[2], "March" ); assert_eq!(list[3], "April" ); assert_eq!(list[4], "May" ); assert_eq!(list[5], "June" ); } fn test_add_list() { let mut list = AvlTreeList::<&'static str>::new(); list.extend(vec!["January"].drain(..)); list.extend(vec!["February", "March", "April"].drain(..)); list.extend(vec!["May", "June", "July", "August", "September", "October", "November", "December"].drain(..)); assert_eq!(list.len(), 12); assert_eq!(list[ 0], "January" ); assert_eq!(list[ 1], "February" ); assert_eq!(list[ 2], "March" ); assert_eq!(list[ 3], "April" ); assert_eq!(list[ 4], "May" ); assert_eq!(list[ 5], "June" ); assert_eq!(list[ 6], "July" ); assert_eq!(list[ 7], "August" ); assert_eq!(list[ 8], "September"); assert_eq!(list[ 9], "October" ); assert_eq!(list[10], "November" ); assert_eq!(list[11], "December" ); } fn test_set() { let mut list = AvlTreeList::<&'static str>::new(); for _ in 0 .. 
10 { list.push(""); } list[0] = "zero"; list[1] = "ten"; list[2] = "twenty"; list[3] = "thirty"; list[4] = "forty"; list[5] = "fifty"; list[6] = "sixty"; list[7] = "seventy"; list[8] = "eighty"; list[9] = "ninety"; assert_eq!(list.len(), 10); assert_eq!(list[0], "zero" ); assert_eq!(list[1], "ten" ); assert_eq!(list[2], "twenty" ); assert_eq!(list[3], "thirty" ); assert_eq!(list[4], "forty" ); assert_eq!(list[5], "fifty" ); assert_eq!(list[6], "sixty" ); assert_eq!(list[7], "seventy"); assert_eq!(list[8], "eighty" ); assert_eq!(list[9], "ninety" ); } fn test_insert_at_beginning() { let mut list = AvlTreeList::<&'static str>::new(); list.insert(0, "Sunday"); list.insert(0, "Monday"); list.insert(0, "Tuesday"); assert_eq!(list.len(), 3); assert_eq!(list[0], "Tuesday"); assert_eq!(list[1], "Monday" ); assert_eq!(list[2], "Sunday" ); } fn test_insert_at_end() { let mut list = AvlTreeList::<&'static str>::new(); list.insert(0, "Saturday"); list.insert(1, "Friday"); list.insert(2, "Thursday"); list.insert(3, "Wednesday"); assert_eq!(list.len(), 4); assert_eq!(list[0], "Saturday" ); assert_eq!(list[1], "Friday" ); assert_eq!(list[2], "Thursday" ); assert_eq!(list[3], "Wednesday"); } fn test_insert_at_middle() { let mut list = AvlTreeList::<&'static str>::new(); list.insert(0, "Up"); list.insert(1, "Down"); list.insert(1, "Left"); list.insert(2, "Right"); list.insert(1, "Front"); list.insert(2, "Back"); assert_eq!(list.len(), 6); assert_eq!(list[0], "Up" ); assert_eq!(list[1], "Front"); assert_eq!(list[2], "Back" ); assert_eq!(list[3], "Left" ); assert_eq!(list[4], "Right"); assert_eq!(list[5], "Down" ); } fn test_insert_list() { let mut list = AvlTreeList::<&'static str>::new(); list.insert_iter(0, vec!["1", "2", "3", "5"].drain(..)); list.insert_iter(4, vec!["377", "610", "987"].drain(..)); list.insert_iter(4, vec!["8", "13", "21", "144", "233"].drain(..)); list.insert_iter(7, vec!["34", "55", "89"].drain(..)); assert_eq!(list.len(), 15); assert_eq!(list[ 0], "1"); 
assert_eq!(list[ 1], "2"); assert_eq!(list[ 2], "3"); assert_eq!(list[ 3], "5"); assert_eq!(list[ 4], "8"); assert_eq!(list[ 5], "13"); assert_eq!(list[ 6], "21"); assert_eq!(list[ 7], "34"); assert_eq!(list[ 8], "55"); assert_eq!(list[ 9], "89"); assert_eq!(list[10], "144"); assert_eq!(list[11], "233"); assert_eq!(list[12], "377"); assert_eq!(list[13], "610"); assert_eq!(list[14], "987"); } // Stresses the self-balancing mechanism fn test_insert_many_beginning() { let n: i32 = 300_000; let mut list = AvlTreeList::<i32>::new(); for i in (0 .. n).rev() { list.insert(0, i); } for (i, &x) in (0i32 .. ).zip((&list).into_iter()) { assert_eq!(x, i); } } // Stresses the self-balancing mechanism fn test_insert_many_end() { let n: i32 = 300_000; let mut list = AvlTreeList::<i32>::new(); for i in 0 .. n { list.push(i); } for (i, &x) in (0i32 .. ).zip((&list).into_iter()) { assert_eq!(x, i); } } // Adds in a weird binary pattern to stress arrays and linked lists fn test_insert_many_everywhere() { let n: i32 = 18; let mut list = AvlTreeList::<i32>::new(); list.push(0); for i in (0 .. n).rev() { let mut j: i32 = 1 << i; let mut k: usize = 1; while j < (1 << n) { list.insert(k, j); j += 2 << i; k += 2; } } for (i, &x) in (0i32 .. 
).zip((&list).into_iter()) { assert_eq!(x, i); } } fn test_remove() { let mut list = AvlTreeList::<char>::new(); let s = "the quick brown fox jumped over the lazy dog"; for c in s.chars() { list.push(c); } assert_eq!(list.len(), s.len()); assert_eq!(list.remove( 2), 'e'); assert_eq!(list.remove( 4), 'u'); assert_eq!(list.remove( 3), 'q'); assert_eq!(list.remove( 2), ' '); assert_eq!(list.remove(12), 'f'); assert_eq!(list.remove(11), ' '); assert_eq!(list.remove(10), 'n'); assert_eq!(list.remove( 9), 'w'); assert_eq!(list.remove(11), ' '); assert_eq!(list.remove(11), 'j'); assert_eq!(list.remove(11), 'u'); assert_eq!(list.remove(10), 'x'); assert_eq!(list.remove(11), 'p'); assert_eq!(list.remove(12), 'd'); assert_eq!(list.remove(11), 'e'); assert_eq!(list.remove(13), 'v'); assert_eq!(list.remove(13), 'e'); assert_eq!(list.remove(19), 'l'); assert_eq!(list.remove(20), 'z'); assert_eq!(list.remove(19), 'a'); assert_eq!(list.remove(18), ' '); assert_eq!(list.remove(22), 'g'); let s = "thick broom or they do"; assert_eq!(list.len(), s.len()); for (i, c) in s.chars().enumerate() { assert_eq!(list[i], c); } assert_eq!(list.remove(0), 't'); assert_eq!(list.remove(2), 'c'); assert_eq!(list.remove(2), 'k'); assert_eq!(list.remove(2), ' '); assert_eq!(list.remove(2), 'b'); assert_eq!(list.remove(2), 'r'); assert_eq!(list.remove(2), 'o'); assert_eq!(list.remove(2), 'o'); assert_eq!(list.remove(4), 'o'); assert_eq!(list.remove(7), 'h'); assert_eq!(list.remove(5), ' '); assert_eq!(list.remove(5), 't'); assert_eq!(list.remove(9), 'o'); assert_eq!(list.remove(7), ' '); assert_eq!(list.remove(6), 'y'); let s = "him red"; for (i, c) in s.chars().enumerate() { assert_eq!(list[i], c); } } fn test_clear() { let mut list = AvlTreeList::<i32>::new(); for i in 0i32 .. 
20 { list.push(i * i); } list.clear(); assert_eq!(list.len(), 0); list.push(- 1); list.push(- 8); list.push(-27); assert_eq!(list.len(), 3); assert_eq!(list[0], - 1); assert_eq!(list[1], - 8); assert_eq!(list[2], -27); } fn test_move_iterator() { let mut list = AvlTreeList::<i32>::new(); for i in 0 .. 50 { list.push(i * i); } let mut iter = list.into_iter(); for i in 0 .. 50 { assert_eq!(iter.next(), Some(i * i)); } assert_eq!(iter.next(), None); } fn test_ref_iterator() { let mut list = AvlTreeList::<i32>::new(); for i in 0 .. 50 { list.push(i * i); } let mut iter = (&list).into_iter().copied(); for i in 0 .. 50 { assert_eq!(iter.next(), Some(i * i)); } assert_eq!(iter.next(), None); } // Comprehensively tests all the defined methods. fn test_against_rust_vec_randomly() { let trials = 100_000; let rng = &mut rand::thread_rng(); let opcountdist = Range::new(1, 101); let valuedist = Range::new(0i32, 1_000_000); let mut list0 = Vec::<i32>::new(); let mut list1 = AvlTreeList::<i32>::new(); let mut size: usize = 0; for _ in 0 .. trials { let op = Range::new(0, 100).ind_sample(rng); if op < 1 { // Clear list1.check_structure(); list0.clear(); list1.clear(); size = 0; } else if op < 2 { // Set if size > 0 { let index = Range::new(0, size).ind_sample(rng); let val = valuedist.ind_sample(rng); list0[index] = val; list1[index] = val; } } else if op < 30 { // Random insertion let n = opcountdist.ind_sample(rng); for _ in 0 .. n { let index = Range::new(0, size + 1).ind_sample(rng); let val = valuedist.ind_sample(rng); list0.insert(index, val); list1.insert(index, val); } size += n; } else if op < 50 { // Ascending insertion let n = opcountdist.ind_sample(rng); let offset = Range::new(0, size + 1).ind_sample(rng); for i in 0 .. 
n { let val = valuedist.ind_sample(rng); list0.insert(offset + i, val); list1.insert(offset + i, val); } size += n; } else if op < 70 { // Descending insertion let n = opcountdist.ind_sample(rng); let offset = Range::new(0, size + 1).ind_sample(rng); for _ in 0 .. n { let val = valuedist.ind_sample(rng); list0.insert(offset, val); list1.insert(offset, val); } size += n; } else if op < 80 { // Random deletion let n = std::cmp::min(opcountdist.ind_sample(rng), size); for _ in 0 .. n { let index = Range::new(0, size).ind_sample(rng); assert_eq!(list0.remove(index), list1.remove(index)); size -= 1; } } else if op < 90 { // Ascending deletion if size > 0 { let offset = Range::new(0, size).ind_sample(rng); let n = std::cmp::min(opcountdist.ind_sample(rng), size - offset); for _ in 0 .. n { assert_eq!(list0.remove(offset), list1.remove(offset)); } size -= n; } } else if op < 100 { // Descending deletion if size > 0 { let offset = Range::new(0, size).ind_sample(rng); let n = std::cmp::min(opcountdist.ind_sample(rng), offset + 1); for i in 0 .. n { assert_eq!(list0.remove(offset - i), list1.remove(offset - i)); } size -= n; } } else { unreachable!(); } assert_eq!(list0.len(), size); assert_eq!(list1.len(), size); if size > 0 { let indexdist = Range::new(0, size); for _ in 0 .. 10 { let index = indexdist.ind_sample(rng); assert_eq!(list0[index], list1[index]); } } } }
type Ingredient = [i64; 5]; fn parse_ingredient(s: &str) -> Ingredient { let mut parts = s.split(' '); parts.next(); let mut properties = [0; 5]; for p in properties.iter_mut() { parts.next(); *p = parts.next().unwrap().trim_end_matches(',').parse().unwrap(); } properties } fn parse_input(s: &str) -> Vec<Ingredient> { s.lines().map(|l| parse_ingredient(l)).collect() } fn part1(ingredients: &[Ingredient]) { let mut max_score = 0; for n0 in 1..=100 { for n1 in 1..=100 { for n2 in 1..=100 { for n3 in 1..=100 { if n0 + n1 + n2 + n3 != 100 { continue; } let mut scores = [0; 4]; for i in 0..4 { scores[i] = std::cmp::max(0, ingredients[0][i] * n0 + ingredients[1][i] * n1 + ingredients[2][i] * n2 + ingredients[3][i] * n3); } let score = scores.iter().product(); if score > max_score { max_score = score; } } } } } println!("{}", max_score); } fn part2(ingredients: &[Ingredient]) { let mut max_score = 0; for n0 in 1..=100 { for n1 in 1..=100 { for n2 in 1..=100 { for n3 in 1..=100 { if n0 + n1 + n2 + n3 != 100 { continue; } let mut scores = [0; 5]; for i in 0..5 { scores[i] = std::cmp::max(0, ingredients[0][i] * n0 + ingredients[1][i] * n1 + ingredients[2][i] * n2 + ingredients[3][i] * n3); } if scores[4] != 500 { continue; } let score = scores.iter().take(4).product(); if score > max_score { max_score = score; } } } } } println!("{}", max_score); } fn main() { let input = parse_input(&std::fs::read_to_string("input").unwrap()); part1(&input); part2(&input); }
use std::error; //Has an Error Trait that we will find useful use std::io::{Read, Write}; use thiserror::*; #[derive(Error, Debug, PartialEq)] pub enum BErr { #[error("No Target Provided")] NoTarget, //Format string can reference parts with .0 #[error("Divide by Zero Error")] DivErr, } fn main() -> Result<(), Box<dyn error::Error>> { let mut it = std::env::args().skip(1); let target = it.next().ok_or(BErr::NoTarget)?; let mut tot = 0; let mut count = 0; let mut target = std::fs::OpenOptions::new() .create(true) .write(true) .open(target)?; for a in it { let s = sum_file(&a)?; tot += s; count += 1; writeln!(target, "{: <25}= {}", a, s)?; } if count == 0 { return Err(BErr::DivErr.into()); } writeln!(target, "----------------")?; writeln!(target, "AVG = {}", tot / count)?; println!("DONE"); Ok(()) } pub fn sum_file(fname: &str) -> anyhow::Result<isize> { let mut s = String::new(); std::fs::File::open(fname)?.read_to_string(&mut s)?; let mut res = 0; for n in s.trim().split('\n') { res += n.parse::<isize>()?; } Ok(res) }
use regex::Regex; use std::io::BufRead; #[derive(Debug)] pub struct Dimacs { pub n_vars: usize, pub clauses: Vec<Vec<i64>>, } pub fn parse_dimacs_from_buf_reader<F>(reader: &mut F) -> Dimacs where F: std::io::BufRead, { let mut n_clauses = 0usize; let mut n_vars = 0usize; let mut clauses = vec![]; for line in reader.lines() { let line = line.unwrap(); let line = line.trim(); if line.is_empty() { continue; } if line.starts_with('c') { continue; } else if line.starts_with('p') { let re_cnf = Regex::new(r"p\s+cnf\s+(\d+)\s+(\d+)").unwrap(); if let Some(cap) = re_cnf.captures(&line) { n_vars = cap[1].parse().unwrap(); n_clauses = cap[2].parse().unwrap(); } } else { let re = Regex::new(r"(-?\d+)").unwrap(); let mut cl = vec![]; for (_, cap) in re.captures_iter(&line).enumerate() { let l = match cap[1].parse::<i64>().unwrap() { 0 => continue, n => n, }; cl.push(l); } clauses.push(cl); if clauses.len() == n_clauses { break; } } } Dimacs { n_vars, clauses } }
use std::fmt;
use std::mem::replace;
use std::ptr::copy_nonoverlapping;
use std::ops::{Deref, DerefMut};
use std::iter::FromIterator;
use std::hash::{Hash, Hasher};
use std::hint::unreachable_unchecked;
use coalesce::{Coalesce2, coalesce};
use crate::array_vec::ArrayVec;
use crate::vector::Vector;
use crate::array::Array;

// Index/IndexMut impls for SmallVec live in a sibling file.
#[path = "index_impls.rs"]
mod index_impls;

/// Conversion from a fixed-capacity vector `T` into a growable container.
/// `spill` consumes `v` and takes ownership of its elements.
pub trait Spilled<T> { fn spill(v: T) -> Self; }

impl<T: Array> Spilled<ArrayVec<T>> for Vec<T::Item> {
    // Moves every element of `v` into a fresh Vec of exactly the right
    // capacity, bypassing per-element pushes.
    fn spill(mut v: ArrayVec<T>) -> Self {
        let len = v.len();
        let mut s = Self::with_capacity(len);
        unsafe {
            // SAFETY: set_len(0) first so `v` no longer owns its elements
            // (its Drop will not run their destructors); the raw copy then
            // transfers ownership byte-wise into `s`, and s.set_len(len)
            // makes `s` the sole owner. Source and destination are
            // distinct allocations, so copy_nonoverlapping's no-overlap
            // requirement holds.
            v.set_len(0);
            copy_nonoverlapping(v.as_ptr(), s.as_mut_ptr(), len);
            s.set_len(len);
        }
        s
    }
}

/// A vector that stores elements in an `ArrayVec<T>` until it outgrows it,
/// then "spills" into the backing store `S` (a heap `Vec` by default).
/// The `Coalesce2` holds whichever representation is currently active.
pub struct SmallVec<T: Array, S = Vec<<T as Array>::Item>>(Coalesce2<ArrayVec<T>, S>);

impl<T: Array, S: Clone> Clone for SmallVec<T, S> where ArrayVec<T>: Clone {
    fn clone(&self) -> Self { SmallVec(self.0.clone()) }
}

impl<T: Array, S: Vector + Spilled<ArrayVec<T>>> SmallVec<T, S> {
    /// Force the contents into the spilled (`S`) representation.
    /// No-op if already spilled.
    #[inline]
    pub fn spill(&mut self) {
        if !self.is_spilled() {
            // Temporarily park an empty S in self.0 so we can move the
            // ArrayVec out by value.
            match replace(&mut self.0, Coalesce2::B(S::new())) {
                Coalesce2::A(v) => { self.0 = Coalesce2::B(S::spill(v)); },
                // SAFETY: is_spilled() returned false just above, so the
                // replaced value is guaranteed to be the A variant.
                _ => unsafe { unreachable_unchecked() },
            }
        }
    }
    /// True when the contents live in the backing store `S` rather than
    /// the inline `ArrayVec`.
    #[inline]
    pub fn is_spilled(&self) -> bool {
        match self.0.as_ref() {
            Coalesce2::A(..) => false,
            Coalesce2::B(..) => true,
        }
    }
}

impl<T: Array, S> SmallVec<T, S> {
    /// Unwrap into the underlying two-variant representation.
    #[inline]
    pub fn into_inner(self) -> Coalesce2<ArrayVec<T>, S> { self.0 }
}

impl<T: Array, S> From<ArrayVec<T>> for SmallVec<T, S> {
    fn from(v: ArrayVec<T>) -> Self { SmallVec(Coalesce2::A(v)) }
}

// NOTE(review): `coalesce!(2 => |v| expr)` presumably expands to a match
// over both Coalesce2 variants applying `expr` to either payload — confirm
// against the coalesce crate.
unsafe impl<T: Array, S: Vector<Item=T::Item> + Spilled<ArrayVec<T>>> Vector for SmallVec<T, S> {
    type Item = T::Item;

    /// Start spilled when the requested capacity exceeds the inline
    /// capacity `T::len()`; otherwise start inline.
    #[inline]
    fn with_capacity(cap: usize) -> Self {
        SmallVec(if cap > T::len() {
            Coalesce2::B(S::with_capacity(cap))
        } else {
            Coalesce2::A(Default::default())
        })
    }

    #[inline]
    fn capacity(&self) -> usize {
        let v = self.0.as_ref();
        coalesce!(2 => |v| v.capacity())
    }

    /// Spills first when the free capacity is insufficient, then defers
    /// to the active representation's reserve.
    #[inline]
    fn reserve(&mut self, additional: usize) {
        let cap = self.capacity() - self.len();
        if cap < additional { self.spill(); } else if cap >= additional { return }
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.reserve(additional))
    }

    /// Like `reserve`, but note the second guard uses `>` (not `>=`), so
    /// an exact-fit request still falls through to reserve_exact.
    #[inline]
    fn reserve_exact(&mut self, additional: usize) {
        let cap = self.capacity() - self.len();
        if cap < additional { self.spill(); } else if cap > additional { return }
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.reserve_exact(additional))
    }

    #[inline]
    fn shrink_to_fit(&mut self) {
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.shrink_to_fit())
    }

    #[inline]
    fn into_boxed_slice(self) -> Box<[T::Item]> {
        let v = self.into_inner();
        coalesce!(2 => |v| v.into_boxed_slice())
    }

    #[inline]
    unsafe fn set_len(&mut self, len: usize) {
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.set_len(len))
    }

    #[inline]
    fn len(&self) -> usize {
        let v = self.0.as_ref();
        coalesce!(2 => |v| v.len())
    }

    #[inline]
    fn as_ptr(&self) -> *const T::Item {
        let v = self.0.as_ref();
        coalesce!(2 => |v| v.as_ptr())
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T::Item {
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.as_mut_ptr())
    }
}

impl<T: Array, S: Extend<T::Item>> Extend<T::Item> for SmallVec<T, S> where SmallVec<T, S>: Vector {
    fn extend<I: IntoIterator<Item=T::Item>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        // Reserve the iterator's lower size bound up front; this may
        // trigger a spill before any element is appended.
        self.reserve(iter.size_hint().0);
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.extend(iter))
    }
}

impl<T: Array, S: Extend<T::Item>> FromIterator<T::Item> for SmallVec<T, S> where SmallVec<T, S>: Vector {
    fn from_iter<I: IntoIterator<Item=T::Item>>(iter: I) -> Self {
        let mut s = Self::new();
        s.extend(iter);
        s
    }
}

/// Iterator over a consumed SmallVec: either the inline iterator (T1) or
/// the spilled store's iterator (T2), whichever was active.
pub struct SmallVecIter<T1, T2>(Coalesce2<T1, T2>);

impl<T1: Iterator, T2: Iterator<Item=T1::Item>> Iterator for SmallVecIter<T1, T2> {
    type Item = T1::Item;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.next())
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let v = self.0.as_ref();
        coalesce!(2 => |v| v.size_hint())
    }
    #[inline]
    fn count(self) -> usize {
        let v = self.0;
        coalesce!(2 => |v| v.count())
    }
}

impl<T: Array, S: IntoIterator<Item=<ArrayVec<T> as IntoIterator>::Item>> IntoIterator for SmallVec<T, S> where ArrayVec<T>: IntoIterator {
    type Item = <S as IntoIterator>::Item;
    type IntoIter = SmallVecIter<<ArrayVec<T> as IntoIterator>::IntoIter, <S as IntoIterator>::IntoIter>;
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        SmallVecIter(match self.0 {
            Coalesce2::A(v) => Coalesce2::A(v.into_iter()),
            Coalesce2::B(v) => Coalesce2::B(v.into_iter()),
        })
    }
}

impl<T: Array, S: Deref> Deref for SmallVec<T, S> where ArrayVec<T>: Deref<Target=S::Target> {
    type Target = S::Target;
    fn deref(&self) -> &Self::Target {
        let v = self.0.as_ref();
        coalesce!(2 => |v| v.deref())
    }
}

impl<T: Array, S: DerefMut> DerefMut for SmallVec<T, S> where ArrayVec<T>: DerefMut + Deref<Target=S::Target> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        let v = self.0.as_mut();
        coalesce!(2 => |v| v.deref_mut())
    }
}

impl<T: Array, S> Default for SmallVec<T, S> {
    // Default is the empty inline representation.
    fn default() -> Self { SmallVec(Coalesce2::A(Default::default())) }
}

impl<T: Array, S: Hash> Hash for SmallVec<T, S> where ArrayVec<T>: Hash {
    fn hash<H: Hasher>(&self, h: &mut H) {
        let v = self.0.as_ref();
        coalesce!(2 => |v| v.hash(h))
    }
}

impl<T: Array, S: fmt::Debug + Vector<Item=T::Item> + Spilled<ArrayVec<T>>> fmt::Debug for SmallVec<T, S> where ArrayVec<T>: fmt::Debug {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let v = self.0.as_ref();
        fmt.debug_struct("SmallVec")
            .field("is_spilled", &self.is_spilled())
            .field("capacity", &self.capacity())
            .field("data", &coalesce!(2 => |v| v as &dyn fmt::Debug))
            .finish()
    }
}

impl<'a, T: Array + 'a, S: Vector<Item=T::Item> + 'a> AsRef<dyn Vector<Item=T::Item> + 'a> for SmallVec<T, S> where T::Item: 'a, T::Index: 'a {
    // Erase the concrete representation behind a &dyn Vector.
    fn as_ref(&self) -> &(dyn Vector<Item=T::Item> + 'a) {
        let v = self.0.as_ref();
        coalesce!(2 => |v| v as &_)
    }
}
/// Reader of register INTR_MASKED
pub type R = crate::R<u32, super::INTR_MASKED>;
/// Reader of field `TX_TRIGGER`
pub type TX_TRIGGER_R = crate::R<bool, bool>;
/// Reader of field `TX_NOT_FULL`
pub type TX_NOT_FULL_R = crate::R<bool, bool>;
/// Reader of field `TX_EMPTY`
pub type TX_EMPTY_R = crate::R<bool, bool>;
/// Reader of field `TX_OVERFLOW`
pub type TX_OVERFLOW_R = crate::R<bool, bool>;
/// Reader of field `TX_UNDERFLOW`
pub type TX_UNDERFLOW_R = crate::R<bool, bool>;
/// Reader of field `TX_WD`
pub type TX_WD_R = crate::R<bool, bool>;
/// Reader of field `RX_TRIGGER`
pub type RX_TRIGGER_R = crate::R<bool, bool>;
/// Reader of field `RX_NOT_EMPTY`
pub type RX_NOT_EMPTY_R = crate::R<bool, bool>;
/// Reader of field `RX_FULL`
pub type RX_FULL_R = crate::R<bool, bool>;
/// Reader of field `RX_OVERFLOW`
pub type RX_OVERFLOW_R = crate::R<bool, bool>;
/// Reader of field `RX_UNDERFLOW`
pub type RX_UNDERFLOW_R = crate::R<bool, bool>;
/// Reader of field `RX_WD`
pub type RX_WD_R = crate::R<bool, bool>;

impl R {
    /// Bit 0 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn tx_trigger(&self) -> TX_TRIGGER_R {
        let raised = (self.bits & 1) == 1;
        TX_TRIGGER_R::new(raised)
    }
    /// Bit 1 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn tx_not_full(&self) -> TX_NOT_FULL_R {
        let raised = ((self.bits >> 1) & 1) == 1;
        TX_NOT_FULL_R::new(raised)
    }
    /// Bit 4 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn tx_empty(&self) -> TX_EMPTY_R {
        let raised = ((self.bits >> 4) & 1) == 1;
        TX_EMPTY_R::new(raised)
    }
    /// Bit 5 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn tx_overflow(&self) -> TX_OVERFLOW_R {
        let raised = ((self.bits >> 5) & 1) == 1;
        TX_OVERFLOW_R::new(raised)
    }
    /// Bit 6 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn tx_underflow(&self) -> TX_UNDERFLOW_R {
        let raised = ((self.bits >> 6) & 1) == 1;
        TX_UNDERFLOW_R::new(raised)
    }
    /// Bit 8 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn tx_wd(&self) -> TX_WD_R {
        let raised = ((self.bits >> 8) & 1) == 1;
        TX_WD_R::new(raised)
    }
    /// Bit 16 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn rx_trigger(&self) -> RX_TRIGGER_R {
        let raised = ((self.bits >> 16) & 1) == 1;
        RX_TRIGGER_R::new(raised)
    }
    /// Bit 18 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn rx_not_empty(&self) -> RX_NOT_EMPTY_R {
        let raised = ((self.bits >> 18) & 1) == 1;
        RX_NOT_EMPTY_R::new(raised)
    }
    /// Bit 19 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn rx_full(&self) -> RX_FULL_R {
        let raised = ((self.bits >> 19) & 1) == 1;
        RX_FULL_R::new(raised)
    }
    /// Bit 21 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn rx_overflow(&self) -> RX_OVERFLOW_R {
        let raised = ((self.bits >> 21) & 1) == 1;
        RX_OVERFLOW_R::new(raised)
    }
    /// Bit 22 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn rx_underflow(&self) -> RX_UNDERFLOW_R {
        let raised = ((self.bits >> 22) & 1) == 1;
        RX_UNDERFLOW_R::new(raised)
    }
    /// Bit 24 - Logical and of corresponding request and mask bits.
    #[inline(always)]
    pub fn rx_wd(&self) -> RX_WD_R {
        let raised = ((self.bits >> 24) & 1) == 1;
        RX_WD_R::new(raised)
    }
}
use super::{parse_macro, Compiler, DispError, DispResult, MacroMap, Token}; use std::collections::HashMap; use std::rc::Rc; #[derive(Debug)] pub struct UnparsedFunction { pub args: Vec<String>, pub body: Token, } impl UnparsedFunction { pub fn new(args: Vec<String>, body: Token) -> UnparsedFunction { return UnparsedFunction { args, body }; } } /// A FunctionMap of string to unparsed functions. /// The UnparsedFunction is reference counted because it /// is eventually spread across multiple specialized functions /// definitions in the future. pub type FunctionMap = HashMap<String, Rc<UnparsedFunction>>; /// consume tokens, subdividing them into function and macro declarations. pub fn parse_functions_and_macros( _compiler: &mut Compiler, parent_token: Token, ) -> DispResult<(FunctionMap, MacroMap)> { let mut function_map = HashMap::new(); let mut macro_map = MacroMap::new(); // instructions that are not a part of any function // are automatically added to the main function. let mut main_function_body = vec![]; if let Token::Block(tokens) = parent_token { for token in tokens { match token { // the only token we really need to parse out is the expression, // since that's the only thing that can define a top-level function. // everything else is part of the main function. 
Token::Expression(e) => match e[0].clone() { Token::Symbol(ref s) => { if **s == "fn" { let (name, function) = parse_function(e)?; function_map.insert(name, function); } else { main_function_body.push(Token::Expression(e)); } } Token::BangSymbol(ref s) => { if **s == "macro" { let (name, macro_instance) = parse_macro(e)?; macro_map.insert(name, macro_instance); } else { main_function_body.push(Token::Expression(e)); } } _ => main_function_body.push(Token::Expression(e)), }, t => main_function_body.push(t), } } } function_map.insert( String::from("main"), Rc::new(UnparsedFunction::new( vec![], Token::Block(main_function_body), )), ); Ok((function_map, macro_map)) } fn parse_function(tokens: Vec<Token>) -> DispResult<(String, Rc<UnparsedFunction>)> { if tokens.len() != 4 { return Err(DispError::new(&format!( "A function declaration should have 4 tokens: fn <name> <args> <body>. found {} for {:?}", tokens.len(), tokens ))); } let name = { if let Token::Symbol(ref s) = tokens[1] { s.clone() } else { return Err(DispError::new(&format!( "function name must be a symbol, found {}", &tokens[1] ))); } }; if cfg!(feature = "debug") { println!("parse function: {}", &name); } if *name == "main" { return Err(DispError::new("unable to name function main")); } let args = { if let Token::List(ref raw_list) = tokens[2] { let mut args = vec![]; for arg in raw_list { match arg { Token::Symbol(s) => { args.push((**s).clone()); } _ => { return Err(DispError::new("argument parameter should be a string")); } } } args } else { return Err(DispError::new(&format!( "function args must be a list of symbols, found {}", &tokens[2] ))); } }; return Ok(( *name, Rc::new(UnparsedFunction::new(args, tokens[3].clone())), )); }