text
stringlengths
8
4.13M
use crate::container::incomplete_vector_from_container_response;
use crate::container::responses::ListContainersResponse;
use crate::core::prelude::*;
use azure_core::headers::add_optional_header;
use azure_core::headers::request_id_from_headers;
use azure_core::prelude::*;
use futures::stream::{unfold, Stream};
use http::method::Method;
use http::status::StatusCode;

/// Builder for the "List Containers" storage operation.
/// Collects the optional request parameters, then either performs a single
/// request (`execute`) or follows continuation markers (`stream`).
#[derive(Debug, Clone)]
pub struct ListContainersBuilder<'a> {
    storage_client: &'a StorageClient,
    // Optional filters/options appended to the request URL.
    prefix: Option<Prefix<'a>>,
    next_marker: Option<NextMarker>,
    include_metadata: bool,
    include_deleted: bool,
    max_results: Option<MaxResults>,
    client_request_id: Option<ClientRequestId<'a>>,
    timeout: Option<Timeout>,
}

impl<'a> ListContainersBuilder<'a> {
    /// Start a builder with every option unset.
    pub(crate) fn new(storage_client: &'a StorageClient) -> Self {
        Self {
            storage_client,
            prefix: None,
            next_marker: None,
            include_metadata: false,
            include_deleted: false,
            max_results: None,
            client_request_id: None,
            timeout: None,
        }
    }

    // Generates the fluent `fn prefix(self, prefix: ...) -> Self`-style
    // setters for each field (macro presumably from the crate prelude).
    setters! {
        prefix: Prefix<'a> => Some(prefix),
        next_marker: NextMarker => Some(next_marker),
        include_metadata: bool => include_metadata,
        include_deleted: bool => include_deleted,
        max_results: MaxResults => Some(max_results),
        client_request_id: ClientRequestId<'a> => Some(client_request_id),
        timeout: Timeout => Some(timeout),
    }

    /// Perform one "List Containers" request (`GET ...?comp=list`) and parse
    /// the XML body into a `ListContainersResponse`.
    pub async fn execute(
        &self,
    ) -> Result<ListContainersResponse, Box<dyn std::error::Error + Sync + Send>> {
        let mut url = self
            .storage_client
            .storage_account_client()
            .blob_storage_url()
            .clone();
        url.query_pairs_mut().append_pair("comp", "list");
        self.prefix.append_to_url_query(&mut url);
        self.next_marker.append_to_url_query(&mut url);
        // The service takes a single comma-separated `include=` parameter,
        // so the two booleans are folded into one string here.
        if let Some(include) = match (self.include_metadata, self.include_deleted) {
            (true, true) => Some("metadata,deleted"),
            (true, false) => Some("metadata"),
            (false, true) => Some("deleted"),
            (false, false) => None,
        } {
            url.query_pairs_mut().append_pair("include", include);
        }
        self.max_results.append_to_url_query(&mut url);
        self.timeout.append_to_url_query(&mut url);
        debug!("generated url = {}", url);
        let (request, _url) = self.storage_client.prepare_request(
            url.as_str(),
            &Method::GET,
            &|mut request| {
                // Only header we add: the optional client request id.
                request = add_optional_header(&self.client_request_id, request);
                request
            },
            None,
        )?;
        // Any status other than 200 OK is turned into an error here.
        let response = self
            .storage_client
            .storage_account_client()
            .http_client()
            .execute_request_check_status(request, StatusCode::OK)
            .await?;
        debug!("response == {:?}", response);
        let body = std::str::from_utf8(response.body())?;
        debug!("body == {}", body);
        let incomplete_vector = incomplete_vector_from_container_response(body)?;
        let request_id = request_id_from_headers(response.headers())?;
        Ok(ListContainersResponse {
            incomplete_vector,
            request_id,
        })
    }

    /// Return a stream that transparently follows continuation markers:
    /// each item is one page (`ListContainersResponse`); iteration ends when
    /// a page carries no `next_marker`, or on the first error.
    pub fn stream(
        self,
    ) -> impl Stream<Item = Result<ListContainersResponse, Box<dyn std::error::Error + Sync + Send>>> + 'a
    {
        // Unfold state: Init for the first request, NextMarker(..) for
        // follow-up pages; the Option wrapper signals termination.
        #[derive(Debug, Clone, PartialEq)]
        enum States {
            Init,
            NextMarker(NextMarker),
        }
        unfold(Some(States::Init), move |next_marker: Option<States>| {
            let req = self.clone();
            async move {
                debug!("next_marker == {:?}", &next_marker);
                let response = match next_marker {
                    Some(States::Init) => req.execute().await,
                    Some(States::NextMarker(next_marker)) => {
                        req.next_marker(next_marker).execute().await
                    }
                    None => return None,
                };
                // the ? operator does not work in async move (yet?)
                // so we have to resort to this boilerplate
                let response = match response {
                    Ok(response) => response,
                    Err(err) => return Some((Err(err), None)),
                };
                // If we have a next marker, let's wrap it
                // in a States::NextMarker and pass it to the next execution.
                // If not, we'll obtain None that will end the loop.
                let next_marker = response
                    .incomplete_vector
                    .next_marker()
                    .map(|next_marker| States::NextMarker(next_marker.clone()));
                Some((Ok(response), next_marker))
            }
        })
    }
}
#[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct OAuth2Application { pub client_id: Option<String>, pub client_secret: Option<String>, pub created: Option<String>, pub id: Option<i64>, pub name: Option<String>, pub redirect_uris: Option<Vec<String>>, } impl OAuth2Application { /// Create a builder for this object. #[inline] pub fn builder() -> OAuth2ApplicationBuilder { OAuth2ApplicationBuilder { body: Default::default(), } } #[inline] pub fn user_get_oauth2_application() -> OAuth2ApplicationGetBuilder { OAuth2ApplicationGetBuilder { param_page: None, param_limit: None, } } #[inline] pub fn user_get_o_auth2_application() -> OAuth2ApplicationGetBuilder1<crate::generics::MissingId> { OAuth2ApplicationGetBuilder1 { inner: Default::default(), _param_id: core::marker::PhantomData, } } } impl Into<OAuth2Application> for OAuth2ApplicationBuilder { fn into(self) -> OAuth2Application { self.body } } /// Builder for [`OAuth2Application`](./struct.OAuth2Application.html) object. 
/// Fluent builder that accumulates an `OAuth2Application` body; finish by
/// converting the builder into the object (via the `From`/`Into` impl).
#[derive(Debug, Clone)]
pub struct OAuth2ApplicationBuilder {
    body: self::OAuth2Application,
}

impl OAuth2ApplicationBuilder {
    #[inline]
    pub fn client_id(mut self, value: impl Into<String>) -> Self {
        self.body.client_id = Some(value.into());
        self
    }

    #[inline]
    pub fn client_secret(mut self, value: impl Into<String>) -> Self {
        self.body.client_secret = Some(value.into());
        self
    }

    #[inline]
    pub fn created(mut self, value: impl Into<String>) -> Self {
        self.body.created = Some(value.into());
        self
    }

    #[inline]
    pub fn id(mut self, value: impl Into<i64>) -> Self {
        self.body.id = Some(value.into());
        self
    }

    #[inline]
    pub fn name(mut self, value: impl Into<String>) -> Self {
        self.body.name = Some(value.into());
        self
    }

    /// Collects the iterator into `Vec<String>`.
    /// Fix: dropped the trailing `.into()` — it converted `Vec<String>` to
    /// `Vec<String>`, a no-op identity conversion (clippy::useless_conversion).
    #[inline]
    pub fn redirect_uris(mut self, value: impl Iterator<Item = impl Into<String>>) -> Self {
        self.body.redirect_uris = Some(value.map(|value| value.into()).collect::<Vec<_>>());
        self
    }
}

/// Builder created by [`OAuth2Application::user_get_oauth2_application`](./struct.OAuth2Application.html#method.user_get_oauth2_application) method for a `GET` operation associated with `OAuth2Application`.
/// Builder for the paginated `GET /user/applications/oauth2` request.
/// Both query parameters are optional; unset ones are sent as empty values
/// by `modify` below.
#[derive(Debug, Clone)]
pub struct OAuth2ApplicationGetBuilder {
    param_page: Option<i64>,
    param_limit: Option<i64>,
}

impl OAuth2ApplicationGetBuilder {
    /// page number of results to return (1-based)
    #[inline]
    pub fn page(mut self, value: impl Into<i64>) -> Self {
        self.param_page = Some(value.into());
        self
    }

    /// page size of results
    #[inline]
    pub fn limit(mut self, value: impl Into<i64>) -> Self {
        self.param_limit = Some(value.into());
        self
    }
}

// Wires the builder into the generated client: fixed GET method, static
// path, and the two optional query parameters serialized as strings.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client>
    for OAuth2ApplicationGetBuilder
{
    type Output = Vec<OAuth2Application>;

    const METHOD: http::Method = http::Method::GET;

    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        "/user/applications/oauth2".into()
    }

    fn modify(
        &self,
        req: Client::Request,
    ) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        // NOTE(review): `None` params are still passed (as `Option::None`)
        // to `query`; whether they are omitted from the URL depends on the
        // `Request::query` impl — confirm against the client trait.
        Ok(req.query(&[
            ("page", self.param_page.as_ref().map(std::string::ToString::to_string)),
            ("limit", self.param_limit.as_ref().map(std::string::ToString::to_string)),
        ]))
    }
}

/// Builder created by [`OAuth2Application::user_get_o_auth2_application`](./struct.OAuth2Application.html#method.user_get_o_auth2_application) method for a `GET` operation associated with `OAuth2Application`.
/// Typestate builder for `GET /user/applications/oauth2/{id}`.
/// The `Id` type parameter tracks (at compile time) whether the mandatory
/// `id` path parameter has been supplied; `Sendable` is only implemented
/// for the `IdExists` state.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct OAuth2ApplicationGetBuilder1<Id> {
    inner: OAuth2ApplicationGetBuilder1Container,
    // Zero-sized marker for the typestate; carries no runtime data.
    _param_id: core::marker::PhantomData<Id>,
}

#[derive(Debug, Default, Clone)]
struct OAuth2ApplicationGetBuilder1Container {
    param_id: Option<i64>,
}

impl<Id> OAuth2ApplicationGetBuilder1<Id> {
    /// Application ID to be found
    #[inline]
    pub fn id(mut self, value: impl Into<i64>) -> OAuth2ApplicationGetBuilder1<crate::generics::IdExists> {
        self.inner.param_id = Some(value.into());
        // SAFETY: the transmute only changes the `Id` marker type parameter.
        // `Id` appears solely in a zero-sized `PhantomData` field and the
        // struct is `#[repr(transparent)]` over the container, so source and
        // target types have identical layout.
        unsafe { std::mem::transmute(self) }
    }
}

impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client>
    for OAuth2ApplicationGetBuilder1<crate::generics::IdExists>
{
    type Output = OAuth2Application;

    const METHOD: http::Method = http::Method::GET;

    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        // `param_id` is guaranteed set: the only way to reach the
        // `IdExists` state is through `id()` above.
        format!("/user/applications/oauth2/{id}", id=self.inner.param_id.as_ref().expect("missing parameter id?")).into()
    }
}
use hydroflow::tokio_stream::wrappers::IntervalStream;
use hydroflow::util::cli::{ConnectedDemux, ConnectedDirect, ConnectedSink};
use hydroflow::util::serialize_to_bytes;
use hydroflow_datalog::datalog;

/// Broadcast node: every second, re-sends two greeting tuples to every peer
/// over the "broadcast" demux port.
/// CLI contract: argv[1] is a JSON-encoded `(peers, sender_index)` pair.
#[hydroflow::main]
async fn main() {
    let mut ports = hydroflow::util::cli::init().await;
    // Demux sink: messages are routed by the peer id in the tuple's first slot.
    let broadcast_port = ports
        .port("broadcast")
        .connect::<ConnectedDemux<ConnectedDirect>>()
        .await;
    let (peers, sender_i): (Vec<u32>, u32) = serde_json::from_str(&std::env::args().nth(1).unwrap()).unwrap();
    let broadcast_sink = broadcast_port.into_sink();
    // One tick per second drives the periodic re-broadcast.
    let periodic = IntervalStream::new(tokio::time::interval(std::time::Duration::from_secs(1)));
    // Payloads are 1-tuples of String, as required by the datalog relation below.
    let to_repeat = vec![
        (format!("Hello {sender_i}"),),
        (format!("world {sender_i}"),),
    ];
    // Datalog program: `broadcast@n(x) :~ repeated(x), periodic(), peers(n)`
    // sends each repeated payload to every peer each time `periodic` fires.
    // The rule text is part of the program's behavior — do not edit casually.
    let df = datalog!(
        r#" .input repeated `spin() -> flat_map(|_| to_repeat.iter().cloned())` .input periodic `source_stream(periodic) -> map(|_| ())` .input peers `source_iter(peers.clone()) -> persist() -> map(|p| (p,))` .async broadcast `map(|(node_id, v)| (node_id, serialize_to_bytes(v))) -> dest_sink(broadcast_sink)` `null::<(String,)>()` broadcast@n(x) :~ repeated(x), periodic(), peers(n) "#
    );
    hydroflow::util::cli::launch_flow(df).await;
}
use crate::days::day7::{parse_input, default_input, Bag};
use std::collections::{HashMap, HashSet, VecDeque};

/// Entry point: prints the part-1 answer for the bundled puzzle input.
pub fn run() {
    println!("{}", color_bags_str(default_input()).unwrap())
}

/// Parse the raw rules text, then count how many bag colors can
/// (directly or transitively) contain a "shiny gold" bag.
pub fn color_bags_str(input: &str) -> Result<i32, ()> {
    color_bags(parse_input(input))
}

/// Reverse breadth-first search over the containment rules: starting from
/// "shiny gold", repeatedly discover rules whose contents mention a color
/// already known to be reachable, counting each newly discovered color once.
pub fn color_bags(rules: HashMap<String, Vec<Bag>>) -> Result<i32, ()> {
    let mut seen = HashSet::new();
    seen.insert("shiny gold");
    let mut pending = VecDeque::new();
    pending.push_front("shiny gold");
    let mut total = 0;
    while let Some(target) = pending.pop_front() {
        for (color, contents) in &rules {
            // Skip colors already counted.
            if seen.contains(&**color) {
                continue;
            }
            // If this rule's bag can hold the current target, it can
            // (transitively) hold shiny gold too.
            if contents.iter().any(|bag| bag.name == target) {
                total += 1;
                seen.insert(color);
                pending.push_back(color);
            }
        }
    }
    Ok(total)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn part1_answer() {
        assert_eq!(335, color_bags_str(default_input()).unwrap())
    }
}
// Interactive CLI environment: config-directory layout, prompt/select
// helpers over `dialoguer`, and filesystem utilities.
use console::style;
use dialoguer::{theme, Input, Select};
use std::fs;
use std::path::{Path, PathBuf};
use shared_child::SharedChild;
use std::sync::Arc;
use std::sync::RwLock;
use std::net::TcpStream;
use std::net::ToSocketAddrs;
use crate::er::{self, Result};

/// One configuration sub-directory (newtype over its absolute path).
pub struct ConfigDir(pub PathBuf);
impl ConfigDir {
    /// Path is `config_root`/`folder`; the directory is not created here.
    pub fn new(config_root: &PathBuf, folder: &str) -> ConfigDir {
        let mut config_dir = config_root.clone();
        config_dir.push(folder);
        ConfigDir(config_dir)
    }
    /// Absolute path of `file` inside this config dir.
    pub fn filepath(&self, file: &str) -> PathBuf {
        let mut filepath = self.0.clone();
        filepath.push(file);
        filepath
    }
    /// True if `file` exists inside this config dir and is a regular file.
    pub fn has_file(&self, file: &str) -> bool {
        let mut test = self.0.clone();
        test.push(file);
        test.is_file()
    }
    /// Write to a file relative to config dir.
    /// Will ensure the parent dir exists, creating it if necessary.
    pub fn write(&self, file: &str, content: &str) -> Result<()> {
        write_file(&self.filepath(file), content)
    }
}

/// The fixed set of config sub-directories under `<projects>/.config`.
pub struct ConfigDirs {
    pub git_accounts: ConfigDir,
    pub projects: ConfigDir,
    pub servers: ConfigDir,
    pub config_root: PathBuf,
}
impl ConfigDirs {
    pub fn new(projects_dir: PathBuf) -> ConfigDirs {
        let mut config_root = projects_dir.clone();
        config_root.push(".config");
        ConfigDirs {
            git_accounts: ConfigDir::new(&config_root, "git_accounts"),
            projects: ConfigDir::new(&config_root, "projects"),
            servers: ConfigDir::new(&config_root, "servers"),
            config_root,
        }
    }
}

/// Shared CLI state: base directories plus the dialoguer theme.
pub struct CliEnv {
    pub projects_dir: PathBuf,
    pub workdir_dir: PathBuf,
    pub config_dirs: ConfigDirs,
    theme: theme::ColorfulTheme,
}

/// Result of a "select or add new" prompt.
pub enum SelectOrAdd {
    Selected(usize),
    AddNew,
}
/// Result of a "select, add new, or none" prompt.
pub enum SelectOrAddOrNone {
    Selected(usize),
    AddNew,
    None,
}

impl CliEnv {
    pub fn new(projects_dir: PathBuf, workdir_dir: PathBuf) -> CliEnv {
        CliEnv {
            projects_dir: projects_dir.clone(),
            workdir_dir,
            config_dirs: ConfigDirs::new(projects_dir),
            theme: theme::ColorfulTheme::default(),
        }
    }

    /// Prompt for a line of input with an optional default; echoes the
    /// resolved value back (replacing the prompt line) and returns it.
    pub fn get_input(&self, prompt: &str, default: Option<String>) -> Result<String> {
        // console crate uses stderr
        let term = console::Term::stderr();
        let mut input_build = Input::<String>::with_theme(&self.theme);
        input_build.with_prompt(&prompt);
        default.iter().for_each(|default| {
            input_build.default(default.to_owned());
        });
        let input = input_build.interact_on(&term)?;
        let input = input.trim();
        // Empty input falls back to the default (when present).
        let resolved = if input != "" {
            String::from(input)
        } else {
            match default {
                Some(default) => default.into(),
                _ => String::from(input),
            }
        };
        // Replace previous line with resolved value
        term.clear_last_lines(1)?;
        term.write_line(&format!("{}: {}", prompt, style(&resolved).magenta()))?;
        Ok(resolved)
    }

    /// Prompt for a password (input hidden).
    pub fn get_pass(&self, prompt: &str) -> Result<String> {
        let mut input_build = dialoguer::PasswordInput::with_theme(&self.theme);
        input_build.with_prompt(&prompt);
        input_build.interact().map_err(|e| er::Io::e(e).into())
    }

    // todo: Add new. Select none. Handle 0 items
    /// Single-choice select; when a valid `default` index is given, its
    /// value is shown dimmed in the prompt. Returns the chosen index.
    pub fn select<T: ToString + std::cmp::PartialEq + Clone>(
        &self,
        prompt: &str,
        items: &Vec<T>,
        default: Option<usize>,
    ) -> Result<usize> {
        let prompt = match default {
            Some(default) => match items.get(default) {
                Some(default_val) => {
                    format!("{} ({})", prompt, style(default_val.to_string()).dim())
                }
                None => prompt.to_string(),
            },
            None => String::from(prompt),
        };
        let mut select_build = Select::with_theme(&self.theme);
        select_build.with_prompt(&prompt).items(items);
        select_build.default(default.unwrap_or(0));
        let index = select_build.interact()?;
        Ok(index)
    }

    /// Like `select_or_add`, with an extra "NONE" entry injected first.
    /// Returned `Selected` indices are re-based to the caller's `items`.
    pub fn select_or_add_or_none<T>(
        &self,
        prompt: &str,
        items: &Vec<T>,
        default: Option<usize>,
    ) -> Result<SelectOrAddOrNone>
    where
        T: ToString + std::cmp::PartialEq + Clone,
    {
        // Inject none at the beginning
        let mut items2 = vec!["NONE".to_string()];
        items2.append(&mut items.into_iter().map(|i| i.to_string()).collect());
        // Account for added NONE
        let default = default.map(|default| default + 1);
        match self.select_or_add(prompt, &items2, default)? {
            SelectOrAdd::Selected(selected) => {
                if selected == 0 {
                    Ok(SelectOrAddOrNone::None)
                } else {
                    // Shift back past the injected NONE entry.
                    Ok(SelectOrAddOrNone::Selected(selected - 1))
                }
            }
            SelectOrAdd::AddNew => Ok(SelectOrAddOrNone::AddNew),
        }
    }

    /// Select among `items` with a trailing "ADD NEW" entry; with zero
    /// items, skips the prompt and returns `AddNew` directly.
    pub fn select_or_add<T>(
        &self,
        prompt: &str,
        items: &Vec<T>,
        default: Option<usize>,
    ) -> Result<SelectOrAdd>
    where
        T: ToString + std::cmp::PartialEq + Clone,
    {
        if items.len() > 0 {
            // Append "add new" option to items
            let num_regular = items.len();
            let mut items2 = items.iter().map(|i| i.to_string()).collect::<Vec<String>>();
            items2.push("ADD NEW".to_string());
            let select_res = self.select(prompt, &items2, default)?;
            if select_res < num_regular {
                Ok(SelectOrAdd::Selected(select_res))
            } else {
                Ok(SelectOrAdd::AddNew)
            }
        } else {
            Ok(SelectOrAdd::AddNew)
        }
    }

    /// Print `msg` in red to stdout.
    pub fn error_msg(&self, msg: &str) {
        println!("{}", style(msg).red());
    }

    /// `projects_dir` joined with `extra`.
    pub fn get_project_path(&self, extra: &str) -> PathBuf {
        let mut cloned = self.projects_dir.clone();
        cloned.push(extra);
        cloned
    }

    /// Report an error result to the user; `Ok` is silently discarded.
    pub fn display_result<T>(&self, result: Result<T>) {
        match result {
            Ok(_) => (),
            Err(err) => self.error_msg(&format!("{:?}", err)),
        }
    }
}

/// List dirs and regular files directly inside `dir` (other entry kinds,
/// e.g. broken symlinks, are skipped). Errors if `dir` is not a directory.
pub fn entries_in_dir(dir: &Path) -> Result<Vec<PathBuf>> {
    let mut entries = Vec::new();
    if !dir.is_dir() {
        return er::err("Given path is not a directory");
    }
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let path = entry.path();
        if path.is_dir() || path.is_file() {
            entries.push(path);
        }
    }
    Ok(entries)
}

/// File names (as Strings) of entries in `dir`; non-UTF-8 names are
/// silently skipped, and a non-directory yields an empty list.
pub fn files_in_dir(dir: &Path) -> Result<Vec<String>> {
    if dir.is_dir() {
        let mut entries = Vec::new();
        for entry in std::fs::read_dir(dir)? {
            let entry = entry?;
            match entry.file_name().into_string() {
                Ok(string) => entries.push(string),
                Err(_) => (),
            }
        }
        Ok(entries)
    } else {
        Ok(Vec::new())
    }
}

/// Strip `prefix` from `full_path` and return the (lossy UTF-8) file name
/// of what remains.
pub fn name_after_prefix(full_path: &Path, prefix: &Path) -> Result<String> {
    match full_path.strip_prefix(&prefix) {
        Ok(stripped) => {
            // Todo: Could verify one component
            match stripped.file_name() {
                Some(name) => Ok(name.to_string_lossy().to_string()),
                None => er::err(format!("Could not get name from: {:?}", stripped)),
            }
        }
        Err(e) => er::err(format!("Error strip prefix: {:?}", e)),
    }
}

/// Lossy UTF-8 file name of `path`, or an error if it has none.
pub fn file_name_string(path: &Path) -> Result<String> {
    match path.file_name() {
        Some(file_name) => Ok(file_name.to_string_lossy().to_string()),
        None => er::err(format!("Could not get file_name from {:?}", path)),
    }
}

/// Create `path`'s parent directory (recursively) if it does not exist.
pub fn ensure_parent_dir(path: &Path) -> Result<()> {
    match path.parent() {
        Some(parent) => {
            if parent.is_dir() {
                Ok(())
            } else {
                fs::create_dir_all(parent).map_err(|e| er::Io::e(e).into())
            }
        }
        None => er::err(format!("Could not resolve parent of {:?}", path)),
    }
}

/// Ensures parent dir and writes content
pub fn write_file(path: &Path, content: &str) -> Result<()> {
    ensure_parent_dir(path)?;
    fs::write(path, content).map_err(|e| er::Io::e(e).into())
}

/// There should only be one current process
/// so we can register a global ctrlc handler
// SharedChild, then end_on_ctrlc
pub struct CurrentProcess(Arc<RwLock<Option<(SharedChild, bool)>>>);

// Todo: Go over error handling
impl CurrentProcess {
    pub fn new() -> CurrentProcess {
        // Shared slot read by the ctrl-c handler and the wait thread below.
        let current_process: Arc<RwLock<Option<(SharedChild, bool)>>> = Arc::new(RwLock::new(None));
        let current_process_ctrlc = current_process.clone();
        // todo: Possibly ctrl-c is forwarded to the command anyway
        // through stdin?
match ctrlc::set_handler(move || { let current_process = match current_process_ctrlc.read() { Ok(lock) => lock, Err(e) => { println!("Error aquiring lock: {:?}", e); return (); } }; match &*current_process { Some((process, end_on_ctrlc)) => { if *end_on_ctrlc { match process.kill() { Ok(_) => { println!("Ended process by ctrl-c"); () } Err(e) => println!("Error ending dev process: {:?}", e), } } } None => { println!("No current process in ctrlc"); std::process::exit(0); } } // CurrentProcess will be set to none in // dedicated process wait thread }) { Ok(_) => (), Err(e) => println!("Ctrlc error: {:?}", e), } CurrentProcess(current_process) } pub fn spawn_and_wait( self, mut cmd: std::process::Command, end_on_ctrlc: bool, ) -> Result<Self> { // Spawn and put into shared value with ctrlc { let shared_child = shared_child::SharedChild::spawn(&mut cmd)?; // By default, inherit stdin, out, err match self.0.write() { Ok(mut write_lock) => *write_lock = Some((shared_child, end_on_ctrlc)), Err(e) => { println!("Couldn't aqcuire write lock: {:?}", e); } } } let wait_clone = self.0.clone(); let thread = std::thread::spawn(move || { { let reader = match wait_clone.read() { Ok(reader) => reader, Err(e) => { print!("Could not get read lock: {:?}", e); return (); } }; let wait_process = match &*reader { Some((wait_process, _)) => wait_process, None => return (), }; match wait_process.wait() { Ok(exit_status) => { println!("Exited dev process with status: {}", exit_status); } Err(e) => { println!("Error waiting for process: {:?}", e); return (); } } } // Remove from current process match wait_clone.write() { Ok(mut lock) => { *lock = None; } Err(e) => println!("Failed getting write on current process: {:?}", e), } }); let _thread_res = match thread.join() { Ok(_res) => { println!("Joined thread"); } Err(e) => println!("Error ending process: {:?}", e), }; Ok(self) } } pub fn wait_for<A: ToSocketAddrs>(addr: A) -> bool { let mut attempts = 0; let max_attempts = 15; loop { match 
TcpStream::connect(&addr) { Ok(_) => { // todo: Better solution.. if attempts > 0 { // If server is getting up, allow some time std::thread::sleep(std::time::Duration::from_millis(2000)); } return true; } Err(e) => { println!("Could not connect, retrying..."); attempts = attempts + 1; if attempts >= max_attempts { format!("Aborting after max attempts: {}, {:?}", max_attempts, e); return false; } std::thread::sleep(std::time::Duration::from_millis(1500)); } } } } pub fn now_formatted() -> String { let system_time = std::time::SystemTime::now(); let datetime: chrono::DateTime<chrono::Utc> = system_time.into(); format!("{}", datetime.format("%Y-%m-%d %T")) } /// Convenience to build string vecs while /// accepting Into<String> pub struct StringVec { vec: Vec<String>, } impl StringVec { pub fn new() -> Self { StringVec { vec: Vec::new() } } pub fn push(&mut self, x: impl Into<String>) -> &mut Self { self.vec.push(x.into()); self } pub fn join(&self, separator: &str) -> String { self.vec.join(separator) } }
extern crate mioco;
extern crate time;

use mioco::Mioco;

/// Skynet micro-benchmark: recursively fan out into `div` coroutines until
/// the slice size reaches 1, then sum the leaf indices back up.
fn skynet(num: u32, size: u32, div: u32) -> u64 {
    if size == 1 {
        return num as u64;
    }
    let child_size: u32 = size / div;
    // Spawn all children first so they run concurrently, then join.
    let handles: Vec<_> = (0..div)
        .map(|i| {
            let child_num: u32 = num + i * child_size;
            mioco::spawn(move || -> u64 { skynet(child_num, child_size, div) })
        })
        .collect();
    let mut total = 0u64;
    for handle in handles {
        total += handle.join().unwrap()
    }
    total
}

fn main() {
    // Raise the notify queue capacity so one million coroutines fit.
    let mut config = mioco::Config::new();
    config.event_loop().notify_capacity(80_000);
    Mioco::new_configured(config)
        .start(|| {
            let started_at = time::get_time();
            let result = mioco::spawn(move || skynet(0, 1000000, 10)).join().unwrap();
            let duration = time::get_time() - started_at;
            println!("Result {} in {} ms.", result, duration.num_milliseconds());
        })
        .unwrap();
}
// Integration tests for the veloci search engine: a shared in-memory index
// is built once (lazy_static) from the inline index config + JSON fixture,
// then queried by the individual #[test] functions via project macros
// (search_request_json_to_doc!, search_testo_to_doc!, ...).
use std::path::Path;
use serde_json::Value;
use veloci::*;
use super::common;

// Folder name under which the shared test index is created.
static TEST_FOLDER: &str = "test_all";

// Shared persistence: `indices` is the TOML index configuration (facets,
// boosts, tokenization flags per field); `token_values` boosts the token
// "Begeisterung" on meanings.ger[].
lazy_static! {
    static ref TEST_PERSISTENCE: persistence::Persistence = {
        let indices = r#" ["*GLOBAL*"] features = ["All"] ["commonness"] facet = true ["commonness".boost] boost_type = "f32" ["ent_seq".fulltext] tokenize = true ["nofulltext".fulltext] tokenize = false ["tags[]"] facet = true ["field1[].rank".boost] boost_type = "f32" ["field1[].text"] tokenize = true ["kanji[].text"] tokenize = true ["meanings.ger[]"] stopwords = ["stopword"] fulltext.tokenize = true ["meanings.eng[]".fulltext] tokenize = true ["kanji[].commonness".boost] boost_type = "f32" ["kana[].commonness".boost] boost_type = "f32" "#;
        let token_values = Some((r#"[{"text": "Begeisterung", "value": 20 } ]"#.to_string(), json!({"path": "meanings.ger[]"})));
        common::create_test_persistence(TEST_FOLDER, indices, get_test_data().to_string().as_bytes(), token_values)
    };
}

// The JSON fixture the shared index is built from. Individual tests assert
// against specific documents below (e.g. ent_seq "1587690" = the "urge"
// entry), so do not change values without updating the tests.
pub fn get_test_data() -> Value {
    json!([
        { "ignore_field":"", "commonness": 123456, "ent_seq": "99999", "tags": ["nice", "cool"] },
        { "nofulltext":"my tokens", "commonness": 20, "tags": ["nice", "cool"], "kanji": [ { "text": "偉容", "commonness": 0}, { "text": "威容","commonness": 5} ], "kana": [ { "text": "いよう", "romaji": "Iyou", "commonness": 5 } ], "meanings": { "eng" : ["karlo","dignity", "majestic appearance", "will testo"], "ger": ["majestätischer Anblick (m)", "majestätisches Aussehen (n)", "Majestät (f)"] }, "ent_seq": "1587680" },
        { "commonness": 20, "tags": ["nice"], "kanji": [ { "text": "意欲", "commonness": 40}, { "text": "意慾", "commonness": 0} ], "kana": [ { "text": "いよく", "romaji": "Iyoku", "commonness": 40 } ], "meanings": { "eng" : ["will", "urge", "having a long torso"], "ger": ["Wollen (n)", "Wille (m)", "Begeisterung (f)", "begeistern"] }, "ent_seq": "1587690" },
        { "meanings": { "eng" : ["karl der große"], } },
        { "id": 1234566, "gender": "male", "tags": ["awesome", "cool"], "sinlge_value_multi": ["wert"], "birthDate": "1960-08-19", "address": [ { "line": ["nuts strees"] }, { "line": ["asdf"] } ], "commonness": 500, "kanji": [ { "text": "意慾", "commonness": 20} ], "field1" : [{"text":"awesome", "rank":1}], "kana": [ { "text": "いよく" } ], "meanings": { "eng" : ["test1"], "ger": ["der test", "das ist ein guter Treffer"] }, "ent_seq": "1587700" },
        { "id": 123456, "tags": ["nice", "cool"], "gender": "female", "birthDate": "1950-08-19", "address": [ { "line": [ "71955 Ilene Brook" ] } ], "commonness": 551, "kanji": [ { "text": "何の", "commonness": 526 } ], "field1" : [{"text":"awesome"}, {"text":"nixhit"}], "kana": [ { "text": "どの", "romaji": "Dono", "commonness": 25 } ], "meanings": { "ger": ["welch", "guter nicht Treffer", "alle meine Words", "text", "localität"] }, "ent_seq": "1920240", "mylongtext": "Prolog:\nthis is a story of a guy who went out to rule the world, but then died. the end" },
        { "pos": [ "adj-i" ], "commonness": 1, "misc": [], "tags": ["nice", "cool", "Prolog:\nthis is a story of a guy who went out to rule the world, but then died. the end"], "kanji": [ { "text": "柔らかい", "commonness": 57 } ], "kana": [ { "text": "やわらかい", "romaji": "Yawarakai", "commonness": 30 } ], "meanings": { "ger": [ "(1) weich", "stopword" ] }, "ent_seq": "1605630" },
        { "meanings": { "ger": [ "(1) 2 3 super nice weich" // add wich with no commonness
        ] }, "ent_seq": "9555" },
        { "meanings": { "ger": ["text localität", "alle meine Words"] }, "ent_seq": "1000" },
        { "sub_level": [{"text":"Prolog:\nthis is story of a guy who went out to rule the world, but then died. the end"}], "commonness": 515151, "ent_seq": "25", "tags": ["nice", "cool"] },
        { "title": "Die Erbin die Sünde", "type": "taschenbuch" },
        { "title": "Die Erbin", "type": "taschenbuch" },
        { "commonness": 30, "title": "COllectif", "meanings": {"ger": ["boostemich"] } },
        { "commonness": 30, "float_value": 5.123, "ent_seq": "26", "tags": ["nice", "coolo"] },
        { "commonness": 20, "ent_seq": "27", "my_bool" : true, "tags": ["Eis", "cool"] },
        { "commonness": 20, "ent_seq": "28", "tags": ["nice", "cool"] }
    ])
}

// TODO enable
// #[test]
// fn test_json_request() {
//     let requesto: search::Request = serde_json::from_str(r#"{"search":{"path":"asdf", "terms":[ "asdf"], "levenshtein_distance":1}}"#).unwrap();
//     assert_eq!(requesto.search.unwrap().get_options().levenshtein_distance, Some(1));
// }

// Smoke test: indexing a JSON file into an mmap persistence succeeds.
#[test]
fn test_create_index_from_file() {
    let db = "test_files/test_from_file".to_string();
    if Path::new(&db).exists() {
        std::fs::remove_dir_all(&db).unwrap();
    }
    let mut pers = persistence::Persistence::create_mmap(db).unwrap();
    create::create_indices_from_file(&mut pers, "test_files/test_data.json", "", false).unwrap();
}

#[test]
fn simple_search() {
    let req = json!({ "search": { "terms":["urge"], "path": "meanings.eng[]" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587690");
    assert_eq!(hits[0].doc["commonness"], 20);
    assert_eq!(hits[0].doc["tags"], json!(["nice".to_string()]));
}

#[test]
fn return_execution_time() {
    let req = json!({ "search": { "terms":["urge"], "path": "meanings.eng[]" } });
    let res = search_request_json_to_doc!(req);
    assert_gt!(res.execution_time_ns, 1);
}

// #[test]
// fn levenshtein_lowercase_regression() {
//     let req = json!({
//         "search": {
//             "terms":["COllectif"],
//             "path": "title",
//             "levenshtein_distance": 2
//         }
//     });
//     let hits = search_request_json_to_doc!(req).data;
//     assert_eq!(hits.len(), 1);
//     assert_eq!(hits[0].doc["title"], "COllectif");
// }

// Skipping past the last hit yields an empty page.
#[test]
fn simple_search_skip_far() {
    let req = json!({ "search_req": { "search": { "terms":["urge"], "path": "meanings.eng[]" } }, "skip": 1000 });
    let hits = search_testo_to_doc!(req).data;
    assert_eq!(hits.len(), 0);
}

#[test]
fn simple_search_case_sensitive() {
    let req = json!({ "search": { "ignore_case": true, "terms":["Urge"], "path": "meanings.eng[]" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    let req = json!({ "search": { "ignore_case": false, "terms":["Urge"], "path": "meanings.eng[]" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 0);
}

#[test]
fn simple_search_explained() {
    let req = json!({ "search": { "terms":["urge"], "path": "meanings.eng[]", "options": {"explain":true} } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587690");
    assert_eq!(hits[0].doc["commonness"], 20);
    assert_eq!(hits[0].doc["tags"], json!(["nice".to_string()]));
    // assert_eq!(hits[0].explain, Some(to_vec(&["term score 10.0 * anchor score 3.68 to 36.8", "levenshtein score 10.0 for urge"])));
    assert_eq!(hits[0].explain.as_ref().unwrap().len(), 2);
}

#[test]
fn or_query_explained() {
    let req = json!({ "search_req": { "or":{ "queries": [ {"search": { "terms":["majestät"], "path": "meanings.ger[]" }}, {"search": { "terms":["urge"], "path": "meanings.eng[]" }} ] } }, "explain":true });
    println!("yo");
    let hits = search_testo_to_doc!(req).data;
    assert_eq!(hits.len(), 2);
    assert_eq!(hits[0].doc["ent_seq"], "1587690");
    // assert_eq!(hits[0].explain, Some(to_vec(&["or sum_over_distinct_terms 36.8125", "term score 10.0 * anchor score 3.68 to 36.8", "levenshtein score 10.0 for urge"])));
    assert_eq!(hits[0].explain.as_ref().unwrap().len(), 5);
}

#[test]
fn test_float() {
    let req = json!({ "search": { "terms":["5.123"], "path": "float_value" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["float_value"], 5.123);
}

#[test]
fn test_bool() {
    let req = json!({ "search": { "terms":["true"], "path": "my_bool" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["my_bool"], true);
}

#[test]
fn should_return_an_error_when_trying_to_query_an_invalid_field() {
    let req = json!({ "search": { "terms":["test"], "path": "notexisting" } });
    let search_request: search::SearchRequest = serde_json::from_str(&req.to_string()).expect("Can't parse json");
    let requesto = search::Request { search_req: Some(search_request), ..Default::default() };
    let hits = search_to_hits!(requesto);
    assert_eq!(format!("{}", hits.unwrap_err()), "field does not exist notexisting.textindex (fst not found)".to_string())
}

#[test]
fn select_fields() {
    let req = json!({ "search_req": { "search": { "terms":["urge"], "path": "meanings.eng[]" }}, "select": ["ent_seq", "tags[]"] });
    let hits = search_testo_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587690");
    assert_eq!(hits[0].doc.get("commonness"), None); // didn't select
    assert_eq!(hits[0].doc["tags"], json!(["nice".to_string()]));
}

#[test]
fn two_tokens_h_test_fn_the_same_anchor() {
    let req = json!({ "search": { "terms":["majestätischer"], "path": "meanings.ger[]", "levenshtein_distance": 1 } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587680");
}

#[test]
fn deep_structured_objects() {
    let req = json!({ "search": { "terms":["brook"], "path": "address[].line[]", "levenshtein_distance": 1 } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["id"], 123456);
}

#[test]
fn should_search_without_first_char_exact_match() {
    let req = json!({ "search": { "terms":["najestätischer"], "path": "meanings.ger[]", "levenshtein_distance": 1 } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587680");
}

#[test]
fn should_prefer_exact_matches_to_tokenmatches() {
    let req = json!({ "search": { "terms":["will"], "path": "meanings.eng[]", "levenshtein_distance": 1 } });
    let wa = search_request_json_to_doc!(req).data;
    assert_eq!(wa[0].doc["meanings"]["eng"][0], "will");
}

#[test]
fn test_prefer_exact_match_over_multi_hit() {
    let dir = "test_prefer_exact_match_over_multi_hit";
    // Exact match is more important than many non exact hits
    let test_data = r#" { "definition": ["home"], "traditional": "家" } { "definition": ["to live at home", "to stay at home", "home (schooling etc)", "le home", "ok home", "so much home"], "traditional": "居家"} "#;
    let indices = r#""#;
    let pers: persistence::Persistence = common::create_test_persistence(dir, indices, test_data.as_bytes(), None);
    let req = json!({ "search_req": { "search": { "terms":["home"], "path": "definition[]", "levenshtein_distance": 0, "firstCharExactMatch":true }} });
    let requesto: search::Request = serde_json::from_str(&req.to_string()).expect("Can't parse json");
    let hits = search::to_search_result(&pers, search::search(requesto.clone(), &pers).expect("search error"), &requesto.select).data;
    assert_eq!(hits[0].doc["traditional"], "家");
    assert_eq!(hits[1].doc["traditional"], "居家");
}

#[test]
fn test_exact_match_with_boost() {
    let dir = "test_exact_match_with_boost";
    // Exact match is more important than many non exact hits
    let test_data = r#" { "definition": ["home", "family"], "traditional": "家", "commonness": 5.5318 } { "definition": ["place to return to", "home", "final destination", "ending"], "traditional": "歸宿", "commonness": 3.1294} "#;
    let indices = r#" [commonness.boost] boost_type = 'f32' "#;
    let pers: persistence::Persistence = common::create_test_persistence(dir, indices, test_data.as_bytes(), None);
    let req = json!({ "search_req": { "search": { "terms":["home"], "path": "definition[]", "levenshtein_distance": 0, "firstCharExactMatch":true }}, "boost" : [{ "path":"commonness", "boost_fun": "Log10", "param": 1 }] });
    let requesto: search::Request = serde_json::from_str(&req.to_string()).expect("Can't parse json");
    let hits = search::to_search_result(&pers, search::search(requesto.clone(), &pers).expect("search error"), &requesto.select).data;
    assert_eq!(hits[0].doc["traditional"], "家");
    assert_eq!(hits[1].doc["traditional"], "歸宿");
}

#[test]
fn should_prefer_exact_tokenmatches_to_fuzzy_text_hits() {
    let req = json!({ "search_req": { "search": { "terms":["karl"], "path": "meanings.eng[]", "levenshtein_distance": 1 }}, "explain":true });
    let wa = search_testo_to_doc!(req).data;
    println!("{}", serde_json::to_string_pretty(&wa).unwrap());
    assert_eq!(wa[0].doc["meanings"]["eng"][0], "karl der große"); // should hit karl, not karlo
}

#[test]
fn should_prefer_short_results() {
    let mut params = query_generator::SearchQueryGeneratorParameters::default();
    params.phrase_pairs = Some(true);
    params.explain = Some(true);
    params.search_term = "die erbin taschenbuch".to_string();
    let hits = search_testo_to_doco_qp!(params).data;
    assert_eq!(hits[0].doc["title"], "Die Erbin");
}

#[test]
fn should_search_word_non_tokenized() {
    let req = json!({ "search": { "terms":["偉容"], "path": "kanji[].text" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587680");
}

#[test]
fn should_check_disabled_tokenization() {
    let req = json!({ "search": { "terms":["tokens"], "path": "nofulltext" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 0);
}

#[test]
fn should_search_on_non_subobject() {
    let req = json!({ "search": { "terms":["1587690"], "path": "ent_seq" } });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
}

#[test]
fn and_connect_hits_same_field() {
    let req = json!({ "and":{ "queries":[ {"search": {"terms":["aussehen"], "path": "meanings.ger[]"}}, {"search": {"terms":["majestätisches"], "path": "meanings.ger[]"}} ]} });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587680");
}

#[test]
fn and_connect_hits_different_fields() {
    let req = json!({ "and":{ "queries":[ {"search": {"terms":["majestät"], "path": "meanings.ger[]"}}, {"search": {"terms":["majestic"], "path": "meanings.eng[]"}} ]} });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1587680");
}

#[test]
fn and_connect_hits_different_fields_no_hit() {
    let req = json!({ "and":{ "queries":[ {"search": { "terms":["majestät"], "path": "meanings.ger[]" }}, {"search": { "terms":["urge"], "path": "meanings.eng[]" }} ]} });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 0);
}

#[test]
fn and_connect_hits_different_fields_same_text_alle_meine_words_appears_again() {
    let req = json!({ "and":{ "queries":[ {"search": { "terms":["words"], "path": "meanings.ger[]" }}, {"search": { "terms":["1000"], "path": "ent_seq" }} ]} });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits.len(), 1);
    assert_eq!(hits[0].doc["ent_seq"], "1000");
}

#[test]
fn or_connect_hits_with_top() {
    let req = json!({ "search_req": { "or":{"queries":[ {"search": { "terms":["majestät"], "path": "meanings.ger[]" }}, {"search": { "terms":["urge"], "path": "meanings.eng[]" }} ]} }, "top":1 });
    let hits = search_testo_to_doc!(req).data;
    assert_eq!(hits[0].doc["ent_seq"], "1587690");
    assert_eq!(hits.len(), 1);
}

#[test]
fn or_connect_hits() {
    let req = json!({ "or":{"queries":[ {"search": { "terms":["majestät"], "path": "meanings.ger[]" }}, {"search": { "terms":["urge"], "path": "meanings.eng[]" }} ]} });
    let hits = search_request_json_to_doc!(req).data;
    assert_eq!(hits[0].doc["ent_seq"], "1587690");
    assert_eq!(hits.len(), 2);
}

#[test]
fn simple_search_and_connect_hits_with_filter() {
    let req = json!({ "search_req": { "search": { "terms":["urge"], "path": "meanings.eng[]" }}, "filter":{ "search": { "terms":["1587690"], "path": "ent_seq" } } });
    let hits =
search_testo_to_doc!(req).data; assert_eq!(hits.len(), 1); } #[test] fn or_connect_hits_with_filter() { let req = json!({ "search_req": { "or":{ "queries":[ {"search": { "terms":["majestät"], "path": "meanings.ger[]" }}, {"search": { "terms":["urge"], "path": "meanings.eng[]" }} ] } }, "filter":{ "search": { "terms":["1587690"], "path": "ent_seq" } } }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits.len(), 1); } #[test] fn or_connect_hits_with_filter_reuse_query() { let req = json!({ "search_req": {"or":{ "queries": [ {"search": { "terms":["majestät"], "path": "meanings.ger[]" }}, {"search": { "terms":["urge"], "path": "meanings.eng[]" }} ]}}, "filter":{ "search": { "terms":["urge"], "path": "meanings.eng[]" } } }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits.len(), 1); } #[test] fn should_find_2_values_from_token() { let req = json!({ "search_req": { "search": { "terms":["意慾"], "path": "kanji[].text" }} }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits.len(), 2); } #[test] fn should_search_and_boosto() { let req = json!({ "search_req": { "search": { "terms":["意慾"], "path": "kanji[].text" }}, "boost" : [{ "path":"kanji[].commonness", "boost_fun": "Log10", "param": 1 }] }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits.len(), 2); } #[test] fn should_search_and_double_boost() { let req = json!({ "search_req": { "search": { "terms":["awesome"], "path": "field1[].text" }}, "boost" : [{ "path":"commonness", "boost_fun": "Log10", "param": 1 }, { "path":"field1[].rank", "expression": "10 / $SCORE", "skip_when_score" : [0] }] }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits.len(), 2); } #[test] fn should_search_and_boost_anchor() { let req = json!({ "search_req": { "search": { "terms":["意慾"], "path": "kanji[].text", "levenshtein_distance": 0, "firstCharExactMatch":true }}, "boost" : [{ "path":"commonness", "boost_fun": "Log10", "param": 1 }] }); let hits = search_testo_to_doc!(req).data; 
assert_eq!(hits[0].doc["commonness"], 500); } #[test] fn should_or_connect_search_and_boost_anchor() { let req = json!({ "or":{ "queries": [ { "search": { "terms":["awesome"], "path": "field1[].text", "options": {"boost" : [{ "path":"field1[].rank", "boost_fun": "Log10", "param": 1 }]} } }, { "search": { "terms":["urge"], "path": "meanings.eng[]", "options": { "boost" : [{ "path":"commonness", "boost_fun": "Log10", "param": 1 }]} } } ]} }); let hits = search_request_json_to_doc!(req).data; assert_eq!(hits[0].doc["commonness"], 20); } #[test] fn should_or_connect_same_search() { let req = json!({ "or":{ "queries": [ { "search": { "terms":["awesome"], "path": "field1[].text" } }, { "search": { "terms":["awesome"], "path": "field1[].text" } } ]} }); let hits = search_request_json_to_doc!(req).data; assert_eq!(hits[0].doc["commonness"], 551); assert_eq!(hits.len(), 2); } #[test] fn should_use_search_on_field_for_suggest_without_sorting_etc() { let req = json!({ "terms":["majes"], "path": "meanings.ger[]", "levenshtein_distance": 0, "starts_with":true, }); let requesto: search::RequestSearchPart = serde_json::from_str(&req.to_string()).expect("Can't parse json"); let pers = &TEST_PERSISTENCE; use veloci::plan_creator::execution_plan::PlanRequestSearchPart; let mut requesto = PlanRequestSearchPart { request: requesto, return_term: true, ..Default::default() }; let results = search_field::get_term_ids_in_field(pers, &mut requesto).unwrap(); let mut all_terms = results.terms.values().collect::<Vec<&String>>(); all_terms.sort(); // assert_eq!(all_terms, ["majestät", "majestätischer", "majestätischer anblick", "majestätisches", "majestätisches aussehen"]); assert_eq!( all_terms, [ "Majestät", "Majestät (f)", "majestätischer", "majestätischer Anblick (m)", "majestätisches", "majestätisches Aussehen (n)" ] ); } // #[test] // fn should_highlight_ids(){ // let mut pers = &TEST_PERSISTENCE; // let inf = search::SnippetInfo{ // num_words_around_snippet : 4, // max_snippets : 40, 
// snippet_start_tag: "<b>".to_string(), // snippet_end_tag: "</b>".to_string(), // snippet_connector: " ... ".to_string(), // }; // let results = search_field::highlight_document(&pers, "mylongtext.textindex", 13, &[9], &inf).unwrap(); // assert_eq!(results, "this is a <b>story</b> of a guy who ... " ); // } #[test] fn should_highlight_on_field() { let req = json!({ "terms":["story"], "path": "mylongtext", "levenshtein_distance": 0, "starts_with":true, "snippet":true, "top":10, "skip":0 }); let mut requesto: search::RequestSearchPart = serde_json::from_str(&req.to_string()).expect("Can't parse json"); let pers = &TEST_PERSISTENCE; let results = search_field::highlight(pers, &mut requesto).unwrap(); assert_eq!( results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["Prolog:\nthis is a <b>story</b> of a guy who went ... "] ); } #[test] fn should_highlight_on_1_n_field() { let req = json!({ "terms":["story"], "path": "tags[]", "levenshtein_distance": 0, "starts_with":true, "snippet":true, "top":10, "skip":0 }); let mut requesto: search::RequestSearchPart = serde_json::from_str(&req.to_string()).expect("Can't parse json"); let pers = &TEST_PERSISTENCE; let results = search_field::highlight(pers, &mut requesto).unwrap(); assert_eq!( results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["Prolog:\nthis is a <b>story</b> of a guy who went ... "] ); } #[test] fn should_select_on_long_text() { let req = json!({ "search_req": { "search": { "terms":["story"], "path": "mylongtext" }}, "select": ["mylongtext"] }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits.len(), 1); assert_eq!( hits[0].doc["mylongtext"], json!("Prolog:\nthis is a story of a guy who went out to rule the world, but then died. 
the end".to_string()) ); } #[test] fn should_highlight_on_sub_level_field() { let req = json!({ "terms":["story"], "path": "sub_level[].text", "levenshtein_distance": 0, "starts_with":true, "snippet":true, "top":10, "skip":0 }); let mut requesto: search::RequestSearchPart = serde_json::from_str(&req.to_string()).expect("Can't parse json"); let pers = &TEST_PERSISTENCE; let results = search_field::highlight(pers, &mut requesto).unwrap(); assert_eq!( results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["Prolog:\nthis is <b>story</b> of a guy who went ... "] ); } #[test] fn real_suggest_with_score() { let req = json!({ "terms":["majes"], "path": "meanings.ger[]", "levenshtein_distance": 0, "starts_with":true, "top":10, "skip":0 }); let requesto: search::RequestSearchPart = serde_json::from_str(&req.to_string()).expect("Can't parse json"); let pers = &TEST_PERSISTENCE; let results = search_field::suggest(pers, &requesto).unwrap(); // assert_eq!(results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["majestät", "majestätischer", "majestätisches", "majestätischer anblick", "majestätisches aussehen"]); // assert_eq!(results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["Majestät", "Majestät (f)", "majestätischer", "majestätisches", "majestätischer Anblick (m)", "majestätisches Aussehen (n)"]); assert_eq!( results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), [ "majestät", "majestät (f)", "majestätisches", "majestätischer", "majestätischer anblick (m)", "majestätisches aussehen (n)" ] ); } #[test] fn multi_real_suggest_with_score() { let req = json!({ "suggest" : [ {"terms":["will"], "path": "meanings.ger[]", "levenshtein_distance": 0, "starts_with":true}, {"terms":["will"], "path": "meanings.eng[]", "levenshtein_distance": 0, "starts_with":true} ], "top":10, "skip":0 }); let requesto: search::Request = serde_json::from_str(&req.to_string()).expect("Can't parse json"); let pers = &TEST_PERSISTENCE; let results = 
search_field::suggest_multi(pers, requesto).unwrap(); // assert_eq!(results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["will", "wille", "will testo"]); // assert_eq!(results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["will", "Wille", "Wille (m)", "will testo"]); assert_eq!(results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["will", "wille", "wille (m)", "will testo"]); } #[test] fn real_suggest_with_boosting_score_of_begeisterung_and_token_value() { let req = json!({ "terms":["begeist"], "path": "meanings.ger[]", "levenshtein_distance": 0, "starts_with":true, "token_value": { "path":"meanings.ger[]", "boost_fun":"Log10", "param": 1 }, "top":10, "skip":0 }); let requesto: search::RequestSearchPart = serde_json::from_str(&req.to_string()).expect("Can't parse json"); let pers = &TEST_PERSISTENCE; let results = search_field::suggest(pers, &requesto).unwrap(); // assert_eq!(results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["begeisterung", "begeistern"]); // assert_eq!(results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["Begeisterung", "begeistern", "Begeisterung (f)"]); assert_eq!( results.iter().map(|el| el.0.clone()).collect::<Vec<String>>(), ["begeisterung", "begeistern", "begeisterung (f)"] ); } #[test] fn should_rank_boost_on_anchor_higher_search_on_anchor() { let hits_boosted = search_testo_to_doc!(json!({ "search_req": { "search": { "terms":["COllectif"], "path": "title" }}, "boost" : [{ "path":"commonness", "boost_fun": "Log2", "param": 2 }] })) .data; let hits_unboosted = search_request_json_to_doc!(json!({ "search": { "terms":["COllectif"], "path": "title" } })) .data; assert_gt!(hits_boosted[0].hit.score, hits_unboosted[0].hit.score); } #[test] fn should_rank_boost_on_anchor_higher_search_on_1_n() { let hits_boosted = search_testo_to_doc!(json!({ "search_req": {"search": { "terms":["boostemich"], "path": "meanings.ger[]" }}, "boost" : [{ "path":"commonness", "boost_fun": "Log2", "param": 2 
}] })) .data; let hits_unboosted = search_testo_to_doc!(json!({ "search_req": {"search": { "terms":["boostemich"], "path": "meanings.ger[]" }} })) .data; assert_gt!(hits_boosted[0].hit.score, hits_unboosted[0].hit.score); } #[test] fn should_check_explain_plan_contents() { let req = json!({ "search_req": {"search": { "terms":["weich"], // hits welche and weich "path": "meanings.ger[]", "levenshtein_distance": 1, "firstCharExactMatch":true }}, "boost" : [{ "path":"commonness", "boost_fun": "Log2", "param": 2 }] }); let explain = search_testo_to_explain!(req).to_lowercase(); assert_contains!(explain, "weich"); // include the term assert_contains!(explain, "meanings.ger[]"); // include the field assert_contains!(explain, "boost"); } #[test] fn should_boost_terms_and_from_cache() { let req = json!({ "search_req": {"search": { "terms":["weich"], "path": "meanings.ger[]", "levenshtein_distance": 1, "firstCharExactMatch":true }}, "boost_term":[{ "terms":["9555"], "path": "ent_seq", "boost": 5.0 }] }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits[0].doc["meanings"]["ger"][0], "(1) 2 3 super nice weich"); //using boost cache here let hits = search_testo_to_doc!(req).data; assert_eq!(hits[0].doc["meanings"]["ger"][0], "(1) 2 3 super nice weich"); let hits = search_testo_to_doc!(req).data; assert_eq!(hits[0].doc["meanings"]["ger"][0], "(1) 2 3 super nice weich"); } #[test] fn should_add_why_found_terms() { let req = json!({ "search_req": {"search": { "terms":["weich"], "path": "meanings.ger[]", "levenshtein_distance": 1, "firstCharExactMatch":true }}, "why_found":true, "explain": true }); let hits = search_testo_to_doc!(req).data; println!("{}", serde_json::to_string_pretty(&hits).unwrap()); assert_eq!(hits[0].doc["meanings"]["ger"][0], "(1) weich"); } #[test] fn or_connect_hits_but_boost_one_term() { let req = json!({ "search_req": {"or":{"queries":[ {"search": {"terms":["majestät (f)"], "path": "meanings.ger[]", "boost": 2}}, {"search": {"terms":["urge"], 
"path": "meanings.eng[]"}} ]}} }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits.len(), 2); assert_eq!(hits[0].doc["meanings"]["ger"][0], "majestätischer Anblick (m)"); } #[test] fn get_bytes_indexed() { let pers = &TEST_PERSISTENCE; assert_gt!(pers.get_bytes_indexed(), 2685); } #[test] fn boost_text_localitaet() { let req = json!({ "search_req": { "or":{ "queries":[ {"search": {"terms":["text"], "path": "meanings.ger[]"}}, {"search": {"terms":["localität"], "path": "meanings.ger[]"}}, ] } }, "text_locality": true, "explain": true }); let hits = search_testo_to_doc!(req).data; assert_eq!(hits[0].doc["meanings"]["ger"][0], "text localität"); } #[test] fn read_object_only_partly() { let pers = &TEST_PERSISTENCE; let yay = search::read_data( pers, 4, &[ "commonness".to_string(), "ent_seq".to_string(), "meanings.ger[]".to_string(), "kana[].text".to_string(), "kana[].commonness".to_string(), "kana[].romaji".to_string(), "address[].line[]".to_string(), ], ) .unwrap(); assert_eq!( yay, json!({ "address": [ {"line": ["nuts strees"] }, {"line": ["asdf"] } ], "commonness": "500", "ent_seq": "1587700", "meanings": { "ger": ["der test", "das ist ein guter Treffer"] }, "kana": [{"text": "いよく"} ] }) ); } #[test] fn should_skip_existing_fields_which_are_not_existent_in_the_object_none_values() { let pers = &TEST_PERSISTENCE; //Check None values let yay = search::read_data(pers, 3, &["mylongtext".to_string()]).unwrap(); assert_eq!(yay, json!({})); } #[test] fn read_recreate_complete_object_with_read() { let pers = &TEST_PERSISTENCE; let all_props = pers.metadata.get_all_fields(); let yay2 = search::read_data(pers, 4, &all_props).unwrap(); assert_eq!( yay2, json!({ //TODO FIX INTEGER TO STRING "id": "1234566", "gender": "male", "tags": ["awesome", "cool"], "sinlge_value_multi": ["wert"], "birthDate": "1960-08-19", "address": [{"line": ["nuts strees"] }, {"line": ["asdf"] } ], "commonness": "500", "kanji": [{ "text": "意慾", "commonness": "20"} ], "field1" : 
[{"text":"awesome", "rank":"1"}], "kana": [{"text": "いよく"} ], "meanings": {"eng" : ["test1"], "ger": ["der test", "das ist ein guter Treffer"] }, "ent_seq": "1587700" }) ); }
pub mod bench; pub mod bristol;
// Export the enum variants, without the enum mod foo { export t1; enum t { t1, } } fn main() { let v = foo::t1; }
pub fn read<T: std::str::FromStr>() -> T { let mut s = String::new(); std::io::stdin().read_line(&mut s).ok(); s.trim().parse().ok().unwrap() } pub fn read_vec<T: std::str::FromStr>() -> Vec<T> { read::<String>() .split_whitespace() .map(|e| e.parse().ok().unwrap()) .collect() } pub fn read_vec2<T: std::str::FromStr>(n: u32) -> Vec<Vec<T>> { (0..n).map(|_| read_vec()).collect() } pub fn read_col<T: std::str::FromStr>(n: u32) -> Vec<T> { (0..n).map(|_| read()).collect() } fn left(i: usize) -> usize { (i + 1) * 2 - 1 } fn right(i: usize) -> usize { (i + 1) * 2 } fn max_heapify(v: &mut Vec<i32>, i: usize) { let h = v.len(); let l = left(i); let r = right(i); let mut largest = i; if l < h && v[l] > v[i] { largest = l; } if r < h && v[r] > v[largest] { largest = r; } if largest != i { let tmp = v[largest]; v[largest] = v[i]; v[i] = tmp; max_heapify(v, largest); } } fn build_max_heap(v: &mut Vec<i32>) { let h = v.len(); for i in (0..h / 2).rev() { max_heapify(v, i); } } fn main() { let h: usize = read(); let mut v: Vec<i32> = read_vec(); build_max_heap(&mut v); for i in 0..h { print!(" {}", v[i]); } println!(""); }
// Make sure that fn-to-block coercion isn't incorrectly lifted over // other tycons. fn coerce(b: fn()) -> native fn() { fn lol(f: native fn(fn()) -> native fn(), g: fn()) -> native fn() { ret f(g); } fn fn_id(f: native fn()) -> native fn() { ret f } ret lol(fn_id, b); //!^ ERROR mismatched types: expected `native fn(fn()) -> native fn()` } fn main() { let i = 8; let f = coerce({|| log(error, i); }); f(); }
extern crate tch; //use std::f64::consts::PI; use tch::{kind, Tensor, manual_seed, no_grad}; use hello::{Distribution, MultivariateNormal, Scale}; fn main() { // let sigma = Tensor::eye(8, kind::FLOAT_CPU); // let sigma2 = sigma.expand(&[6, 1, 8, 8], true).contiguous(); // sigma2.requires_grad(); // let bx = Tensor::randn(&[8000, 6, 1, 8], kind::FLOAT_CPU); // let bl = sigma2; // let _ = _batch_mahalanobis(&bl, &bx); // let _ = _batch_mv(&bl, &bx); _test_rsample1(); _test_log_prob(); } fn _test_rsample1() { let loc = Tensor::of_slice(&[1.0f32, 2.0]); let scale = Tensor::of_slice(&[1.0f32, 0.0, -0.5, 2.0]).reshape(&[2, 2]); let dist = MultivariateNormal::new(loc, Scale::ScaleTril(scale)); let xs = dist.rsample(&[5, 4]); xs.print(); } fn _test_log_prob() { let loc = Tensor::of_slice(&[1.0f32, 2.0]); let scale = Tensor::of_slice(&[1.0f32, 0.0, -0.5, 2.0]).reshape(&[2, 2]); let dist = MultivariateNormal::new(loc, Scale::ScaleTril(scale)); let s = Tensor::arange2(-8.0, 8.0, 0.05, kind::FLOAT_CPU); let xs = s.reshape(&[-1, 1]).ones_like().matmul(&s.reshape(&[1, -1])); // let ys = xs.transpose(1, 0); # it causes bug when writing it to npz? let ys = s.reshape(&[-1, 1]).matmul(&s.reshape(&[1, -1]).ones_like()); let xys = Tensor::stack(&[&xs.reshape(&[-1]), &ys.reshape(&[-1])], 1); let lp = dist.log_prob(&xys).reshape(&xs.size()); let f = "test_log_prob.npz"; Tensor::write_npz(&[("xs", &xs), ("ys", &ys), ("lp", &lp)], f); }
use azure_core::Context;
use azure_cosmos::prelude::*;
use std::error::Error;

// Example: create a Cosmos user, mint a read-only permission for a
// collection, demonstrate that its token cannot create documents, then mint
// an "All" permission and show the create succeeding.
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    // First we retrieve the account name and master key from environment variables.
    // We expect master keys (ie, not resource constrained)
    let master_key =
        std::env::var("COSMOS_MASTER_KEY").expect("Set env variable COSMOS_MASTER_KEY first!");
    let account = std::env::var("COSMOS_ACCOUNT").expect("Set env variable COSMOS_ACCOUNT first!");

    let database_name = std::env::args()
        .nth(1)
        .expect("please specify the database name as first command line parameter");
    let collection_name = std::env::args()
        .nth(2)
        .expect("please specify the collection name as second command line parameter");
    let user_name = std::env::args()
        .nth(3)
        .expect("please specify the user name as third command line parameter");

    let authorization_token = AuthorizationToken::primary_from_base64(&master_key)?;
    let client = CosmosClient::new(
        account.clone(),
        authorization_token,
        CosmosOptions::default(),
    );
    let database_client = client.clone().into_database_client(database_name.clone());
    let collection_client = database_client
        .clone()
        .into_collection_client(collection_name.clone());
    let user_client = database_client.into_user_client(user_name);

    let get_collection_response = collection_client
        .get_collection(Context::new(), GetCollectionOptions::new())
        .await?;
    println!("get_collection_response == {:#?}", get_collection_response);

    let create_user_response = user_client
        .create_user(Context::new(), CreateUserOptions::default())
        .await?;
    println!("create_user_response == {:#?}", create_user_response);

    // test list documents
    let list_documents_response = collection_client
        .list_documents()
        .execute::<serde_json::Value>()
        .await
        .unwrap();
    println!(
        "list_documents_response got {} document(s).",
        list_documents_response.documents.len()
    );

    // create the first permission!
    let permission_client = user_client.clone().into_permission_client("matrix");
    let permission_mode = get_collection_response.collection.read_permission();
    let create_permission_response = permission_client
        .create_permission(
            Context::new(),
            CreatePermissionOptions::new().expiry_seconds(18000u64), // 5 hours, max!
            &permission_mode,
        )
        .await
        .unwrap();
    println!(
        "create_permission_response == {:#?}",
        create_permission_response
    );

    // change the AuthorizationToken using the token
    // of the permission.
    let new_authorization_token: AuthorizationToken = create_permission_response
        .permission
        .permission_token
        .into();
    println!(
        "Replacing authorization_token with {:?}.",
        new_authorization_token
    );
    let mut client = client.clone();
    client.auth_token(new_authorization_token);

    // let's list the documents with the new auth token
    let list_documents_response = client
        .clone()
        .into_database_client(database_name.clone())
        .into_collection_client(collection_name.clone())
        .list_documents()
        .execute::<serde_json::Value>()
        .await
        .unwrap();
    println!(
        "second list_documents_response got {} document(s).",
        list_documents_response.documents.len()
    );

    // Now we try to insert a document with the "read-only"
    // authorization_token just created. It will fail.
    // The collection should have /id as partition key
    // for this example to work.
    let data = r#"
{
    "id": "Gianluigi Bombatomica",
    "age": 43,
    "phones": [
        "+39 1234567",
        "+39 2345678"
    ]
}"#;
    let document = serde_json::from_str::<serde_json::Value>(data)?;
    match client
        .clone()
        .into_database_client(database_name.clone())
        .into_collection_client(collection_name.clone())
        .create_document(
            Context::new(),
            &document,
            CreateDocumentOptions::new()
                .is_upsert(true)
                .partition_key(&"Gianluigi Bombatomica")
                .unwrap(),
        )
        .await
    {
        Ok(_) => panic!("this should not happen!"),
        Err(error) => println!("Insert failed: {:#?}", error),
    }

    permission_client
        .delete_permission(Context::new(), DeletePermissionOptions::new())
        .await?;

    // All includes read and write.
    let permission_mode = get_collection_response.collection.all_permission();
    let create_permission_response = permission_client
        .create_permission(
            Context::new(),
            CreatePermissionOptions::new().expiry_seconds(18000u64), // 5 hours, max!
            &permission_mode,
        )
        .await
        .unwrap();
    println!(
        "create_permission_response == {:#?}",
        create_permission_response
    );
    let new_authorization_token: AuthorizationToken = create_permission_response
        .permission
        .permission_token
        .into();
    println!(
        "Replacing authorization_token with {:?}.",
        new_authorization_token
    );
    client.auth_token(new_authorization_token);

    // now we have an "All" authorization_token
    // so the create_document should succeed!
    let create_document_response = client
        .into_database_client(database_name)
        .into_collection_client(collection_name)
        .create_document(
            Context::new(),
            &document,
            CreateDocumentOptions::new()
                .is_upsert(true)
                .partition_key(&"Gianluigi Bombatomica")
                .unwrap(),
        )
        .await?;
    println!(
        "create_document_response == {:#?}",
        create_document_response
    );

    println!("Cleaning up user.");
    let delete_user_response = user_client
        .delete_user(Context::new(), DeleteUserOptions::new())
        .await?;
    println!("delete_user_response == {:#?}", delete_user_response);

    Ok(())
}
use std::convert::TryFrom;
use std::io;
use std::io::Write;
use failure::Error;
use hyper::body::Buf;
use hyper::body::HttpBody;
use hyper::body::Sender;
use md5::digest::FixedOutput;
use md5::digest::Input;
use tokio::io::AsyncReadExt as _;
use tokio::io::AsyncWrite;
use tokio::io::AsyncWriteExt as _;
use tokio::prelude::AsyncRead;
use zstd::stream::raw::Operation;

use super::dir::ContentInfo;

/// Zstd-compresses an HTTP body chunk-by-chunk into `out`, computing the
/// *uncompressed* length and MD5 (base64-encoded) for the returned
/// `ContentInfo`. The encoder writes into an in-memory cursor that is
/// drained to `out` whenever a compressed frame becomes available.
pub async fn stream_pack<W: Unpin + AsyncWrite>(
    mut body: hyper::Body,
    mut out: W,
) -> Result<ContentInfo, Error> {
    // Compression level 3; checksum embedded in the zstd stream.
    let mut enc = zstd::stream::Encoder::new(io::Cursor::new(Vec::with_capacity(8 * 1024)), 3)?;
    enc.include_checksum(true)?;
    let mut length = 0;
    let mut md5 = md5::Md5::default();
    while let Some(data) = body.data().await {
        // typically 8 - 128kB chunks
        let mut data = data?;
        md5.input(&data);
        length += u64::try_from(data.len())?;
        while !data.is_empty() {
            // The encoder may accept only part of the chunk; advance past
            // what was consumed and retry with the remainder.
            let written = enc.write(&data)?;
            data.advance(written);
            let cursor = enc.get_mut();
            let vec = cursor.get_mut();
            // frequently (for compressible data), the write has not caused any new frames
            if !vec.is_empty() {
                out.write_all(vec).await?;
                vec.clear();
                cursor.set_position(0);
            }
        }
    }
    // finish() flushes the trailing frame and the checksum.
    out.write_all(enc.finish()?.get_ref()).await?;
    let md5_base64 = base64::encode(&md5.fixed_result());
    Ok(ContentInfo { length, md5_base64 })
}

/// Streams zstd-decompressed data from `from` into a hyper body `Sender`.
///
/// Reads 8 KiB at a time, feeds the raw decoder until it stops producing
/// output, and errors with `UnexpectedEof` when input ends mid-frame
/// (i.e. undecoded bytes remain after EOF).
pub async fn stream_unpack<R: Unpin + AsyncRead>(
    mut from: R,
    mut sender: Sender,
) -> Result<(), Error> {
    let mut dec = zstd::stream::raw::Decoder::new()?;
    let mut inp = Vec::with_capacity(16 * 1024);
    loop {
        // Pull the next chunk from the reader; `found == 0` signals EOF.
        let found = {
            let mut buf = [0u8; 8 * 1024];
            let found = from.read(&mut buf).await?;
            inp.extend_from_slice(&buf[..found]);
            found
        };
        // Drain the decoder: keep running it over the pending input until it
        // produces no further output.
        loop {
            let mut buf = [0u8; 16 * 1024];
            let status = dec.run_on_buffers(&inp, &mut buf)?;
            inp.drain(..status.bytes_read);
            if 0 == status.bytes_written {
                break;
            }
            sender
                .send_data(buf[..status.bytes_written].to_vec().into())
                .await?;
        }
        if 0 == found {
            if inp.is_empty() {
                // it doesn't want to write anything (previous loop condition),
                // we can't feed it any more data (found), and
                // it read everything that we had available
                return Ok(());
            }
            return Err(io::Error::from(io::ErrorKind::UnexpectedEof).into());
        }
    }
}
use anyhow::{Context, Result}; pub fn parse_rust_into_ast(rust_source: &str) -> Result<syn::File> { syn::parse_str(rust_source).with_context(|| "Parse error") }
use crate::helpers; use std::env; // Parallel tests on the same env variable causes problems. // #[test] // fn should_use_application_port_env_var_if_specified() { // helpers::initialize(); // env::set_var("APPLICATION_PORT", "8001"); // let port = 8001; // env::set_var("APPLICATION_PORT", port.to_string()); // let settings_res = group_expenses::Settings::new(); // assert!(settings_res.is_ok()); // assert_eq!(port, settings_res.unwrap().application_port()); // } #[test] fn should_use_the_default_application_port_without_env_var() { helpers::initialize(); env::remove_var("APPLICATION_PORT"); let settings = group_expenses::Settings::new(); assert!(settings.is_ok(), settings.err().unwrap().to_string()); assert_eq!( group_expenses::Settings::new().unwrap().application_port(), settings.unwrap().application_port() ); }
struct Lyndon { n: usize, k: u8, buf: Option<Vec<u8>>, } impl Lyndon { fn new(n: usize, k: u8) -> Self { Self { n, k, buf: None } } } impl Iterator for Lyndon { type Item = Vec<u8>; fn next(&mut self) -> Option<Self::Item> { match &mut self.buf { None => { // Initialise let mut v = Vec::with_capacity(self.n); v.push(0); self.buf = Some(v); Some(vec![0]) } Some(buf) => { // Repeat until chosen length let len = buf.len(); for i in len..self.n { buf.push(buf[i % len]); } // Remove last element if it is the greatest value while buf.len() != 0 { if buf[buf.len() - 1] != self.k - 1 { break; } buf.pop(); } // Increment last element let len = buf.len(); if len != 0 { buf[len - 1] += 1; } // Give result match len { 0 => None, _ => Some(buf.clone()), } } } } } pub fn debruijn(n: usize, k: u8) -> impl Iterator<Item = u8> { Lyndon::new(n, k) .filter(move |x| n % x.len() == 0) .flatten() } pub fn lookup(n: usize, k: u8, needle: &[u8]) -> usize { let table = debruijn(n, k).collect::<Vec<_>>().repeat(2); let position = table .windows(needle.len()) .position(|window| window == needle); position.unwrap() }
use crate::chain_spec::{ChainSpec, TELEMETRY_URL};
use hex_literal::hex;
use jsonrpc_core::serde_json::Map;
use sc_service::{config::TelemetryEndpoints, ChainType};
#[cfg(not(feature = "parachain"))]
use sp_core::crypto::UncheckedInto;
use zeitgeist_primitives::{constants::BASE, types::AccountId};

/// Builds the chain specification for the "Battery Park" staging network.
///
/// With the `parachain` feature the spec is parameterised by the parachain
/// `id`; without it, the genesis carries the initial (Aura, GRANDPA)
/// authorities for a standalone chain.
pub fn battery_park_staging_config(
    #[cfg(feature = "parachain")] id: cumulus_primitives_core::ParaId,
) -> Result<ChainSpec, String> {
    let wasm_binary = zeitgeist_runtime::WASM_BINARY
        .ok_or("Development wasm binary not available".to_string())?;
    // Token metadata surfaced to UIs (e.g. polkadot-js apps).
    let mut properties = Map::new();
    properties.insert("tokenSymbol".into(), "ZBP".into());
    properties.insert("tokenDecimals".into(), 10.into());
    Ok(ChainSpec::from_genesis(
        "Zeitgeist Battery Park Staging",
        "battery_park_staging",
        ChainType::Live,
        move || {
            battery_park_staging_genesis(
                // Accounts endowed with `initial_balance` at genesis.
                vec![
                    // 5D2L4ghyiYE8p2z7VNJo9JYwRuc8uzPWtMBqdVyvjRcsnw4P
                    hex!["2a6c61a907556e4c673880b5767dd4be08339ee7f2a58d5137d0c19ca9570a5c"].into(),
                    // 5EeeZVU4SiPG6ZRY7o8aDcav2p2mZMdu3ZLzbREWuHktYdhX
                    hex!["725bb6fd13d52b3d6830e5a9faed1f6499ca0f5e8aa285df09490646e71e831b"].into(),
                ],
                #[cfg(feature = "parachain")]
                id,
                // Standalone chain only: the initial (Aura, GRANDPA) key pair.
                #[cfg(not(feature = "parachain"))]
                vec![(
                    // 5FCSJzvmeUW1hBo3ASnLzSxpUdn5QUDt1Eqobj1meiQB7mLu
                    hex!["8a9a54bdf73fb4a757f5ab81fabe2f173922fdb92bb8b6e8bedf8b17fa38f500"]
                        .unchecked_into(),
                    // 5HGProUwcyCDMJDxjBBKbv8u7ehr5uoTBS3bckYHPcZMTifW
                    hex!["e61786c6426b55a034f9c4b78dc57d4183927cef8e64b2e496225ed6fca41758"]
                        .unchecked_into(),
                )],
                // Sudo (root) key — same account as the first endowed one.
                hex!["2a6c61a907556e4c673880b5767dd4be08339ee7f2a58d5137d0c19ca9570a5c"].into(),
                wasm_binary,
            )
        },
        // Boot nodes: none baked into the spec.
        vec![],
        TelemetryEndpoints::new(vec![(TELEMETRY_URL.into(), 0)]).ok(),
        // Protocol id.
        Some("battery_park_staging"),
        Some(properties),
        #[cfg(feature = "parachain")]
        crate::chain_spec::Extensions {
            relay_chain: "rococo-battery-park".into(),
            para_id: id.into(),
        },
        #[cfg(not(feature = "parachain"))]
        Default::default(),
    ))
}

/// Assembles the genesis storage for the Battery Park staging network.
fn battery_park_staging_genesis(
    endowed_accounts: Vec<AccountId>,
    #[cfg(feature = "parachain")] id: cumulus_primitives_core::ParaId,
    #[cfg(not(feature = "parachain"))] initial_authorities: Vec<(
        sp_consensus_aura::sr25519::AuthorityId,
        sp_finality_grandpa::AuthorityId,
    )>,
    root_key: AccountId,
    wasm_binary: &[u8],
) -> zeitgeist_runtime::GenesisConfig {
    // 10 000 base units for every endowed account.
    let initial_balance = 10_000 * BASE;
    zeitgeist_runtime::GenesisConfig {
        frame_system: zeitgeist_runtime::SystemConfig {
            // Add Wasm runtime to storage.
            code: wasm_binary.to_vec(),
            changes_trie_config: Default::default(),
        },
        orml_tokens: zeitgeist_runtime::TokensConfig::default(),
        #[cfg(not(feature = "parachain"))]
        pallet_aura: zeitgeist_runtime::AuraConfig {
            // Aura authorities are the first element of each pair.
            authorities: initial_authorities.iter().map(|x| (x.0.clone())).collect(),
        },
        pallet_balances: zeitgeist_runtime::BalancesConfig {
            balances: endowed_accounts
                .iter()
                .cloned()
                .map(|k| (k, initial_balance))
                .collect(),
        },
        #[cfg(not(feature = "parachain"))]
        pallet_grandpa: zeitgeist_runtime::GrandpaConfig {
            // Each GRANDPA authority (second element) votes with weight 1.
            authorities: initial_authorities
                .iter()
                .map(|x| (x.1.clone(), 1))
                .collect(),
        },
        pallet_sudo: zeitgeist_runtime::SudoConfig {
            // Assign the network admin rights.
            key: root_key,
        },
        #[cfg(feature = "parachain")]
        parachain_info: zeitgeist_runtime::ParachainInfoConfig { parachain_id: id },
    }
}
extern crate cc; use walkdir::WalkDir; fn main() { compile_and_link_c_files(); } fn compile_and_link_c_files() { let mut build = cc::Build::new(); // Recursively traverse all files add them to the compiler. for entry in WalkDir::new("c_lib") .into_iter() .filter_map(Result::ok) .filter(|e| !e.file_type().is_dir()) .filter(|e| { let fname = e.file_name().to_str().unwrap_or_default(); fname.ends_with(".c") }) { build.file(entry.path()); } build.include("c_lib").static_flag(true); // Finally compile all c files build.compile("c.lib"); }
use lmdb::{Cursor, Database, Environment, Transaction};
use std::collections::HashMap;
use std::fs::create_dir_all;
use std::path::PathBuf;
use thiserror::Error;

use crate::Record;
use crate::RoQuery;

/// Storage provides a simple interface for interacting with databases
pub struct Storage {
    // LMDB environment owning every database handle below.
    env: Environment,
    #[allow(dead_code)]
    path: PathBuf,
    // Cache of named database handles, filled lazily by `db()`.
    dbs: HashMap<&'static str, lmdb::Database>,
}

/// Errors that can arise from interacting with Storage
#[derive(Error, Debug)]
pub enum StorageError {
    #[error("could not access database directory")]
    FileError {
        #[from]
        source: std::io::Error,
    },
    #[error("could not process database command")]
    DBError {
        #[from]
        source: lmdb::Error,
    },
}

impl Storage {
    /// Creates or Opens a storage directory for managing databases.
    ///
    /// LMDB storage expects path to be a directory.
    ///
    /// If the path does not exist it will be created.
    ///
    /// # Arguments
    ///
    /// * `path` - The path where the database should be created / opened
    ///
    /// # Examples
    ///
    /// ```
    /// use nostalgia::{Storage, StorageError};
    ///
    /// fn main() -> Result<(), StorageError> {
    ///     // Into trait allows for str argument
    ///     let a = Storage::new("/tmp/db")?;
    ///
    ///     // Also allows for a std::string::String
    ///     let b = Storage::new(String::from("/tmp/db2"))?;
    ///
    ///     // PathBuf's also work
    ///     let c = Storage::new(std::env::temp_dir())?;
    ///
    ///     Ok(())
    /// }
    ///
    /// ```
    ///
    pub fn new<P: Into<PathBuf>>(path: P) -> Result<Storage, StorageError> {
        let mut builder = lmdb::Environment::new();
        // Generous fixed limits: up to 2048 named databases, 256 MiB map size.
        builder.set_max_dbs(2048);
        builder.set_map_size(256 * 1024 * 1024);

        let p = &path.into();
        create_dir_all(p)?;
        // NOTE(review): `unwrap` panics if the environment fails to open even
        // though `StorageError` has a `From<lmdb::Error>` variant — `?` would
        // surface the error instead; confirm and fix separately.
        let env = builder.open(p).unwrap();

        Ok(Storage {
            env,
            path: p.to_path_buf(),
            dbs: HashMap::new(),
        })
    }

    // Returns the cached handle for `db_name`, creating (and caching) the
    // named database on first use.
    fn db(&mut self, db_name: &'static str) -> Result<Database, StorageError> {
        match self.dbs.get(db_name) {
            Some(db) => Ok(*db),
            None => {
                let db = self
                    .env
                    .create_db(Some(db_name), lmdb::DatabaseFlags::empty())?;
                self.dbs.insert(db_name, db);
                Ok(db)
            }
        }
    }

    /// Serializes and Saves a
record in one of the databases contained in storage. /// /// Input should implement the Record trait. The database the record is saved to and the key /// used is configured using that trait. /// /// # Arguments /// * `record` - A type that implements the Record trait. /// /// # Examples /// ``` /// #[macro_use] /// extern crate nostalgia_derive; /// use nostalgia::{Storage, StorageError, Record, Key}; /// use serde::{Serialize, Deserialize}; /// /// #[derive(Storable, Serialize, Deserialize)] /// #[key = "id"] /// struct Place { /// id: u32, /// name: std::string::String /// } /// /// fn main() -> Result<(), StorageError> { /// let mut storage = Storage::new("/tmp/db")?; /// let place = Place { id: 1, name: "Vienna".to_string() }; /// storage.save(&place)?; /// /// Ok(()) /// } /// ``` /// pub fn save<T: Record>(&mut self, record: &T) -> Result<(), StorageError> { let db = self.db(T::db_name())?; let mut tx = self.env.begin_rw_txn()?; let bytes = T::to_binary(record).expect("Could not serialize"); tx.put(db, &record.key().into(), &bytes, lmdb::WriteFlags::empty())?; tx.commit()?; Ok(()) } /// Saves a group of records to the internal type's database /// /// /// # Arguments /// * `records` - A Vec that contains objects that implement Record trait /// /// # Examples /// ``` /// #[macro_use] /// extern crate nostalgia_derive; /// use nostalgia::{Storage, Record, StorageError, Key}; /// use serde::{Serialize, Deserialize}; /// /// #[derive(Storable, Serialize, Deserialize)] /// #[key = "id"] /// struct Place { /// id: u32, /// name: std::string::String /// } /// /// fn main() -> Result<(), StorageError> { /// let mut storage = Storage::new("/tmp/db")?; /// /// let records = vec![ /// Place { id: 1, name: "Vienna".to_string() }, /// Place { id: 2, name: "Paris".to_string() }, /// Place { id: 3, name: "Istanbul".to_string() }, /// Place { id: 4, name: "London".to_string() }, /// ]; /// /// storage.save_batch(records)?; /// /// Ok(()) /// } /// ``` /// pub fn save_batch<T: 
Record>(&mut self, records: Vec<T>) -> Result<(), StorageError> {
        let db = self.db(T::db_name())?;
        // One write transaction for the whole batch: all-or-nothing on commit.
        let mut tx = self.env.begin_rw_txn()?;

        for record in records {
            // NOTE(review): `expect` panics on a serialization failure; a
            // dedicated StorageError variant would be friendlier.
            let bytes = T::to_binary(&record).expect("Could not serialize");
            tx.put(db, &record.key().into(), &bytes, lmdb::WriteFlags::empty())?;
        }

        tx.commit()?;
        Ok(())
    }

    /// Retrieves a record from the database
    ///
    /// # Arguments
    /// * `key` - A Vec of unsigned 8bit integers representing the key. Will make this more sugar-y
    /// eventually
    ///
    /// # Examples
    /// ```
    /// #[macro_use]
    /// extern crate nostalgia_derive;
    /// use nostalgia::{Storage, Record, StorageError, Key};
    /// use serde::{Serialize, Deserialize};
    ///
    /// #[derive(Storable, Serialize, Deserialize)]
    /// #[key = "id"]
    /// struct Place {
    ///     id: u32,
    ///     name: std::string::String
    /// }
    ///
    /// fn main() -> Result<(), StorageError> {
    ///     let mut storage = Storage::new("/tmp/db")?;
    ///
    ///     let paris: Place = storage.get(2)
    ///         .expect("Error fetching")
    ///         .expect("Empty record");
    ///
    ///     assert_eq!("Paris", paris.name);
    ///
    ///     Ok(())
    /// }
    /// ```
    pub fn get<T: Record, K: Into<T::Key>>(&mut self, key: K) -> Result<Option<T>, StorageError> {
        let db = self.db(T::db_name())?;
        let txn = self.env.begin_ro_txn()?;
        let cursor = txn.open_ro_cursor(db)?;
        // `15` is a raw MDB_cursor_op discriminant — presumably MDB_SET
        // (position cursor at the exact key); TODO(review): confirm against
        // lmdb-sys and replace with a named constant.
        let result = cursor.get(Some(&key.into().into()), None, 15)?;

        // A record that fails to deserialize is reported as absent (`None`)
        // rather than as an error.
        match T::from_binary(result.1) {
            Ok(record) => Ok(Some(record)),
            Err(_) => Ok(None),
        }
    }

    /// Deletes a record from the database
    ///
    /// # Arguments
    /// * `record` - A type that implements the Record trait.
/// /// # Examples /// ``` /// #[macro_use] /// extern crate nostalgia_derive; /// use nostalgia::{Storage, Record, Key, StorageError}; /// use serde::{Serialize, Deserialize}; /// /// #[derive(Storable, Serialize, Deserialize)] /// #[key = "id"] /// struct Place { /// id: u32, /// name: std::string::String /// } /// /// fn main() -> Result<(), StorageError> { /// let mut storage = Storage::new("/tmp/db")?; /// let place = Place { id: 1, name: "Vienna".to_string() }; /// storage.save(&place)?; /// /// storage.delete(&place)?; /// /// Ok(()) /// } /// ``` pub fn delete<T: Record>(&mut self, record: &T) -> Result<(), StorageError> { let db = self.db(T::db_name())?; let mut tx = self.env.begin_rw_txn()?; tx.del(db, &record.key().into(), None)?; tx.commit()?; Ok(()) } /// Returns an RoQuery object that allows you to Iterate over all records in a database. /// /// # Examples /// ``` /// #[macro_use] /// extern crate nostalgia_derive; /// use nostalgia::{Storage, Record, Key, StorageError}; /// use serde::{Serialize, Deserialize}; /// /// #[derive(Storable, Serialize, Deserialize)] /// #[key = "id"] /// struct Place { /// id: u32, /// name: std::string::String /// } /// /// fn main() -> Result<(), StorageError> { /// let mut storage = Storage::new("/tmp/db")?; /// let query = storage.query::<Place>()?; /// /// for place in query { /// println!("{}", place.name); /// } /// /// Ok(()) /// } /// ``` pub fn query<T: Record>(&mut self) -> Result<RoQuery<T>, StorageError> { let db = self.db(T::db_name())?; let txn = self.env.begin_ro_txn()?; Ok(RoQuery { phantom: std::marker::PhantomData::<T>, db, txn, iter: None, }) } /// Returns the first record that matches a predicate /// /// # Examples /// ``` /// #[macro_use] /// extern crate nostalgia_derive; /// use nostalgia::{Storage, Record, Key, StorageError}; /// use serde::{Serialize, Deserialize}; /// /// #[derive(Storable, Serialize, Deserialize)] /// #[key = "id"] /// struct Place { /// id: u32, /// name: std::string::String 
/// } /// /// fn main() -> Result<(), StorageError> { /// let mut storage = Storage::new("/tmp/db")?; /// /// let place = storage.find::<Place>(&|p| p.name == "Istanbul")?; /// if let Some(istanbul) = place { /// assert_eq!(istanbul.name, "Istanbul"); /// } else { /// assert_ne!(0, 0, "Could not find record"); /// } /// /// Ok(()) /// } pub fn find<T: Record>(&mut self, p: &dyn Fn(&T) -> bool) -> Result<Option<T>, StorageError> { let mut query = self.query::<T>()?; Ok(query.find(p)) } /// Removes all records in the corresponding type's database pub fn truncate<T: Record>(&mut self) -> Result<(), StorageError> { let db = self.db(T::db_name())?; let mut txn = self.env.begin_rw_txn()?; txn.clear_db(db)?; txn.commit()?; Ok(()) } /// Completely removes the database for a specific type pub fn drop<T: Record>(&mut self) -> Result<(), StorageError> { let db = self.db(T::db_name())?; let mut txn = self.env.begin_rw_txn()?; unsafe { txn.drop_db(db)?; } txn.commit()?; self.dbs.remove(T::db_name()); Ok(()) } } #[cfg(test)] mod tests { use super::*; use crate::Key; use fake::faker::name::en::Name; use fake::{Dummy, Fake, Faker}; use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize, Dummy, PartialEq)] struct Person { #[dummy(faker = "1..1000")] id: u32, #[dummy(faker = "Name()")] name: String, } impl Record for Person { type Key = Key<u32>; fn key(&self) -> Key<u32> { Key::from(self.id) } fn db_name() -> &'static str { "Person" } } fn clear_db(storage: &mut Storage) { match storage.truncate::<Person>() { Ok(_) => assert_eq!(0, 0), Err(_) => assert_ne!(0, 0, "Could not truncate Person db"), } } #[test] fn test_that_we_keep_track_of_db_references() { let mut storage = Storage::new(std::env::temp_dir()).expect("Could not open db storage"); assert_eq!(0, storage.dbs.len()); let p: Person = Faker.fake(); storage.save(&p).expect("Could not save record"); assert_eq!(1, storage.dbs.len()); match storage.drop::<Person>() { Ok(_) => assert_eq!(0, storage.dbs.len()), 
Err(_) => assert_ne!(0, 0, "Could not drop database"), } } #[test] fn test_that_we_can_insert_and_get_records_with_a_storage_object() { let mut storage = Storage::new(std::env::temp_dir()).expect("Could not open db storage"); clear_db(&mut storage); let person: Person = Faker.fake(); assert_eq!("Person", Person::db_name()); let _ = storage.save(&person).expect("Could not save record"); let p: Result<Option<Person>, StorageError> = storage.get(person.key()); match p { Ok(Some(pn)) => assert_eq!(pn, person), Ok(None) => assert_ne!(0, 0, "Didn't get a result back"), Err(_) => assert_ne!(0, 0, "Got an error"), }; } #[test] fn test_that_we_can_batch_insert_records_and_then_interate() { let records_to_create: u32 = 10000; let mut records: Vec<Person> = vec![]; for idx in 0..records_to_create { records.push(Person { id: idx, name: Name().fake(), }); } let mut storage = Storage::new(std::env::temp_dir()).expect("Could not open db storage"); clear_db(&mut storage); let _ = storage.save_batch(records).expect("Could not save records"); let person_iterator = storage.query::<Person>().unwrap(); let mut cnt = 0; for _ in person_iterator { cnt += 1; } assert_eq!(records_to_create, cnt); } }
use std::slice;

/// Recursively computes the "value" of the next node encoded in the flattened
/// tree (Advent of Code 2018 day 8, part 2 semantics).
///
/// Each node is `[n_children, n_metadata, child..., metadata...]`. A leaf's
/// value is the sum of its metadata; an inner node's value is the sum of the
/// values of the children selected by its metadata entries (1-indexed,
/// out-of-range entries ignored).
///
/// Fix: the original called `l.take(2)` on the `&mut Iter`, which moves the
/// mutable reference (`Iterator::take` consumes `self`), so every later use
/// of `l` was a use-after-move compile error. Reading the header with
/// `next()` leaves the iterator usable.
fn get_node_value(l: &mut slice::Iter<usize>) -> usize {
    // Header: number of children, number of metadata entries.
    let n_node = *l.next().expect("missing child-count header");
    let n_metadata = *l.next().expect("missing metadata-count header");

    // Children are encoded immediately after the header.
    let child_nodes: Vec<usize> = (0..n_node).map(|_| get_node_value(l)).collect();

    if n_node == 0 {
        // Leaf: value is the plain sum of the metadata entries.
        l.take(n_metadata).sum::<usize>()
    } else {
        // Inner node: metadata entries are 1-indexed child references;
        // entries outside 1..=n_node contribute nothing.
        l.take(n_metadata)
            .copied()
            .filter(|&i| (1..=n_node).contains(&i))
            .map(|i| child_nodes[i - 1])
            .sum::<usize>()
    }
}

fn main() {
    use std::io::{self, BufRead};
    let stdin = io::stdin();
    // Single input line of space-separated numbers.
    let line = stdin
        .lock()
        .lines()
        .next()
        .unwrap()
        .unwrap()
        .split(" ")
        .map(|s| s.parse::<usize>().unwrap())
        .collect::<Vec<usize>>();
    println!("{}", get_node_value(&mut line.iter()));
}
///// chapter 3 "using functions and control structures" /////

// program section:

fn main() {
    let negafive = abs(-5);
    println!("{}", negafive);
}

/// Returns the absolute value of `x`.
///
/// Fix: the original returned `x * x` (the square) for non-positive input,
/// so `abs(-5)` evaluated to 25 instead of 5. Negating is the correct
/// branch. Note: like `i32::abs`, this overflows (panics in debug builds)
/// for `i32::MIN`.
fn abs(x: i32) -> i32 {
    if x >= 0 {
        x
    } else {
        -x
    }
}

///// output should be:
/*
5
*/
// end of output
use bytes::{Buf, BytesMut};
use std::cmp::{self};
use std::collections::VecDeque;
use std::io::IoSlice;

/// Copies as many bytes as fit from `src` into `dst`, consumes them from
/// `src`, and returns the number of bytes copied (0 when `src` is empty).
pub fn fill_read_buf(src: &mut BytesMut, dst: &mut [u8]) -> usize {
    if src.is_empty() {
        return 0;
    }

    // Copy min(src.len(), dst.len()) bytes.
    let mut n = src.len();
    if n > dst.len() {
        n = dst.len();
    }
    dst[0..n].copy_from_slice(&src[0..n]);
    src.advance(n);
    if src.is_empty() {
        // NOTE(review): `clear()` on an already-empty BytesMut looks like a
        // no-op; presumably intended to reset/reclaim the buffer — confirm.
        src.clear();
    }
    n
}

// Upper bound on the number of queued chunks `VBuf::push` accepts.
const MAX_VEC_BUF: usize = 64;

/// A `Buf` implementation over a FIFO queue of owned byte chunks.
///
/// Uses the pre-1.0 `bytes` trait surface (`bytes()` / `bytes_vectored()`),
/// so this targets an older `bytes` release.
pub struct VBuf {
    // Read offset into the front chunk; chunks past the front are unread.
    cur: usize,
    inner: VecDeque<Vec<u8>>,
    // Zero-length backing array returned by `bytes()` when the queue is empty.
    empty: [u8; 0],
}

impl VBuf {
    pub fn new() -> Self {
        Self {
            cur: 0,
            inner: VecDeque::new(),
            empty: [0; 0],
        }
    }

    /// Number of queued chunks (not bytes).
    pub fn vlen(&self) -> usize {
        self.inner.len()
    }

    /// Appends a chunk to the queue; returns `false` (rejecting the chunk)
    /// when the queue already holds `MAX_VEC_BUF` chunks or `data` is empty.
    pub fn push(&mut self, data: Vec<u8>) -> bool {
        if self.inner.len() >= MAX_VEC_BUF || data.is_empty() {
            return false;
        }
        self.inner.push_back(data);
        true
    }
}

impl Buf for VBuf {
    // Total unread bytes: sum of all chunk lengths minus the front offset.
    fn remaining(&self) -> usize {
        let sum = self.inner.iter().map(|bytes| bytes.len()).sum::<usize>();
        sum - self.cur
    }

    // The unread tail of the front chunk, or an empty slice when drained.
    fn bytes(&self) -> &[u8] {
        if self.inner.is_empty() {
            return &self.empty;
        }
        &self.inner[0][self.cur..]
    }

    // Consumes `cnt` bytes, popping front chunks as they are fully read.
    fn advance(&mut self, mut cnt: usize) {
        while !self.inner.is_empty() && cnt > 0 {
            if self.cur + cnt >= self.inner[0].len() {
                // Front chunk fully consumed: subtract its unread tail and
                // drop it; continue with the remainder of `cnt`.
                cnt -= self.inner[0].len() - self.cur;
                self.cur = 0;
                self.inner.pop_front();
            } else {
                // Partial consumption stays within the front chunk.
                self.cur += cnt;
                break;
            }
        }
        // NOTE(review): shrinking on every advance trades CPU for memory;
        // presumably deliberate for long-lived connections — confirm.
        self.inner.shrink_to_fit();
    }

    // Exposes up to `dst.len()` chunks as IoSlices for vectored writes; the
    // first slice honours the current read offset.
    fn bytes_vectored<'c>(&'c self, dst: &mut [IoSlice<'c>]) -> usize {
        let len = cmp::min(self.inner.len(), dst.len());
        if len > 0 {
            dst[0] = IoSlice::new(self.bytes());
        }
        for i in 1..len {
            dst[i] = IoSlice::new(&self.inner[i]);
        }
        len
    }
}
use failure::Fallible;
use protobuf_gen::Config;

// Integration test: regenerates the "yellow_book" protobuf definitions from
// the listed Rust sources and fails the test on any generation error.
#[test]
fn unittest_yellow_book() -> Fallible<()> {
    // Ignore the error if a global logger is already installed (tests run in
    // arbitrary order within one process).
    env_logger::try_init().unwrap_or_default();

    // Emit protos under `protos/`, with proxies in the `proxy` module.
    let mut config = Config::new("protos", Some("proxy"));
    config.add_source("lib_tests/src/person.rs", "yellow_book");
    config.add_source("lib_tests/src/city.rs", "yellow_book");
    config.generate()?;
    Ok(())
}
use super::toml_parser;
use super::toml_completeness;
use super::toml_generator;
use std::path::Path;

/// Parses `input_file_name`, fills in any derivable build parameters, and
/// writes the completed build TOML to `output_file_name`.
///
/// Skips all work when the output already exists, unless `update` is set.
/// Every failure is reported on stdout and collapsed into `Err(())`.
pub fn generate_build_toml(
    input_file_name: &str,
    output_file_name: &str,
    update: bool,
) -> Result<(), ()> {
    // Nothing to do when the output exists and no refresh was requested.
    if Path::new(output_file_name).exists() && !update {
        return Ok(());
    }

    // Parse, reporting file-level and TOML-level errors separately.
    let mut parsed_toml = match toml_parser::parse_toml(input_file_name) {
        Ok(toml) => toml,
        Err(toml_parser::ParseFailed::FileError(e)) => {
            println!(
                "Error reading {}:\n{}",
                input_file_name,
                e.to_string()
            );
            return Err(());
        }
        Err(toml_parser::ParseFailed::TomlError(e)) => {
            println!(
                "Error parsing {} at line {}, char {}:\n{}",
                input_file_name,
                e.line_col().unwrap().0,
                e.line_col().unwrap().1,
                e.to_string()
            );
            return Err(());
        }
    };

    // Fill in whatever the input left unspecified.
    if let Err(e) = toml_completeness::complete_module(&mut parsed_toml) {
        match e {
            toml_completeness::CompleteError::NotEnoughInfo(e) => {
                println!(
                    "Missing information in {}:\n{}",
                    input_file_name, e
                )
            }
            toml_completeness::CompleteError::BadTomlType(e) => {
                println!(
                    "Invalid type for a build parameter in {}:\n{}",
                    input_file_name, e
                )
            }
            toml_completeness::CompleteError::DependencyFail(e) => {
                println!(
                    "Error with one of the dependencies in {}:\n{}",
                    input_file_name, e
                )
            }
        }
        return Err(());
    }

    // Serialize the completed module out to disk.
    if let Err(e) = toml_generator::generate_toml(output_file_name, &mut parsed_toml) {
        match e {
            toml_generator::GenerateFailed::FileError(e) => {
                println!("Failed to write to {}:\n{}", output_file_name, e)
            }
            toml_generator::GenerateFailed::ConvertFailed(e) => {
                println!(
                    "Error converting the comb-created build toml to a string:\n{}",
                    e
                )
            }
        }
        return Err(());
    }

    Ok(())
}
/// Email an email address belonging to a user #[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct Email { pub email: Option<String>, pub primary: Option<bool>, pub verified: Option<bool>, } impl Email { /// Create a builder for this object. #[inline] pub fn builder() -> EmailBuilder { EmailBuilder { body: Default::default(), } } #[inline] pub fn user_list_emails() -> EmailGetBuilder { EmailGetBuilder } } impl Into<Email> for EmailBuilder { fn into(self) -> Email { self.body } } /// Builder for [`Email`](./struct.Email.html) object. #[derive(Debug, Clone)] pub struct EmailBuilder { body: self::Email, } impl EmailBuilder { #[inline] pub fn email(mut self, value: impl Into<String>) -> Self { self.body.email = Some(value.into()); self } #[inline] pub fn primary(mut self, value: impl Into<bool>) -> Self { self.body.primary = Some(value.into()); self } #[inline] pub fn verified(mut self, value: impl Into<bool>) -> Self { self.body.verified = Some(value.into()); self } } /// Builder created by [`Email::user_list_emails`](./struct.Email.html#method.user_list_emails) method for a `GET` operation associated with `Email`. #[derive(Debug, Clone)] pub struct EmailGetBuilder; impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for EmailGetBuilder { type Output = Vec<Email>; const METHOD: http::Method = http::Method::GET; fn rel_path(&self) -> std::borrow::Cow<'static, str> { "/user/emails".into() } }
//! //! This module gives a handler for the invalid opcode exception. //! use super::InterruptStackFrame; pub extern "C" fn invalid_opcode_handler(esf: *const InterruptStackFrame) -> ! { unsafe { panic_args!("Invalid opcode \ \n\t\tInstruction Pointer: 0x{:x} \ \n\t\tCode segment: {} \ \n\t\tEFLAGS: 0x{:x}", (*esf).instruction_pointer, (*esf).code_segment, (*esf).eflags); } }
use mantle::kernel;
use mantle::kio;
use ::core;

// Backing memory for the initial kernel stack (64 KiB), used before any
// dynamically set-up stack exists.
static mut DEFAULT_STACK: [u8; 65536] = [0; 65536];

extern {
    // Kernel entry point supplied by the user of this runtime.
    fn mantle_main(bi: &kernel::BootInfo, executable_start: usize);
}

/// Process entry point. Naked: no prologue, raw inline asm only.
///
/// The legacy (pre-stabilization) `asm!` form below loads the base of
/// `DEFAULT_STACK` into %rsp via the "{rsp}" input constraint, adds 65536 to
/// reach the top (stacks grow downward), aligns %rsp to 16 bytes, passes
/// `__executable_start` as the second argument in %rsi, and calls
/// `mantle_start`. The `_halt` loop catches an (unexpected) return.
#[no_mangle]
#[naked]
pub unsafe extern fn _start() {
    asm!("
        .extern __executable_start
        add $$65536, %rsp
        andq $$0xfffffffffffffff0, %rsp
        movq $$__executable_start, %rsi
        call mantle_start
    _halt:
        jmp _halt
    " ::"{rsp}" (&DEFAULT_STACK):: "volatile");
}

/// Rust-side entry: resolves the boot info pointer and hands control to
/// `mantle_main`; returning from main is a fatal error.
#[no_mangle]
pub unsafe extern fn mantle_start(bootinfo_addr: usize, executable_start: usize) {
    // NOTE(review): `bootinfo_addr` is trusted to point at a valid BootInfo;
    // the inner `unsafe` block is redundant inside an `unsafe fn`.
    let bootinfo = unsafe { (bootinfo_addr as *const kernel::BootInfo).as_ref().unwrap() };
    mantle_main(bootinfo, executable_start);
    panic!("returned from main!");
}

// Minimal language items for this no-std environment (pre-2018 Rust uses the
// `panic_fmt` lang item rather than #[panic_handler]).
#[lang = "eh_personality"]
extern fn eh_personality() {}

/// Panic hook: logs the location/message, emits a visible marker through the
/// kernel debug output channel, then spins forever.
#[lang = "panic_fmt"]
#[no_mangle]
pub extern fn panic_fmt(fmt: core::fmt::Arguments, file: &'static str, line: u32) -> ! {
    debug!("panicked at {}:{}: {}", file, line, fmt);
    for c in "[panic] HANG\n".bytes() {
        kio::debug_put_char(c);
    }
    loop {}
}

// Unwinding is not supported; any attempt to resume unwinding is fatal.
#[allow(non_snake_case)]
#[no_mangle]
pub extern "C" fn _Unwind_Resume() -> ! {
    panic!("cannot unwind");
}
use super::molecule::tab_menu::{self, TabMenu};
use super::organism::modal_resource::{self, ModalResource};
use crate::arena::{block, resource, ArenaMut, BlockKind, BlockMut, BlockRef};
use crate::libs::random_id::U128Id;
use isaribi::{
    style,
    styled::{Style, Styled},
};
use kagura::prelude::*;
use nusa::prelude::*;
use std::collections::HashSet;

mod tab_0;

use tab_0::Tab0;

/// Properties handed to [`RoomModelessCraftboard`] by its parent: the shared
/// arena, the world block, and the craftboard block being edited.
pub struct Props {
    pub arena: ArenaMut,
    pub world: BlockMut<block::World>,
    pub data: block::craftboard::Block,
}

/// Internal component messages. Each `Set*` variant mutates one craftboard
/// property and reports the changed block id upward via `On::UpdateBlocks`.
pub enum Msg {
    NoOp,
    // Forward an `On` event to the parent.
    Sub(On),
    SetShowingModal(ShowingModal),
    SetName(String),
    SetDisplayName0(String),
    SetDisplayName1(String),
    SetXSize(f64),
    SetYSize(f64),
    SetZSize(f64),
    SetGridColor(crate::libs::color::Pallet),
    // (texture slot index, image to use — None clears the slot).
    SetTexture(usize, Option<BlockRef<resource::ImageData>>),
    SetVoxelDensityX(f64),
    SetVoxelDensityY(f64),
    SetVoxelDensityZ(f64),
}

/// Which modal dialog, if any, is currently open.
pub enum ShowingModal {
    None,
    // Texture picker for the given texture slot.
    SelectTexture(usize),
}

/// Events emitted to the parent component.
pub enum On {
    UpdateBlocks {
        insert: HashSet<U128Id>,
        update: HashSet<U128Id>,
    },
}

/// Modeless editor component for a single craftboard block.
pub struct RoomModelessCraftboard {
    arena: ArenaMut,
    world: BlockMut<block::World>,
    craftboard: block::craftboard::Block,
    showing_modal: ShowingModal,
    element_id: ElementId,
}

ElementId!
{ input_craftboard_name, input_craftboard_display_name } impl Component for RoomModelessCraftboard { type Props = Props; type Msg = Msg; type Event = On; } impl HtmlComponent for RoomModelessCraftboard {} impl Constructor for RoomModelessCraftboard { fn constructor(props: Props) -> Self { Self { arena: props.arena, world: props.world, craftboard: props.data, showing_modal: ShowingModal::None, element_id: ElementId::new(), } } } impl Update for RoomModelessCraftboard { fn on_load(mut self: Pin<&mut Self>, props: Props) -> Cmd<Self> { self.arena = props.arena; self.world = props.world; self.craftboard = props.data; Cmd::none() } fn update(mut self: Pin<&mut Self>, msg: Msg) -> Cmd<Self> { match msg { Msg::NoOp => Cmd::none(), Msg::Sub(sub) => Cmd::submit(sub), Msg::SetShowingModal(showing_modal) => { self.showing_modal = showing_modal; Cmd::none() } Msg::SetName(name) => { self.craftboard.update(|craftboard| { craftboard.set_name(name.clone()); }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetDisplayName0(display_name) => { self.craftboard.update(|craftboard| { craftboard.set_display_name((Some(display_name.clone()), None)); }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetDisplayName1(display_name) => { self.craftboard.update(|craftboard| { craftboard.set_display_name((None, Some(display_name.clone()))); }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetXSize(x_size) => { self.craftboard.update(|craftboard| { let s = craftboard.size(); craftboard.set_size([x_size, s[1], s[2]]) }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetYSize(y_size) => { self.craftboard.update(|craftboard| { let s = craftboard.size(); craftboard.set_size([s[0], y_size, s[2]]) }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! 
{ self.craftboard.id() }, }) } Msg::SetZSize(z_size) => { self.craftboard.update(|craftboard| { let s = craftboard.size(); craftboard.set_size([s[0], s[1], z_size]) }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetGridColor(grid_color) => { self.craftboard.update(|craftboard| { craftboard.set_grid_color(grid_color); }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetTexture(tex_idx, texture) => { self.craftboard.update(|craftboard| { let mut textures = craftboard.textures().clone(); textures[tex_idx] = texture.clone(); craftboard.set_textures(textures); }); self.showing_modal = ShowingModal::None; Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetVoxelDensityX(vd_x) => { self.craftboard.update(|craftboard| { let vd = craftboard.voxel_density(); craftboard.set_voxel_density([vd_x, vd[1], vd[2]]) }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetVoxelDensityY(vd_y) => { self.craftboard.update(|craftboard| { let vd = craftboard.voxel_density(); craftboard.set_voxel_density([vd[0], vd_y, vd[2]]) }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! { self.craftboard.id() }, }) } Msg::SetVoxelDensityZ(vd_z) => { self.craftboard.update(|craftboard| { let vd = craftboard.voxel_density(); craftboard.set_voxel_density([vd[0], vd[1], vd_z]) }); Cmd::submit(On::UpdateBlocks { insert: set! {}, update: set! 
{ self.craftboard.id() }, }) } } } } impl Render<Html> for RoomModelessCraftboard { type Children = (); fn render(&self, _: Self::Children) -> Html { Self::styled(Html::fragment(vec![ self.render_tabs(), match &self.showing_modal { ShowingModal::None => Html::none(), ShowingModal::SelectTexture(tex_idx) => ModalResource::empty( self, None, modal_resource::Props { arena: ArenaMut::clone(&self.arena), world: BlockMut::clone(&self.world), title: String::from(modal_resource::title::SELECT_TEXTURE), filter: set! { BlockKind::ImageData }, is_selecter: true, }, Sub::map({ let tex_idx = *tex_idx; move |sub| match sub { modal_resource::On::Close => Msg::SetShowingModal(ShowingModal::None), modal_resource::On::UpdateBlocks { insert, update } => { Msg::Sub(On::UpdateBlocks { insert, update }) } modal_resource::On::SelectImageData(texture) => { Msg::SetTexture(tex_idx, Some(texture)) } modal_resource::On::SelectNone => Msg::SetTexture(tex_idx, None), _ => Msg::NoOp, } }), ), }, ])) } } impl RoomModelessCraftboard { fn render_tabs(&self) -> Html { Html::div( Attributes::new().class(Self::class("base")), Events::new(), vec![TabMenu::new( self, None, tab_menu::Props { selected: 0, controlled: false, }, Sub::none(), ( Attributes::new(), Events::new(), vec![( Html::text("Common"), Tab0::empty( self, None, tab_0::Props { craftboard: block::craftboard::Block::clone(&self.craftboard), }, Sub::map(|sub| match sub { tab_0::On::OpenModal(modal_kind) => { Msg::SetShowingModal(modal_kind) } tab_0::On::SetDisplayName0(dn_0) => Msg::SetDisplayName0(dn_0), tab_0::On::SetDisplayName1(dn_1) => Msg::SetDisplayName1(dn_1), tab_0::On::SetGridColor(pallet) => Msg::SetGridColor(pallet), tab_0::On::SetName(name) => Msg::SetName(name), tab_0::On::SetXSize(x_size) => Msg::SetXSize(x_size), tab_0::On::SetYSize(y_size) => Msg::SetYSize(y_size), tab_0::On::SetZSize(y_size) => Msg::SetZSize(y_size), tab_0::On::SetVoxelDensityX(vd_x) => Msg::SetVoxelDensityX(vd_x), tab_0::On::SetVoxelDensityY(vd_y) => 
Msg::SetVoxelDensityY(vd_y),
                            tab_0::On::SetVoxelDensityZ(vd_z) => Msg::SetVoxelDensityZ(vd_z),
                        }),
                    ),
                )],
            ),
        )],
        )
    }
}

// Static CSS for the component root: fill the modeless window.
impl Styled for RoomModelessCraftboard {
    fn style() -> Style {
        style! {
            ".base" {
                "width": "100%";
                "height": "100%";
                "padding-top": ".65rem";
            }
        }
    }
}
use crate::obj::Twzobj;
use crate::ptr::Pptr;
use std::ffi::c_void;

/* TODO: handle allocation failures */
extern "C" {
    // C runtime hooks implementing per-object allocation management.
    fn __runtime_twz_object_init_alloc(base: *const c_void, off: u64) -> i32;
    fn __runtime_twz_object_realloc(
        base: *const c_void,
        p: *const c_void,
        owner: *mut *const c_void,
        len: usize,
        flags: u64,
    ) -> i32;
    fn __runtime_twz_object_free(base: *const c_void, p: *const c_void, owner: *mut *const c_void, flags: u64) -> i32;
    fn __runtime_twz_object_alloc(
        base: *const c_void,
        len: usize,
        owner: *mut *const c_void,
        flags: u64,
        ctor: extern "C" fn(*mut c_void, *const c_void),
        data: *const c_void,
    ) -> i32;
}

impl<T> Twzobj<T> {
    // The object's base address as an untyped pointer for the C runtime.
    unsafe fn raw_base_void(&self) -> *const c_void {
        std::mem::transmute::<&T, *const c_void>(&*self.base())
    }

    // Initializes the object's allocator metadata at `offset`.
    // NOTE(review): the i32 status of the C call is discarded (see TODO above).
    pub(crate) fn raw_init_alloc(&self, offset: usize) {
        unsafe {
            __runtime_twz_object_init_alloc(self.raw_base_void(), offset as u64);
        }
    }

    // Allocates space for `item` inside the object and bitwise-moves it
    // there; the allocation handle is written through `owner`.
    pub(crate) fn allocate_copy_item<R>(&self, owner: &mut u64, item: R) {
        // C-callable constructor: bitwise move of `src` into the target slot.
        extern "C" fn do_the_move<R>(tgt: &mut R, src: &R) {
            unsafe {
                std::ptr::copy_nonoverlapping(src as *const R, tgt as *mut R, 1);
            }
        }
        unsafe {
            __runtime_twz_object_alloc(
                self.raw_base_void(),
                std::mem::size_of::<R>(),
                std::mem::transmute::<&mut u64, *mut *const c_void>(owner),
                // Alignment is packed into the upper 32 bits of `flags`.
                (std::mem::align_of::<R>() as u64) << 32,
                // Reinterpret the typed constructor as the untyped C
                // signature expected by the runtime.
                std::mem::transmute::<extern "C" fn(&mut R, &R), extern "C" fn(*mut c_void, *const c_void)>(
                    do_the_move,
                ),
                std::mem::transmute::<&R, *const c_void>(&item),
            );
        }
    }

    // Allocates space for an `R` and initializes it in place by invoking
    // `ctor` (with optional `data`) through a C-callable trampoline; the
    // resulting persistent pointer is written into `owner.p`.
    pub(crate) fn allocate_ctor_item<R, X>(
        &self,
        owner: &mut Pptr<R>,
        ctor: &(dyn Fn(&mut std::mem::MaybeUninit<R>, Option<&X>) + 'static),
        data: Option<&X>,
    ) {
        // Unpacks the (closure, data) pair smuggled through the C `data`
        // pointer and runs the closure on the uninitialized slot.
        extern "C" fn trampoline<R, X>(
            tgt: &mut std::mem::MaybeUninit<R>,
            src: &(
                &(dyn Fn(&mut std::mem::MaybeUninit<R>, Option<&X>) + 'static),
                Option<&X>,
            ),
        ) {
            let (ctor, src) = src;
            ctor(tgt, *src);
        }
        unsafe {
            __runtime_twz_object_alloc(
                self.raw_base_void(),
                std::mem::size_of::<R>(),
                std::mem::transmute::<&mut u64, *mut *const c_void>(&mut owner.p),
                // Alignment in the upper 32 bits of `flags`, as above.
                (std::mem::align_of::<R>() as u64) << 32,
                std::mem::transmute::<
                    extern "C" fn(
                        &mut std::mem::MaybeUninit<R>,
                        &(
                            &(dyn Fn(&mut std::mem::MaybeUninit<R>, Option<&X>) + 'static),
                            Option<&X>,
                        ),
                    ),
                    extern "C" fn(*mut c_void, *const c_void),
                >(trampoline),
                // NOTE(review): passes a pointer to a stack-local tuple; this
                // is only sound if the C side uses it strictly during the
                // call — confirm the runtime does not retain `data`.
                std::mem::transmute::<
                    &(
                        &(dyn Fn(&mut std::mem::MaybeUninit<R>, Option<&X>) + 'static),
                        Option<&X>,
                    ),
                    *const c_void,
                >(&(ctor, data)),
            );
        }
    }
}
//! "Tab 0" pane of the textboard room-modeless editor: title, size/font/color
//! properties, and the text body.
use super::super::atom::{
    btn::Btn,
    common::Common,
    fa,
    heading::{self, Heading},
    slider::{self, Slider},
    text::Text,
};
use super::super::organism::{
    popup_color_pallet::{self, PopupColorPallet},
    room_modeless::RoomModeless,
};
use super::ShowingModal;
use crate::arena::{block, BlockMut};
use crate::libs::color::Pallet;
use isaribi::{
    style,
    styled::{Style, Styled},
};
use kagura::prelude::*;
use nusa::prelude::*;

/// Component props: the textboard block being edited.
pub struct Props {
    pub textboard: block::textboard::Block,
}

/// Internal messages: either nothing, or forward an `On` event to the parent.
pub enum Msg {
    NoOp,
    Sub(On),
}

/// Events emitted to the parent component.
pub enum On {
    OpenModal(ShowingModal),
    SetTitle(String),
    SetText(String),
    SetXSize(f64),
    SetZSize(f64),
    SetFontSize(f64),
    SetColor(Pallet),
}

pub struct Tab0 {
    // Block handle for the textboard being edited.
    textboard: block::textboard::Block,
    // Stable DOM element ids (used to associate <label for=...> with inputs).
    element_id: ElementId,
}

ElementId! {
    input_textboard_title
}

impl Component for Tab0 {
    type Props = Props;
    type Msg = Msg;
    type Event = On;
}

impl HtmlComponent for Tab0 {}

impl Constructor for Tab0 {
    fn constructor(props: Self::Props) -> Self {
        Self {
            textboard: props.textboard,
            element_id: ElementId::new(),
        }
    }
}

impl Update for Tab0 {
    // Refresh the block handle whenever new props arrive.
    fn on_load(mut self: Pin<&mut Self>, props: Self::Props) -> Cmd<Self> {
        self.textboard = props.textboard;
        Cmd::none()
    }

    // All real events are simply forwarded to the parent via `Cmd::submit`.
    fn update(self: Pin<&mut Self>, msg: Self::Msg) -> Cmd<Self> {
        match msg {
            Msg::NoOp => Cmd::none(),
            Msg::Sub(event) => Cmd::submit(event),
        }
    }
}

impl Render<Html> for Tab0 {
    type Children = ();
    // Renders header + main sections if the block data can be mapped;
    // otherwise renders nothing for that section.
    fn render(&self, _: Self::Children) -> Html {
        Self::styled(Html::div(
            Attributes::new()
                .class(RoomModeless::class("common-base"))
                .class("pure-form"),
            Events::new(),
            vec![
                self.textboard
                    .map(|data| self.render_header(data))
                    .unwrap_or(Common::none()),
                self.textboard
                    .map(|data| self.render_main(data))
                    .unwrap_or(Common::none()),
            ],
        ))
    }
}

impl Tab0 {
    /// Header row: icon label + editable title input.
    fn render_header(&self, textboard: &block::Textboard) -> Html {
        Html::div(
            Attributes::new().class(RoomModeless::class("common-header")),
            Events::new(),
            vec![
                Html::label(
                    Attributes::new()
                        .class(RoomModeless::class("common-label"))
                        .string("for", &self.element_id.input_textboard_title),
                    Events::new(),
                    vec![fa::fas_i("fa-file-lines")],
                ),
                Html::input(
                    Attributes::new()
                        .id(&self.element_id.input_textboard_title)
                        .value(textboard.title()),
                    Events::new().on_input(self, |title| Msg::Sub(On::SetTitle(title))),
                    vec![],
                ),
            ],
        )
    }

    /// Main area: property sliders, a heading, and the text editor.
    fn render_main(&self, textboard: &block::Textboard) -> Html {
        Html::div(
            Attributes::new().class(Self::class("main")),
            Events::new(),
            vec![
                self.render_props(textboard),
                Heading::h3(
                    heading::Variant::Light,
                    Attributes::new(),
                    Events::new(),
                    vec![Html::text("テキスト")],
                ),
                self.render_text(textboard),
            ],
        )
    }

    /// Property editors: X/Z size sliders, font-size slider, color picker.
    fn render_props(&self, textboard: &block::Textboard) -> Html {
        Html::div(
            Attributes::new().class(Self::class("content")),
            Events::new(),
            vec![
                Html::div(
                    Attributes::new().class(Common::keyvalue()),
                    Events::new(),
                    vec![
                        Text::span("X幅(横幅)"),
                        Slider::new(
                            self,
                            None,
                            slider::Position::Linear {
                                min: 2.0,
                                max: 10.0,
                                val: textboard.size()[0],
                                step: 0.5,
                            },
                            Sub::map(move |sub| match sub {
                                slider::On::Input(x) => Msg::Sub(On::SetXSize(x)),
                                _ => Msg::NoOp,
                            }),
                            slider::Props {
                                range_is_editable: false,
                                theme: slider::Theme::Light,
                            },
                        ),
                        Text::span("Z幅(高さ)"),
                        Slider::new(
                            self,
                            None,
                            slider::Position::Linear {
                                min: 2.0,
                                max: 10.0,
                                val: textboard.size()[1],
                                step: 0.5,
                            },
                            Sub::map(move |sub| match sub {
                                slider::On::Input(z) => Msg::Sub(On::SetZSize(z)),
                                _ => Msg::NoOp,
                            }),
                            slider::Props {
                                range_is_editable: false,
                                theme: slider::Theme::Light,
                            },
                        ),
                    ],
                ),
                Html::div(
                    Attributes::new().class(Common::keyvalue()),
                    Events::new(),
                    vec![
                        Text::span("文字サイズ"),
                        Slider::new(
                            self,
                            None,
                            slider::Position::Linear {
                                min: 0.1,
                                max: 1.0,
                                val: textboard.font_size(),
                                step: 0.025,
                            },
                            Sub::map(move |sub| match sub {
                                slider::On::Input(x) => Msg::Sub(On::SetFontSize(x)),
                                _ => Msg::NoOp,
                            }),
                            slider::Props {
                                range_is_editable: false,
                                theme: slider::Theme::Light,
                            },
                        ),
                        Text::span("色"),
                        PopupColorPallet::empty(
                            self,
                            None,
                            popup_color_pallet::Props {
                                direction: popup_color_pallet::Direction::Bottom,
                                default_selected: textboard.color().clone(),
                            },
                            Sub::map(|sub| match sub {
                                popup_color_pallet::On::SelectColor(color) => {
                                    Msg::Sub(On::SetColor(color))
                                }
                            }),
                        ),
                    ],
                ),
            ],
        )
    }

    /// Text body editor (textarea bound to the textboard's text).
    fn render_text(&self, textboard: &block::Textboard) -> Html {
        Html::div(
            Attributes::new().class(Self::class("content")),
            Events::new(),
            vec![Html::textarea(
                Attributes::new().value(textboard.text()),
                Events::new().on_input(self, |text| Msg::Sub(On::SetText(text))),
                vec![],
            )],
        )
    }
}

impl Styled for Tab0 {
    fn style() -> Style {
        style! {
            ".main" {
                "display": "grid";
                "grid-template-columns": "1fr";
                "grid-auto-rows": "max-content";
                "overflow-y": "scroll";
            }
            ".content" {
                "display": "grid";
                "column-gap": ".65rem";
                "row-gap": ".65rem";
                "align-items": "start";
                "padding-left": ".65rem";
                "padding-right": ".65rem";
                "grid-template-columns": "repeat(auto-fit, minmax(20rem, 1fr))";
                "grid-auto-rows": "max-content";
            }
            ".content img" {
                "width": "100%";
                "max-height": "20rem";
                "object-fit": "contain";
            }
        }
    }
}
use actix::prelude::Message; use actix::{Handler, MessageResult}; use chrono; use diesel; use diesel::prelude::*; use uuid; use crate::db::schema::span; #[derive(Debug, Insertable, Queryable)] #[table_name = "span"] pub struct SpanDb { pub trace_id: String, pub id: String, parent_id: Option<String>, name: Option<String>, kind: Option<String>, duration: Option<i64>, ts: Option<chrono::NaiveDateTime>, debug: bool, shared: bool, local_endpoint_id: Option<String>, remote_endpoint_id: Option<String>, } use crate::db::schema::endpoint; #[derive(Debug, Insertable, Queryable)] #[table_name = "endpoint"] pub struct EndpointDb { endpoint_id: String, service_name: Option<String>, ipv4: Option<String>, ipv6: Option<String>, port: Option<i32>, } use crate::db::schema::tag; #[derive(Debug, Insertable, Queryable)] #[table_name = "tag"] pub struct TagDb { span_id: String, name: String, value: String, } use crate::db::schema::annotation; #[derive(Debug, Insertable, Queryable)] #[table_name = "annotation"] pub struct AnnotationDb { annotation_id: String, trace_id: String, span_id: String, ts: chrono::NaiveDateTime, value: String, } struct FromSpan { span_db: SpanDb, local_endpoint: Option<EndpointDb>, remote_endpoint: Option<EndpointDb>, tags: Vec<TagDb>, annotations: Vec<AnnotationDb>, } fn get_all_from_span(span: &crate::opentracing::Span) -> FromSpan { let trace_id = span.trace_id.clone(); let span_id = span.id.clone(); let span_db = SpanDb { trace_id: trace_id.clone(), id: span_id.clone(), parent_id: span.parent_id.clone(), name: span.name.clone().map(|s| s.to_lowercase()), kind: span.kind.clone().map(|k| k.to_string()), duration: span.duration, ts: span.timestamp.map(|ts| { // span timestamp is in microseconds chrono::NaiveDateTime::from_timestamp( ts / 1000 / 1000, (ts % (1000 * 1000) * 1000) as u32, ) }), debug: span.debug, shared: span.shared, local_endpoint_id: None, remote_endpoint_id: None, }; let local_endpoint = if let Some(endpoint) = span.local_endpoint.clone() { 
Some(EndpointDb { endpoint_id: "n/a".to_string(), service_name: endpoint.service_name.map(|s| s.to_lowercase()), ipv4: endpoint.ipv4, ipv6: endpoint.ipv6, port: endpoint.port, }) } else { None }; let remote_endpoint = if let Some(endpoint) = span.remote_endpoint.clone() { Some(EndpointDb { endpoint_id: "n/a".to_string(), service_name: endpoint.service_name.map(|s| s.to_lowercase()), ipv4: endpoint.ipv4, ipv6: endpoint.ipv6, port: endpoint.port, }) } else { None }; let annotations = span .annotations .iter() .map(|annotation| { AnnotationDb { trace_id: trace_id.clone(), span_id: span_id.clone(), annotation_id: uuid::Uuid::new_v4().to_hyphenated().to_string(), ts: chrono::NaiveDateTime::from_timestamp( // timestamp is in microseconds annotation.timestamp / 1000 / 1000, (annotation.timestamp % 1000 * 1000) as u32, ), value: annotation.value.clone(), } }) .collect(); let tags = span .tags .iter() .map(|(key, value)| TagDb { span_id: span_id.clone(), name: key.clone().to_lowercase(), value: value.clone(), }) .collect(); FromSpan { span_db, local_endpoint, remote_endpoint, annotations, tags, } } impl Message for crate::opentracing::Span { type Result = crate::opentracing::Span; } impl super::DbExecutor { fn find_endpoint(&mut self, ep: &EndpointDb) -> Option<EndpointDb> { use super::super::schema::endpoint::dsl::*; let mut query = endpoint.into_boxed(); if let Some(query_service_name) = ep.service_name.clone() { query = query.filter(service_name.eq(query_service_name)); } if let Some(query_ipv4) = ep.ipv4.clone() { query = query.filter(ipv4.eq(query_ipv4)); } if let Some(query_ipv6) = ep.ipv6.clone() { query = query.filter(ipv6.eq(query_ipv6)); } if let Some(query_port) = ep.port { query = query.filter(port.eq(query_port)); } query .first::<EndpointDb>(self.0.as_ref().expect("fail to get DB")) .ok() } fn upsert_endpoint(&mut self, ep: Option<EndpointDb>) -> Option<String> { if let Some(le) = ep { use super::super::schema::endpoint::dsl::*; match self.find_endpoint(&le) { 
Some(existing) => Some(existing.endpoint_id), None => { let new_id = uuid::Uuid::new_v4().to_hyphenated().to_string(); let could_insert = diesel::insert_into(endpoint) .values(&EndpointDb { endpoint_id: new_id.clone(), service_name: le.service_name.clone(), ipv4: le.ipv4.clone(), ipv6: le.ipv6.clone(), port: le.port, }) .execute(self.0.as_ref().expect("fail to get DB")); if could_insert.is_err() { self.find_endpoint(&le).map(|existing| existing.endpoint_id) } else { Some(new_id) } } } } else { None } } } impl Handler<crate::opentracing::Span> for super::DbExecutor { type Result = MessageResult<crate::opentracing::Span>; fn handle(&mut self, msg: crate::opentracing::Span, ctx: &mut Self::Context) -> Self::Result { self.check_db_connection(ctx); let mut to_upsert = get_all_from_span(&msg); to_upsert.span_db.local_endpoint_id = self.upsert_endpoint(to_upsert.local_endpoint); to_upsert.span_db.remote_endpoint_id = self.upsert_endpoint(to_upsert.remote_endpoint); let _span_in_db = { use super::super::schema::span::dsl::*; match span .filter( id.eq(&to_upsert.span_db.id) .and(trace_id.eq(&to_upsert.span_db.trace_id)), ) .first::<SpanDb>(self.0.as_ref().expect("fail to get DB")) { Ok(_) => { //TODO: manage more update cases than duration diesel::update( span.filter( id.eq(&to_upsert.span_db.id) .and(trace_id.eq(&to_upsert.span_db.trace_id)), ), ) .set(duration.eq(to_upsert.span_db.duration)) .execute(self.0.as_ref().expect("fail to get DB")) .map_err(|err| self.reconnect_if_needed(ctx, &err)) } Err(_) => diesel::insert_into(span) .values(&to_upsert.span_db) .execute(self.0.as_ref().expect("fail to get DB")) .map_err(|err| self.reconnect_if_needed(ctx, &err)), } }; { use super::super::schema::annotation::dsl::*; to_upsert.annotations.iter().for_each(|item| { diesel::insert_into(annotation) .values(item) .execute(self.0.as_ref().expect("fail to get DB")) .ok(); }); } { use super::super::schema::tag::dsl::*; let existing_tags = tag .select(name) .filter( span_id 
.eq(to_upsert.span_db.id) .and(name.eq_any(to_upsert.tags.iter().map(|item| item.name.clone()))), ) .load::<String>(self.0.as_ref().expect("fail to get DB")) .ok() .unwrap_or_else(|| vec![]); to_upsert.tags.iter().for_each(|item| { if existing_tags.contains(&item.name) { diesel::update(tag.filter(span_id.eq(&item.span_id).and(name.eq(&item.name)))) .set(value.eq(&item.value)) .execute(self.0.as_ref().expect("fail to get DB")) .ok(); } else { diesel::insert_into(tag) .values(item) .execute(self.0.as_ref().expect("fail to get DB")) .ok(); } }); } MessageResult(msg) } }
//! PCIe bus driver: enumerates the configuration space exposed via the root
//! device's MMIO child and asks the kernel to initialize each function found.
use crate::bus::Bus;
use twz::device::{BusType, Device};
use twz::TwzErr;

const PCIE_BUS_HEADER_MAGIC: u32 = 0x88582323;
const PCIE_HEADER_MULTIFUNCTION: u8 = 1 << 7;
const KACTION_CMD_PCIE_INIT_DEVICE: i64 = 1;

pub struct PcieBus {
    root: Device,
}

/// Bus-level info header provided by the kernel (validated via `magic`).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
struct PcieInfo {
    magic: u32,
    start_bus: u32,
    end_bus: u32,
    segnr: u32,
    flags: u64,
}

/// Per-function info record exposed for child devices.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
struct PcieFunctionInfo {
    deviceid: u16,
    vendorid: u16,
    classid: u16,
    subclassid: u16,
    progif: u16,
    flags: u16,
    bus: u16,
    device: u16,
    function: u16,
    segment: u16,
    header_type: u8,
    resv2: u8,
    resv: u16,
    prefetch: [u32; 6],
    bars: [u64; 6],
    barsz: [u64; 6],
}

/// Common (type-independent) PCIe configuration-space header.
#[repr(C, packed)]
struct PcieConfigSpaceHdr {
    /* 0x00 */
    vendor_id: u16,
    device_id: u16,
    /* 0x04 */
    command: u16,
    status: u16,
    /* 0x08 */
    revision: u8,
    progif: u8,
    subclass: u8,
    class_code: u8,
    /* 0x0C */
    cache_line_size: u8,
    latency_timer: u8,
    header_type: u8,
    bist: u8,
}

/*
#[repr(C, packed)]
struct PcieConfigSpaceDevice {
    hdr: PcieConfigSpaceHdr,
    /* 0x10 */
    bar: [u32; 6],
    /* 0x28 */
    cardbus_cis_pointer: u32,
    /* 0x2C */
    subsystem_vendor_id: u16,
    subsystem_id: u16,
    /* 0x30 */
    expansion_rom_base_address: u32,
    /* 0x34 */
    cap_ptr: u32,
    /* 0x38 */
    reserved1: u32,
    /* 0x3C */
    interrupt_line: u8,
    interrupt_pin: u8,
    min_grant: u8,
    max_latency: u8,
}
*/

/// Type-1 (PCI-to-PCI bridge) configuration-space layout.
#[repr(C, packed)]
struct PcieConfigSpaceBridge {
    hdr: PcieConfigSpaceHdr,
    bar: [u32; 2],
    primary_bus_nr: u8,
    secondary_bus_nr: u8,
    subordinate_bus_nr: u8,
    secondary_latency_timer: u8,
    io_base: u8,
    io_limit: u8,
    secondary_status: u8,
    memory_base: u16,
    memory_limit: u16,
    pref_memory_base: u16,
    pref_memory_limit: u16,
    /* 28 */
    pref_base_upper: u32,
    pref_limit_upper: u32,
    io_base_upper: u16,
    io_limit_upper: u16,
    cap_ptr: u32,
    exp_rom_base: u32,
    interrupt_line: u8,
    interrupt_pin: u8,
    bridge_control: u16,
}

impl PcieBus {
    /// Asks the kernel (via kaction) to initialize one function, encoding
    /// segment/bus/device/function into the argument word.
    fn init_device(&mut self, info: &PcieInfo, bus: u32, device: u32, function: u32) {
        let wc: i64 = 0;
        self.root.kaction(
            KACTION_CMD_PCIE_INIT_DEVICE,
            (info.segnr as i64) << 16
                | (bus as i64) << 8
                | (device as i64) << 3
                | (function as i64)
                | (wc << 32),
        );
    }

    /// Reads a bridge function's config space and returns its secondary bus
    /// number, or `None` if the MMIO region is unavailable.
    fn init_bridge(&mut self, info: &PcieInfo, bus: u32, device: u32, function: u32) -> Option<u32> {
        // ECAM-style offset: (bus << 20) | (device << 15) | (function << 12).
        let addr =
            ((bus - info.start_bus) as u64) << 20 | (device as u64) << 15 | (function as u64) << 12;
        let mmio = self.root.get_child_mmio(0);
        if let Some(mmio) = mmio {
            let mmio = mmio.access_offset::<PcieConfigSpaceBridge>(addr);
            Some(mmio.secondary_bus_nr as u32)
        } else {
            None
        }
    }

    /// Scans all 32 device slots (and up to 8 functions each) on `bus`,
    /// returning `(header_type, bus, device, function)` for each present function.
    fn scan_bus(&mut self, info: &PcieInfo, bus: u32) -> Vec<(u8, u32, u32, u32)> {
        let mut devices = vec![];
        'outer: for device in 0..32 {
            'inner: for function in 0..8 {
                let addr = ((bus - info.start_bus) as u64) << 20
                    | (device as u64) << 15
                    | (function as u64) << 12;
                let mmio = self.root.get_child_mmio(0);
                if let Some(mmio) = mmio {
                    let mmio = mmio.access_offset::<PcieConfigSpaceHdr>(addr);
                    let vendor = mmio.vendor_id;
                    // Vendor id 0xffff means "no function present".
                    if vendor != 0xffff {
                        /* Okay, this is a real device */
                        devices.push((mmio.header_type, bus, device, function));
                    }
                    // A device that doesn't advertise the multifunction bit on
                    // function 0 has no further functions to scan.
                    if function == 0 && (mmio.header_type & PCIE_HEADER_MULTIFUNCTION) == 0 {
                        break 'inner;
                    }
                } else {
                    break 'outer;
                }
            }
        }
        devices
    }
}

use crate::devtree::DeviceIdent;
impl Bus for PcieBus {
    fn identify(&self, dev: &mut Device) -> Option<DeviceIdent> {
        let devinfo = *dev.get_child_info::<PcieFunctionInfo>(0).unwrap().base_data();
        Some(DeviceIdent::new(
            Self::get_bus_type(),
            devinfo.vendorid,
            devinfo.deviceid,
            devinfo.classid,
            devinfo.subclassid,
        ))
    }

    fn get_bus_root(&self) -> &Device {
        &self.root
    }

    fn get_bus_type() -> BusType {
        BusType::Pcie
    }

    fn new(root: Device) -> Self {
        PcieBus { root: root }
    }

    /// Validates the bus header, enumerates bus 0 plus anything reachable
    /// through bridges, then initializes every function found.
    fn init(&mut self) -> Result<(), TwzErr> {
        let info = *self.root.get_child_info::<PcieInfo>(0).unwrap().base_data();
        if info.magic != PCIE_BUS_HEADER_MAGIC {
            return Err(TwzErr::Invalid);
        }
        /* the root complex uses bus 0 as its main, and we expect any bridges to be placed on this
         * bus to give us access to other busses. */
        let mut devices = self.scan_bus(&info, 0);
        /* scan through any bridges, adding devices to the list; newly discovered
         * functions are appended and therefore scanned for bridges too.
         * BUGFIX: the previous `loop` indexed `devices[i]` before checking the
         * length, which panicked when the initial scan found no devices. */
        let mut i = 0;
        while i < devices.len() {
            let (hdr_type, bus, device, function) = devices[i];
            // Header type 1 (low 7 bits) identifies a PCI-to-PCI bridge.
            if hdr_type & 0x7f == 1 {
                let secondary_bus = self.init_bridge(&info, bus, device, function);
                if let Some(secondary_bus) = secondary_bus {
                    let mut children = self.scan_bus(&info, secondary_bus);
                    devices.append(&mut children);
                }
            }
            i += 1;
        }
        /* finally, actually initialize devices */
        for (_, bus, device, function) in devices {
            self.init_device(&info, bus, device, function);
        }
        Ok(())
    }
}
/// Decode a boarding pass (7 row characters, 'B' = 1, then 3 column
/// characters, 'R' = 1) into its seat id: `row * 8 + column`.
fn retrieve_seat_id(val: &str) -> i32 {
    let row: i32 = val
        .char_indices()
        .take(7)
        .filter(|&(_, c)| c == 'B')
        .map(|(i, _)| 1 << (6 - i))
        .sum();
    let col: i32 = val
        .char_indices()
        .skip(7)
        .take(3)
        .filter(|&(_, c)| c == 'R')
        .map(|(i, _)| 1 << (2 - (i - 7)))
        .sum();
    row * 8 + col
}

/// Given a sorted list of seat ids, return the first id that is missing
/// between two consecutive entries, or `None` if the list has no gap.
fn find_missing(seats: &[i32]) -> Option<i32> {
    seats
        .windows(2)
        .find(|pair| pair[1] - pair[0] > 1)
        .map(|pair| pair[0] + 1)
}

fn main() {
    // Sanity-check against the worked examples from the puzzle text.
    let test_vals = ["BFFFBBFRRR", "FFFBBBFRRR", "BBFFBBFRLL"];
    for val in &test_vals {
        println!("val: {:?} result: {:?}", val, retrieve_seat_id(val));
    }

    let real_vals = include_str!("day5.txt");
    let mut sorted_seat_ids: Vec<i32> = real_vals.lines().map(retrieve_seat_id).collect();
    sorted_seat_ids.sort();

    // The highest id is simply the last element once sorted.
    let highest = *sorted_seat_ids.last().unwrap();
    println!("Highest val for real is {}", highest);
    println!("My seat is {}", find_missing(&sorted_seat_ids).unwrap());
}
//! Tooling for building rust binaries inside a docker build container and
//! rebuilding/restarting the docker-compose services that consume them.
use crate::er::Result;
use crate::project::ProjectConfig;
use crate::server::{SshConn, SyncBase, SyncSentCache, SyncSet};
use crate::utils::{self, CliEnv};
use failure::format_err;
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use std::process;

/// In-memory model of a docker-compose yml file (version + services).
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct ComposeYml {
    pub version: String,
    pub services: IndexMap<String, ComposeService>,
}

/// One service entry: volume mounts and environment variables.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct ComposeService {
    pub volumes: Vec<String>,
    pub environment: IndexMap<String, String>,
}

impl ComposeYml {
    /// Serializes to a yaml string, converting serde_yaml errors into the
    /// crate's failure-based error type.
    pub fn serialize(&self) -> Result<String> {
        match serde_yaml::to_string::<ComposeYml>(self) {
            Ok(yml_str) => Ok(yml_str),
            Err(e) => return Err(format_err!("{:?}", e)),
        }
    }

    /// Writes the yml to `path` only when the parsed on-disk content differs
    /// (or the file doesn't exist), printing old/new content when it does.
    pub fn save_if_diff(&self, path: &Path) -> Result<()> {
        if path.is_file() {
            let cur_str = std::fs::read_to_string(path)?;
            let current_yml: ComposeYml = serde_yaml::from_str(&cur_str)?;
            if self != &current_yml {
                let yml_str = self.serialize()?;
                println!("ComposeYml diff (old - new):");
                println!("{}", current_yml.serialize()?);
                println!("{}", &yml_str);
                utils::write_file(path, &yml_str)?;
                println!("Wrote {}", path.to_string_lossy());
                Ok(())
            } else {
                Ok(())
            }
        } else {
            let yml_str = self.serialize()?;
            println!("ComposeYml new:");
            println!("{}", &yml_str);
            utils::write_file(path, &yml_str)?;
            println!("Wrote {}", path.to_string_lossy());
            Ok(())
        }
    }
}

/// Rebuilds one compose service's image and restarts it (`build` + `up -d`).
pub fn rebuild_container(
    env: &CliEnv,
    current_process: utils::CurrentProcess,
    project: &ProjectConfig,
    service: String,
) -> Result<utils::CurrentProcess> {
    println!("Rebuilding and restarting service: {}", service);
    // Todo: Should do the following only if dev is running
    // Todo: Option to remove volumes?
    let p = dev_cmds(
        env,
        current_process,
        project,
        vec![
            vec!["build".to_string(), service.clone()],
            vec!["up".to_string(), "-d".to_string(), service.clone()],
        ],
    )?;
    Ok(p)
}

/// Generic rebuild for a rust project with a container
/// by the same name
/// Assets are files or folders that should be moved from the rust source
/// into the container build folder
pub fn rebuild_rust_container(
    env: &CliEnv,
    project_name: &str,
    assets: &Vec<PathBuf>,
    mut current_process: utils::CurrentProcess,
    project: &mut ProjectConfig,
    prod: bool,
) -> Result<utils::CurrentProcess> {
    // This may change, but assuming rust project uses underscores, while
    // docker container is named with hyphens. Underscores has problems in docker
    // when attempting to use as url's with standard request libraries (as underscore
    // is not allowed in domains I believe)
    let hyphen_name = project_name.replace('_', "-");
    // Build rust binary, release if prod = true
    let (process, output_file) = rust_build(env, project_name, current_process, prod)?;
    current_process = process;
    // Container build context lives under server/prod or server/dev.
    let container_build_folder = if prod {
        env.workdir_dir.join(format!("server/prod/{}", hyphen_name))
    } else {
        env.workdir_dir.join(format!("server/dev/{}", hyphen_name))
    };
    let container_file = container_build_folder.join(&project_name);
    // Move output file to prod container
    std::fs::rename(output_file, container_file)?;
    // Assets
    let rust_project = env.workdir_dir.join(&format!("tools/{}", project_name));
    // Don't need sent cache since it should resolve nicely with modified times
    let mut sync_set = SyncSet::new(
        SyncBase::local(&rust_project),
        SyncBase::local(&container_build_folder),
        SyncSentCache::None,
    );
    for asset in assets {
        sync_set.resolve_local(rust_project.join(asset), false)?;
    }
    sync_set.sync_plain()?;
    if prod {
        let server = project.require_server(env)?;
        // todo: It would be nice with some abstraction for a location that kept the
        // connection in this case.
        crate::server::dockerfiles_to_server(env, &server)?;
        crate::project::prod_cmds(
            env,
            project,
            vec![
                vec!["build".to_string(), hyphen_name],
                vec!["up".to_string(), "-d".to_string()],
            ],
            true,
        )?;
        Ok(current_process)
    } else {
        rebuild_container(env, current_process, project, hyphen_name)
    }
}

/// Rebuilds proxy dev binary, then container and restarts the container
/// for given project
/// Todo: Replace these with rebuild_rust_container above
pub fn rebuild_proxy_dev(
    env: &CliEnv,
    mut current_process: utils::CurrentProcess,
    project: &ProjectConfig,
) -> Result<utils::CurrentProcess> {
    let (process, output_file) = rust_build(env, "proxy", current_process, false)?;
    current_process = process;
    // Move output file to proxy dev container
    let container_file = env.workdir_dir.join("server/dev/proxy-dev/proxy");
    std::fs::rename(output_file, container_file)?;
    rebuild_container(env, current_process, project, "proxy".into())
}

/// Rebuilds proxy prod binary, then container and restarts the container
/// for given project
pub fn rebuild_proxy_prod(
    env: &CliEnv,
    mut current_process: utils::CurrentProcess,
    project: &mut ProjectConfig,
) -> Result<utils::CurrentProcess> {
    let (process, output_file) = rust_build(env, "proxy", current_process, true)?;
    current_process = process;
    // Move output file to proxy dev container
    let container_file = env.workdir_dir.join("server/prod/proxy-prod/proxy");
    std::fs::rename(output_file, container_file)?;
    let server = project.require_server(env)?;
    // todo: It would be nice with some abstraction for a location that kept the
    // connection in this case.
    crate::server::dockerfiles_to_server(env, &server)?;
    crate::project::prod_cmds(
        env,
        project,
        vec![
            vec!["build".to_string(), "proxy".to_string()],
            vec!["up".to_string(), "-d".to_string()],
        ],
        true,
    )?;
    Ok(current_process)
}

/// Convencience for single command
#[inline]
pub fn dev_cmd(
    env: &CliEnv,
    current_process: utils::CurrentProcess,
    project: &ProjectConfig,
    user_args: Vec<String>,
) -> Result<utils::CurrentProcess> {
    dev_cmds(env, current_process, project, vec![user_args])
}

/// One-time setup of the build container: builds/starts it with
/// docker-compose, then installs the rust toolchain inside via ssh.
pub fn rust_build_init(env: &CliEnv, mut current_process: utils::CurrentProcess) -> Result<()> {
    // Ensure ssh service is up
    let build_container_dir = env.workdir_dir.join("server/build");
    std::env::set_current_dir(build_container_dir)?;
    // Ensure containers are updated
    let mut cmd = process::Command::new("docker-compose");
    cmd.args(&["build"]);
    // todo: Should check return codes
    current_process = current_process.spawn_and_wait(cmd, false)?;
    // Start containers
    let mut cmd = process::Command::new("docker-compose");
    cmd.args(&["up", "-d"]);
    let _ = current_process.spawn_and_wait(cmd, false)?;
    let ssh = SshConn::connect_container_ssh(env, 9857, "www-data", "www-data", None)?;
    // Installs cargo etc. There is a mode without docs etc for a typical ci setup, but some might be useful
    ssh.exec("curl --proto '=https' --tlsv1.2 https://sh.rustup.rs --output rustup-init.sh && sh rustup-init.sh -y")?;
    // It would be nice to fetch crates.io index
    Ok(())
}

/// Runs `rustup update` inside the build container over ssh.
pub fn rust_build_update(env: &CliEnv, mut current_process: utils::CurrentProcess) -> Result<()> {
    let ssh = SshConn::connect_container_ssh(env, 9857, "www-data", "www-data", None)?;
    // PATH should be in .profile
    ssh.exec("PATH=/var/www/.cargo/bin:$PATH && rustup update")?;
    Ok(())
}

/// Copies project code files through ssh to build container,
/// runs build in debug or release
/// Returns the path to the resulting binary
pub fn rust_build(
    env: &CliEnv,
    rust_project: &str,
    current_process: utils::CurrentProcess,
    release: bool,
) -> Result<(utils::CurrentProcess, PathBuf)> {
    // Keeping it simple, assuming service is up and initialized (see rust_build_init above),
    // todo: make more automated (detect running),
    // though we probably want initialize as separate command in any case
    let ssh = SshConn::connect_container_ssh(env, 9857, "www-data", "www-data", None)?;
    // Sync files except target directory
    let sftp = ssh.sftp()?;
    let tools_dir = env.workdir_dir.join("tools");
    let remote_tools_dir = PathBuf::from("/var/www/tools");
    let mut sync_set = SyncSet::new(
        SyncBase::local(&tools_dir),
        SyncBase::remote(&remote_tools_dir, &sftp),
        // todo: Prefix all of these
        SyncSentCache::load(env, "workdir-rust-build")?,
    );
    sync_set.ignore_rel_path("target");
    sync_set.ignore_dirname("node_modules");
    sync_set.resolve_local_remote(&tools_dir, &remote_tools_dir, false)?;
    sync_set.sync_plain()?;
    let build_cmd = if release {
        "cargo build --release"
    } else {
        "cargo build"
    };
    let rust_project_dir = format!("/var/www/tools/{}", rust_project);
    // PATH should be in .profile
    ssh.exec(format!(
        "PATH=/var/www/.cargo/bin:$PATH && cd {} && {}",
        rust_project_dir, build_cmd
    ))?;
    // Expecting binary/name in cargo file to be the same as given rust_project
    let (built_binary, output_folder, output_dest) = if release {
        let output_folder = String::from("/output/release");
        let output_dest = format!("{}/{}", output_folder, rust_project);
        (
            format!("/var/www/tools/target/release/{}", rust_project),
            output_folder,
            output_dest,
        )
    } else {
        let output_folder = String::from("/output/debug");
        let output_dest = format!("{}/{}", output_folder, rust_project);
        (
            format!("/var/www/tools/target/debug/{}", rust_project),
            output_folder,
            output_dest,
        )
    };
    // todo: Permissions for "output" is currently set to 777 to allow build user to write
    ssh.exec(format!(
        "mkdir -p {} && mv {} {}",
        output_folder, built_binary, output_dest
    ))?;
    // The container's /output directory is bind-mounted under server/build/output
    // on the host, so the binary is reachable at this local path afterwards.
    let bin_local_path = if release {
        env.workdir_dir
            .join(&format!("server/build/output/release/{}", rust_project))
    } else {
        env.workdir_dir
            .join(&format!("server/build/output/debug/{}", rust_project))
    };
    Ok((current_process, bin_local_path))
}

/// Helper to collect a list of files,
/// useful to generate arguments to docker-compose
#[derive(Clone)]
pub struct ComposeCmd {
    // Passed to docker-compose with `-p`.
    project_name: String,
    // Base directory compose files are resolved against.
    server_dir: std::path::PathBuf,
    // Compose files, each emitted as a `-f <file>` pair.
    files: Vec<PathBuf>,
    // Explicit subcommand args; defaults to `up -d` when `None`.
    user_args: Option<Vec<String>>,
}

impl ComposeCmd {
    /// Command builder rooted at the local workdir's `server` directory.
    pub fn local<N: Into<String>>(env: &CliEnv, name: N) -> Self {
        ComposeCmd {
            project_name: name.into(),
            server_dir: env.workdir_dir.join("server"),
            files: Vec::with_capacity(3),
            user_args: None,
        }
    }

    /// Command builder for running on the server side.
    pub fn server<N: Into<String>>(name: N) -> Self {
        ComposeCmd {
            project_name: name.into(),
            // todo: Expecting to be called from a project folder
            // better would be probably absolute path
            server_dir: PathBuf::from("../../viddler/server"),
            files: Vec::with_capacity(3),
            user_args: None,
        }
    }

    /// Adds a compose file resolved relative to `server_dir`.
    pub fn workdir_file<F: AsRef<Path>>(&mut self, compose_file: F) -> &mut Self {
        self.files.push(self.server_dir.join(compose_file.as_ref()));
        self
    }

    /// Adds a compose file path used as given (relative to cwd).
    pub fn relative_file<F: AsRef<Path>>(&mut self, compose_file: F) -> &mut Self {
        self.files.push(compose_file.as_ref().to_owned());
        self
    }

    /// Overrides the default `up -d` subcommand with explicit args.
    pub fn user_args(&mut self, user_args: Vec<String>) -> &mut Self {
        self.user_args = Some(user_args);
        self
    }

    /// Produces the final docker-compose argument vector:
    /// `-f <file>... -p <project> <subcommand args>`.
    pub fn to_args(self) -> Vec<String> {
        let mut args: Vec<String> = self
            .files
            .iter()
            .flat_map(|file| vec!["-f".to_string(), file.to_string_lossy().to_string()])
            .collect();
        // Project name
        args.push("-p".into());
        args.push(self.project_name);
        match self.user_args {
            Some(user_args) => args.extend(user_args),
            None => {
                args.push("up".to_string());
                args.push("-d".to_string());
            }
        }
        args
    }
}

/// Allows multiple commands
// todo: Bit verbose to take String at times
pub fn dev_cmds(
    env: &CliEnv,
    mut current_process: utils::CurrentProcess,
    project: &ProjectConfig,
    cmds: Vec<Vec<String>>,
) -> Result<utils::CurrentProcess> {
    // Generating local docker
    // It would be nice to detect changes beforehand
    // Also it may be a little out of place with wp
    // specifics here. Some module system would be cool
    //crate::wp::create_wp_mounts_docker_yml(env, project)?;
    //crate::wp::create_backup_yml(env, project)?;
    let project_dir = project.dir(env);
    println!("{:?}", project_dir);
    std::env::set_current_dir(project_dir)?;
    let compose_name = format!("{}-dev", project.name);
    // Add base compose files
    let mut compose_cmd = ComposeCmd::local(env, compose_name);
    compose_cmd
        .workdir_file("base/docker-compose.yml")
        .workdir_file("dev/docker-compose.dev.yml");
    //.relative_file("docker/mounts.yml");
    // todo: Ensure docker-compose is installed
    for user_args in cmds {
        // Each command starts from a fresh copy of the base compose args.
        let mut compose_cmd = compose_cmd.clone();
        if user_args.len() > 0 {
            compose_cmd.user_args(user_args);
        }
        // Run command
        // By default, the command inherits stdin, out, err
        // when used with .spawn()
        let mut cmd = process::Command::new("docker-compose");
        cmd.args(compose_cmd.to_args());
        //cmd.arg("--remove-orphans");
        current_process = current_process.spawn_and_wait(cmd, false)?;
    }
    Ok(current_process)
}
use crate::domain::domain::SysUser; use crate::domain::vo::SysRoleVO; use serde::{Deserialize, Serialize}; ///登录数据 #[derive(Debug, Serialize, Deserialize, Clone)] pub struct SignInVO { pub user: Option<SysUser>, pub permissions: Vec<String>, pub access_token: String, pub role: Option<SysRoleVO>, } impl ToString for SignInVO { fn to_string(&self) -> String { serde_json::json!(self).to_string() } }
/// An enum to represent all characters in the CJKUnifiedIdeographsExtensionC block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum CJKUnifiedIdeographsExtensionC { /// \u{2a700}: '𪜀' CjkIdeographExtensionCFirst, /// \u{2b734}: '𫜴' CjkIdeographExtensionCLast, } impl Into<char> for CJKUnifiedIdeographsExtensionC { fn into(self) -> char { match self { CJKUnifiedIdeographsExtensionC::CjkIdeographExtensionCFirst => '𪜀', CJKUnifiedIdeographsExtensionC::CjkIdeographExtensionCLast => '𫜴', } } } impl std::convert::TryFrom<char> for CJKUnifiedIdeographsExtensionC { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '𪜀' => Ok(CJKUnifiedIdeographsExtensionC::CjkIdeographExtensionCFirst), '𫜴' => Ok(CJKUnifiedIdeographsExtensionC::CjkIdeographExtensionCLast), _ => Err(()), } } } impl Into<u32> for CJKUnifiedIdeographsExtensionC { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for CJKUnifiedIdeographsExtensionC { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for CJKUnifiedIdeographsExtensionC { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl CJKUnifiedIdeographsExtensionC { /// The character with the lowest index in this unicode block pub fn new() -> Self { CJKUnifiedIdeographsExtensionC::CjkIdeographExtensionCFirst } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("CJKUnifiedIdeographsExtensionC{:#?}", self); string_morph::to_sentence_case(&s) } }
//! Library that contains utility functions for tests. //! //! It also contains a test module, which checks if all source files are covered by `Cargo.toml` extern crate hyper; extern crate regex; extern crate rustc_serialize; pub mod rosetta_code; use std::fmt::Debug; /// Implementation detail of the `test_sort` macro. #[macro_export] macro_rules! __test_cases { ( $function:path; $( $name:ident => $values:expr; )* ) => { $( #[test] fn $name() { let mut values = $values; $function(&mut values); $crate::check_sorted(&values); } )* } } /// Generates a comprehensive test suite for a sorting algorithm. /// /// This macro can be used to test any sort function that sorts a mutable slice of objects that /// implement the `Ord` or `PartialOrd` traits. /// /// # Example /// /// ``` /// fn sort<E>(elements: &mut [E]) where E: Ord { /// elements.sort(); /// } /// /// #[cfg(test)] /// mod tests { /// test_sort!(super::sort); /// } /// ``` #[macro_export] macro_rules! test_sort { ( $function:path ) => { extern crate rand; use self::rand::Rng; __test_cases! { $function; already_sorted => [-1i32, 0, 3, 6, 99]; array_of_strings => ["beach", "hotel", "airplane", "car", "house", "art"]; empty_vector => Vec::<i32>::new(); one_element_vector => vec![0_i32]; random_numbers => { let mut rng = self::rand::thread_rng(); rng.gen_iter::<i32>().take(10).collect::<Vec<i32>>() }; reverse_sorted_array => [20_i32, 10, 0, -1, -5]; unsorted_array => [4_i32, 65, 2, -31, 0, 99, 2, 83, 782, 1]; unsorted_array_positive => [12_i32, 54, 2, 93, 13, 43, 15, 299, 234]; unsorted_vector_positive => vec![1_i32, 9, 4, 7, 6, 5, 3, 2, 8]; vector_with_repeated_elements => vec![1_i32, 1, 1, 1, 1]; } }; } /// Check if a slice is sorted properly. 
// Panics (via `assert_eq!`) when `candidate` is not in ascending order.
// Works by sorting a clone of the input with the std sort and comparing it
// against the original — hence the `Clone` bound; `Debug` is needed by
// `assert_eq!`'s failure message.
pub fn check_sorted<E>(candidate: &[E])
    where E: Ord + Clone + Debug
{
    let sorted = {
        let mut copy = candidate.iter().cloned().collect::<Vec<_>>();
        copy.sort();
        copy
    };

    assert_eq!(sorted.as_slice(), candidate);
}

// Empty entry point so this file can also be built as a binary target.
#[allow(dead_code)]
fn main() {}

#[cfg(test)]
mod tests {
    use regex::Regex;
    use std::collections::HashSet;
    use std::io::{BufReader, BufRead};
    use std::fs::{self, File};
    use std::path::Path;

    // Sanity check: a sorted vector must pass `check_sorted`.
    #[test]
    fn check_sorted() {
        let sorted = vec![1, 2, 3, 4, 5];
        super::check_sorted(&sorted);
    }

    // Sanity check: an unsorted vector must make `check_sorted` panic.
    #[test]
    #[should_panic]
    fn check_unsorted() {
        let unsorted = vec![1, 3, 2];
        super::check_sorted(&unsorted);
    }

    /// A test to check if all source files are covered by `Cargo.toml`
    #[test]
    fn check_sources_covered() {
        let sources = get_source_files();
        let bins = get_toml_paths();
        let not_covered = get_not_covered(&sources, &bins);

        if !not_covered.is_empty() {
            println!("Error, the following source files are not covered by Cargo.toml:");

            for source in &not_covered {
                println!("{}", source);
            }

            panic!("Please add the previous source files to Cargo.toml");
        }
    }

    /// Returns the names of the source files in the `src` directory
    // Only `*.rs` file names are kept; directory traversal is non-recursive.
    fn get_source_files() -> HashSet<String> {
        let paths = fs::read_dir("./src").unwrap();
        paths.map(|p| {
            p.unwrap()
                .path()
                .file_name()
                .unwrap()
                .to_os_string()
                .into_string()
                .unwrap()
        })
        .filter(|s| s[..].ends_with(".rs"))
        .collect()
    }

    /// Returns the paths of the source files referenced in Cargo.toml
    // Scans Cargo.toml line by line for `path = "..."` entries and keeps only
    // the file-name component of each captured path.
    // NOTE(review): `c.at(1)` is the pre-1.0 `regex` capture API (replaced by
    // `get(1)` in regex 1.x) — this module targets an old toolchain.
    fn get_toml_paths() -> HashSet<String> {
        let c_toml = File::open("./Cargo.toml").unwrap();
        let reader = BufReader::new(c_toml);
        let regex = Regex::new("path = \"(.*)\"").unwrap();
        reader.lines()
            .filter_map(|l| {
                let l = l.unwrap();
                regex.captures(&l).map(|c| {
                    c.at(1)
                        .map(|s| Path::new(s))
                        .unwrap()
                        .file_name()
                        .unwrap()
                        .to_string_lossy()
                        .into_owned()
                })
            })
            .collect()
    }

    /// Returns the filenames of the source files which are not covered by Cargo.toml
    // Set difference: everything in `sources` that has no entry in `paths`.
    fn get_not_covered<'a>(sources: &'a HashSet<String>,
                           paths: &'a HashSet<String>)
                           -> HashSet<&'a String> {
        sources.difference(paths).collect()
    }
}
//! A library for handling Debian files and other tasks related to the Debian package
//! repository. For extracting and retrieving data from `.deb` files, please see
//! [`file::Deb`].
//!
//! Using this library requires a few common Linux packages to be installed. These are
//! `tar`, `mkdir`, and `ar` (part of `binutils`). Please note that there are some issues
//! with `ar` on Ubuntu releases older than `20.04`.

// Shared libraries
pub mod shared;

// File extraction
pub mod file;
// This file is part of dpdk. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT. No part of dpdk, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2017 The developers of dpdk. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT. use super::*; use ::std::cmp::Eq; use ::std::cmp::PartialEq; use ::std::hash::Hash; use ::std::hash::Hasher; include!("AttributeFlags.rs"); include!("MulticastGroupIdentifier.rs");
use async_std::io::{self, Read, Write};
use async_std::net::UdpSocket;
use async_std::task;
use crc::crc32;
use crossbeam_utils::Backoff;
use rand::random;
use std::cell::Cell;
use std::cmp::min;
use std::collections::{HashMap, VecDeque};
use std::io::{Error, ErrorKind};
use std::net::SocketAddr;
use std::pin::Pin;
use std::str::FromStr;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::task::{Context, Poll, Waker};
use std::time::{Duration, Instant};
use std::vec::Vec;

// Protocol command codes carried in the `cmd` byte of every packet.
const CMD_SYN: u8 = 128;
const CMD_SYN_ACK: u8 = 129;
const CMD_ACK: u8 = 130;
const CMD_DATA: u8 = 131;
const CMD_HEARTBEAT: u8 = 132;
const CMD_HEARTBEAT_ACK: u8 = 133;

// Header layout: 4-byte CRC32 + session_id, timestamp, window, xmit, una,
// seq (6 x u32) + 1-byte cmd = 29 bytes before the payload.
const UCP_PACKET_META_SIZE: usize = 29;
// Default send/receive window, in packets.
const DEFAULT_WINDOW: u32 = 512;
// Initial retransmission timeout, in milliseconds.
const DEFAULT_RTO: u32 = 100;
const HEARTBEAT_INTERVAL_MILLIS: u128 = 2500;
// A stream with no traffic for this long is considered broken.
const UCP_STREAM_BROKEN_MILLIS: u128 = 20000;
// Fast-retransmit threshold: a packet skipped by this many later ACKs is
// resent before its RTO expires.
const SKIP_RESEND_TIMES: u32 = 2;

/// One on-the-wire UCP datagram plus its bookkeeping fields.
/// `buf` holds the full serialized packet; the remaining fields mirror the
/// header values (after `parse`) or supply them (before `pack`).
#[derive(Clone)]
struct UcpPacket {
    buf: [u8; 1400],
    // Number of valid bytes in `buf`.
    size: usize,
    // Payload length in bytes (size - UCP_PACKET_META_SIZE).
    payload: u16,
    // Read cursor into `buf` for payload_read_* calls.
    read_pos: usize,
    // How many later ACKs have "skipped over" this unacked packet.
    skip_times: u32,
    session_id: u32,
    timestamp: u32,
    window: u32,
    // Transmission count for this packet.
    xmit: u32,
    // Receiver's "all sequence numbers below this are received".
    una: u32,
    seq: u32,
    cmd: u8,
}

impl UcpPacket {
    /// A zeroed, empty packet.
    fn new() -> UcpPacket {
        UcpPacket {
            buf: [0; 1400],
            size: 0,
            payload: 0,
            read_pos: 0,
            skip_times: 0,
            session_id: 0,
            timestamp: 0,
            window: 0,
            xmit: 0,
            una: 0,
            seq: 0,
            cmd: 0,
        }
    }

    /// Validate the datagram (length + CRC32) and decode the header fields
    /// out of `buf`. Returns `false` for illegal or unknown-command packets.
    fn parse(&mut self) -> bool {
        if !self.is_legal() {
            return false;
        }

        self.payload = (self.size - UCP_PACKET_META_SIZE) as u16;
        self.read_pos = UCP_PACKET_META_SIZE;

        // Skip the 4-byte CRC at the front; decode the header fields in wire order.
        let mut offset = 4;
        self.session_id = self.parse_u32(&mut offset);
        self.timestamp = self.parse_u32(&mut offset);
        self.window = self.parse_u32(&mut offset);
        self.xmit = self.parse_u32(&mut offset);
        self.una = self.parse_u32(&mut offset);
        self.seq = self.parse_u32(&mut offset);
        self.cmd = self.parse_u8(&mut offset);

        self.cmd >= CMD_SYN && self.cmd <= CMD_HEARTBEAT_ACK
    }

    /// Serialize the header fields into `buf` and prepend the CRC32 of
    /// everything after the checksum itself.
    fn pack(&mut self) {
        let mut offset = 4;
        // Copy fields to locals first: write_u32 takes &mut self, so we cannot
        // borrow the fields while calling it.
        let session_id = self.session_id;
        let timestamp = self.timestamp;
        let window = self.window;
        let xmit = self.xmit;
        let una = self.una;
        let seq = self.seq;
        let cmd = self.cmd;

        self.write_u32(&mut offset, session_id);
        self.write_u32(&mut offset, timestamp);
        self.write_u32(&mut offset, window);
        self.write_u32(&mut offset, xmit);
        self.write_u32(&mut offset, una);
        self.write_u32(&mut offset, seq);
        self.write_u8(&mut offset, cmd);

        // Checksum covers header + payload, excluding the checksum field itself.
        offset = 0;
        self.size = self.payload as usize + UCP_PACKET_META_SIZE;
        let digest = crc32::checksum_ieee(&self.buf[4..self.size]);
        self.write_u32(&mut offset, digest);
    }

    /// The serialized bytes ready to be sent on the socket.
    fn packed_buffer(&self) -> &[u8] {
        &self.buf[..self.size]
    }

    // Read a big-endian u32 at `offset`, advancing it.
    // SAFETY relies on callers keeping `offset` within `buf`; note the read
    // is unaligned-pointer based (see unsafe block).
    fn parse_u32(&self, offset: &mut isize) -> u32 {
        let u = unsafe { *(self.buf.as_ptr().offset(*offset) as *const u32) };
        *offset += 4;
        u32::from_be(u)
    }

    fn parse_u8(&self, offset: &mut isize) -> u8 {
        let u = self.buf[*offset as usize];
        *offset += 1;
        u
    }

    // Write a u32 big-endian at `offset`, advancing it.
    fn write_u32(&mut self, offset: &mut isize, u: u32) {
        unsafe {
            *(self.buf.as_ptr().offset(*offset) as *mut u32) = u.to_be();
        }
        *offset += 4;
    }

    fn write_u8(&mut self, offset: &mut isize, u: u8) {
        self.buf[*offset as usize] = u;
        *offset += 1;
    }

    // A packet is legal when it is at least header-sized and its CRC matches.
    fn is_legal(&self) -> bool {
        self.size >= UCP_PACKET_META_SIZE && self.is_crc32_correct()
    }

    fn is_crc32_correct(&self) -> bool {
        let mut offset = 0;
        let digest = self.parse_u32(&mut offset);
        crc32::checksum_ieee(&self.buf[4..self.size]) == digest
    }

    fn is_syn(&self) -> bool {
        self.cmd == CMD_SYN
    }

    // Free payload space left in `buf`.
    fn remaining_load(&self) -> usize {
        self.buf.len() - self.payload as usize - UCP_PACKET_META_SIZE
    }

    // Byte offset in `buf` where the next payload write goes.
    fn payload_offset(&self) -> isize {
        (self.payload as usize + UCP_PACKET_META_SIZE) as isize
    }

    /// Append a u32 to the payload; `false` when there is no room.
    fn payload_write_u32(&mut self, u: u32) -> bool {
        if self.remaining_load() >= 4 {
            let mut offset = self.payload_offset();
            self.write_u32(&mut offset, u);
            self.payload += 4;
            true
        } else {
            false
        }
    }

    /// Append a byte slice to the payload; `false` when it does not fit whole.
    fn payload_write_slice(&mut self, buf: &[u8]) -> bool {
        if self.remaining_load() >= buf.len() {
            let offset = self.payload_offset() as usize;
            let end = offset + buf.len();
            self.buf[offset..end].copy_from_slice(buf);
            self.payload += buf.len() as u16;
            true
        } else {
            false
        }
    }

    // Unread payload bytes remaining after `read_pos`.
    fn payload_remaining(&self) -> usize {
        self.size - self.read_pos
    }

    /// Read a u32 from the payload at `read_pos`; panics on out-of-range.
    fn payload_read_u32(&mut self) -> u32 {
        if self.read_pos + 4 > self.size {
            panic!("Out of range when read u32 from {}", self.read_pos);
        }

        let mut offset = self.read_pos as isize;
        let u = self.parse_u32(&mut offset);
        self.read_pos = offset as usize;
        u
    }

    /// Copy as much unread payload as fits into `buf`; returns bytes copied.
    fn payload_read_slice(&mut self, buf: &mut [u8]) -> usize {
        let size = min(self.payload_remaining(), buf.len());
        let end_pos = self.read_pos + size;

        if size > 0 {
            buf[0..size].copy_from_slice(&self.buf[self.read_pos..end_pos]);
            self.read_pos = end_pos;
        }

        size
    }
}

type UcpPacketQueue = VecDeque<Box<UcpPacket>>;

/// Connection state machine for one UCP session.
#[derive(Clone, Copy)]
enum UcpState {
    NONE,
    ACCEPTING,
    CONNECTING,
    ESTABLISHED,
}

/// Shared state of one UCP session.
///
/// NOTE(review): interior mutability is done with `Cell` + raw-pointer access
/// (`Cell::as_ptr`) rather than `RefCell`/`Mutex`; cross-thread safety appears
/// to rely entirely on the `lock` spinlock below plus the manual
/// `unsafe impl Send/Sync` — confirm every `Cell` access happens while the
/// spinlock is held before modifying this code.
struct InnerStream {
    // 0 = unlocked, 1 = locked; a hand-rolled spinlock (see `lock`/`unlock`).
    lock: AtomicUsize,
    alive: AtomicBool,
    socket: Arc<UdpSocket>,
    remote_addr: SocketAddr,
    // Base instant for relative millisecond timestamps.
    initial_time: Instant,
    // Last time any packet for this session was processed.
    alive_time: Cell<Instant>,
    // Last time a heartbeat was sent.
    heartbeat: Cell<Instant>,
    state: Cell<UcpState>,
    // In-flight (sent, unacked) packets.
    send_queue: Cell<UcpPacketQueue>,
    // Received data packets ordered by seq, pending delivery to the reader.
    recv_queue: Cell<UcpPacketQueue>,
    // Packets written by the application but not yet admitted to send_queue.
    send_buffer: Cell<UcpPacketQueue>,
    read_waker: Cell<Option<Waker>>,
    write_waker: Cell<Option<Waker>>,
    // (seq, timestamp) pairs to acknowledge on the next output pass.
    ack_list: Cell<Vec<(u32, u32)>>,
    session_id: Cell<u32>,
    local_window: Cell<u32>,
    remote_window: Cell<u32>,
    seq: Cell<u32>,
    una: Cell<u32>,
    rto: Cell<u32>,
    srtt: Cell<u32>,
    rttvar: Cell<u32>,
}

unsafe impl Send for InnerStream {}
unsafe impl Sync for InnerStream {}

/// RAII guard for the spinlock: releases it on drop.
struct Lock<'a> {
    inner: &'a InnerStream,
}

impl Drop for Lock<'_> {
    #[inline]
    fn drop(&mut self) {
        self.inner.unlock();
    }
}

impl InnerStream {
    fn new(socket: Arc<UdpSocket>, remote_addr: SocketAddr) -> Self {
        InnerStream {
            lock: AtomicUsize::new(0),
            alive: AtomicBool::new(true),
            socket: socket,
            remote_addr: remote_addr,
            initial_time: Instant::now(),
            alive_time: Cell::new(Instant::now()),
            heartbeat: Cell::new(Instant::now()),
            state: Cell::new(UcpState::NONE),
            send_queue: Cell::new(UcpPacketQueue::new()),
            recv_queue: Cell::new(UcpPacketQueue::new()),
            send_buffer: Cell::new(UcpPacketQueue::new()),
            read_waker: Cell::new(None),
            write_waker: Cell::new(None),
            ack_list: Cell::new(Vec::new()),
            session_id: Cell::new(0),
            local_window: Cell::new(DEFAULT_WINDOW),
            remote_window: Cell::new(DEFAULT_WINDOW),
            seq: Cell::new(0),
            una: Cell::new(0),
            rto: Cell::new(DEFAULT_RTO),
            srtt: Cell::new(0),
            rttvar: Cell::new(0),
        }
    }

    /// Feed one parsed inbound packet into the session state machine.
    /// Packets from an unexpected peer address are logged and dropped.
    async fn input(&self, packet: Box<UcpPacket>, remote_addr: SocketAddr) {
        if self.remote_addr != remote_addr {
            error!(
                "unexpect packet from {}, expect from {}",
                remote_addr, self.remote_addr
            );
            return;
        }

        let _l = self.lock();
        let state = self.state.get();
        match state {
            UcpState::NONE => {
                // Only a SYN can start a session on the passive side.
                if packet.is_syn() {
                    self.accepting(packet);
                }
            }
            _ => {
                self.processing(packet).await;
            }
        }
    }

    /// Periodic driver: heartbeats, ACK flushing, retransmits and admitting
    /// pending packets — or mark the session dead on liveness timeout.
    async fn output(&self) {
        let _l = self.lock();
        if self.check_if_alive() {
            self.do_heartbeat().await;
            self.send_ack_list().await;
            self.timeout_resend().await;
            self.send_pending_packets().await;
        } else {
            self.die();
        }
    }

    /// Async read: deliver buffered in-order payload or park the waker.
    fn poll_read(&self, cx: &mut Context, buf: &mut [u8]) -> Poll<std::io::Result<usize>> {
        let _l = self.lock();
        if !self.alive() {
            return Poll::Ready(Err(Error::from(ErrorKind::Other)));
        }

        let n = self.recv(buf);
        if n == 0 {
            self.read_waker.set(Some(cx.waker().clone()));
            Poll::Pending
        } else {
            Poll::Ready(Ok(n))
        }
    }

    /// Async write: buffer the data unless the send buffer exceeds the
    /// remote window, in which case park the waker.
    fn poll_write(&self, cx: &mut Context, buf: &[u8]) -> Poll<std::io::Result<usize>> {
        let _l = self.lock();
        if !self.alive() {
            return Poll::Ready(Err(Error::from(ErrorKind::Other)));
        }

        if self.is_send_buffer_overflow() {
            self.write_waker.set(Some(cx.waker().clone()));
            Poll::Pending
        } else {
            self.send(buf);
            Poll::Ready(Ok(buf.len()))
        }
    }

    fn shutdown(&self) {
        let _l = self.lock();
        self.die();
    }

    fn alive(&self) -> bool {
        self.alive.load(Ordering::Relaxed)
    }

    // Mark dead and wake any parked reader/writer so they observe the error.
    fn die(&self) {
        self.alive.store(false, Ordering::Relaxed);
        if let Some(w) = self.read_waker.take() {
            w.wake()
        }
        if let Some(w) = self.write_waker.take() {
            w.wake()
        }
    }

    // Spin (with crossbeam Backoff) until the CAS 0 -> 1 succeeds.
    // NOTE(review): `compare_and_swap` is the pre-1.50 atomic API
    // (deprecated in favour of `compare_exchange`).
    fn lock(&self) -> Lock<'_> {
        let backoff = Backoff::new();
        while self.lock.compare_and_swap(0, 1, Ordering::Acquire) != 0 {
            backoff.snooze();
        }
        Lock { inner: self }
    }

    fn unlock(&self) {
        self.lock.store(0, Ordering::SeqCst);
    }

    /// Drain in-order, already-delivered (seq < una) packets from recv_queue
    /// into `buf`; returns the number of bytes copied.
    fn recv(&self, buf: &mut [u8]) -> usize {
        let mut size = 0;
        let una = self.una.get();
        let recv_queue = unsafe { &mut *self.recv_queue.as_ptr() };

        while size < buf.len() && !recv_queue.is_empty() {
            if let Some(packet) = recv_queue.front_mut() {
                // Wrapping subtraction then signed compare: packets at or
                // beyond `una` are not yet deliverable.
                let diff = (packet.seq - una) as i32;
                if diff >= 0 {
                    break;
                }

                size += packet.payload_read_slice(&mut buf[size..]);
            }

            let no_remain_payload = recv_queue
                .front()
                .map(|packet| packet.payload_remaining() == 0)
                .unwrap();

            if no_remain_payload {
                recv_queue.pop_front();
            }
        }

        size
    }

    /// Buffer application data: first top up the last pending DATA packet,
    /// then cut the rest into new packets.
    fn send(&self, buf: &[u8]) {
        let mut pos = 0;
        let send_buffer = unsafe { &mut *self.send_buffer.as_ptr() };

        if let Some(packet) = send_buffer.back_mut() {
            if packet.cmd == CMD_DATA {
                let remain = min(packet.remaining_load(), buf.len());
                if remain > 0 {
                    packet.payload_write_slice(&buf[0..remain]);
                }
                pos = remain;
            }
        }

        if pos < buf.len() {
            self.make_packet_send(&buf[pos..]);
        }
    }

    // Wake a parked reader when deliverable data is at the queue front.
    fn try_wake_reader(&self) {
        let recv_queue = unsafe { &*self.recv_queue.as_ptr() };
        if let Some(packet) = recv_queue.front() {
            let diff = (packet.seq - self.una.get()) as i32;
            if diff < 0 {
                if let Some(w) = self.read_waker.take() {
                    w.wake();
                }
            }
        }
    }

    // Wake a parked writer once the send buffer drops below the window.
    fn try_wake_writer(&self) {
        if !self.is_send_buffer_overflow() {
            if let Some(w) = self.write_waker.take() {
                w.wake();
            }
        }
    }

    fn is_send_buffer_overflow(&self) -> bool {
        let remote_window = self.remote_window.get();
        let send_buffer = unsafe { &mut *self.send_buffer.as_ptr() };
        send_buffer.len() >= remote_window as usize
    }

    // Liveness check against the last-activity timestamp.
    fn check_if_alive(&self) -> bool {
        let now = Instant::now();
        let interval = (now - self.alive_time.get()).as_millis();
        let alive = interval < UCP_STREAM_BROKEN_MILLIS;
        if !alive {
            error!(
                "ucp alive timeout, remote address: {}, session: {}",
                self.remote_addr,
                self.session_id.get()
            );
        }
        alive
    }

    async fn do_heartbeat(&self) {
        let now = Instant::now();
        let interval = (now - self.heartbeat.get()).as_millis();

        if interval >= HEARTBEAT_INTERVAL_MILLIS {
            let mut heartbeat = self.new_noseq_packet(CMD_HEARTBEAT);
            self.send_packet_directly(&mut heartbeat).await;
            self.heartbeat.set(now);
        }
    }

    /// Flush all pending (seq, timestamp) acknowledgements, batching 8 bytes
    /// per entry into as few ACK packets as possible.
    async fn send_ack_list(&self) {
        let ack_list = self.ack_list.take();
        if ack_list.is_empty() {
            return;
        }

        let mut packet = self.new_noseq_packet(CMD_ACK);

        for &(seq, timestamp) in ack_list.iter() {
            if packet.remaining_load() < 8 {
                self.send_packet_directly(&mut packet).await;
                packet = self.new_noseq_packet(CMD_ACK);
            }

            packet.payload_write_u32(seq);
            packet.payload_write_u32(timestamp);
        }

        self.send_packet_directly(&mut packet).await;
    }

    /// Retransmit in-flight packets whose RTO expired or that were skipped by
    /// enough later ACKs (fast retransmit).
    async fn timeout_resend(&self) {
        let now = self.timestamp();
        let una = self.una.get();
        let rto = self.rto.get();
        let mut resend = Vec::new();

        {
            let send_queue = unsafe { &mut *self.send_queue.as_ptr() };

            for packet in send_queue.iter_mut() {
                let interval = now - packet.timestamp;
                let skip_resend = packet.skip_times >= SKIP_RESEND_TIMES;

                if interval >= rto || skip_resend {
                    // Refresh header fields before retransmission.
                    packet.skip_times = 0;
                    packet.window = self.local_window.get();
                    packet.una = una;
                    packet.timestamp = now;
                    packet.xmit += 1;

                    resend.push(packet.clone());
                }
            }
        }

        for packet in resend.iter_mut() {
            self.send_packet_directly(packet).await;
        }
    }

    /// Move packets from send_buffer into the in-flight send_queue while the
    /// remote window permits, then transmit them.
    async fn send_pending_packets(&self) {
        let now = self.timestamp();
        let una = self.una.get();
        let window = self.remote_window.get() as usize;
        let mut pending = Vec::new();

        {
            let send_queue = unsafe { &mut *self.send_queue.as_ptr() };
            let send_buffer = unsafe { &mut *self.send_buffer.as_ptr() };

            while send_queue.len() < window {
                // Also cap by seq distance between the oldest in-flight packet
                // and the next buffered one.
                if let Some(q) = send_queue.front() {
                    if let Some(p) = send_buffer.front() {
                        let seq_diff = (p.seq - q.seq) as usize;
                        if seq_diff >= window {
                            break;
                        }
                    }
                }

                if let Some(mut packet) = send_buffer.pop_front() {
                    packet.window = self.local_window.get();
                    packet.una = una;
                    packet.timestamp = now;

                    pending.push(packet.clone());
                    send_queue.push_back(packet);
                } else {
                    break;
                }
            }
        }

        for packet in pending.iter_mut() {
            self.send_packet_directly(packet).await;
        }

        self.try_wake_writer();
    }

    /// Active-open: pick a random session id and queue a SYN.
    fn connecting(&self) {
        self.state.set(UcpState::CONNECTING);
        self.session_id.set(random::<u32>());

        let syn = self.new_packet(CMD_SYN);
        self.send_packet(syn);

        info!(
            "connecting ucp server {}, session: {}",
            self.remote_addr,
            self.session_id.get()
        );
    }

    /// Passive-open: adopt the client's session id and queue a SYN-ACK that
    /// echoes the SYN's (seq, timestamp) for RTT measurement.
    fn accepting(&self, packet: Box<UcpPacket>) {
        self.state.set(UcpState::ACCEPTING);
        self.session_id.set(packet.session_id);
        self.una.set(packet.seq + 1);
        self.remote_window.set(packet.window);

        let mut syn_ack = self.new_packet(CMD_SYN_ACK);
        syn_ack.payload_write_u32(packet.seq);
        syn_ack.payload_write_u32(packet.timestamp);
        self.send_packet(syn_ack);

        info!(
            "accepting ucp client {}, session: {}",
            self.remote_addr,
            self.session_id.get()
        );
    }

    /// Dispatch a packet belonging to an existing session to the handler for
    /// the current connection state.
    async fn processing(&self, packet: Box<UcpPacket>) {
        if self.session_id.get() != packet.session_id {
            error!(
                "unexpect session_id: {}, expect {}",
                packet.session_id,
                self.session_id.get()
            );
            return;
        }

        self.alive_time.set(Instant::now());
        self.remote_window.set(packet.window);

        let state = self.state.get();
        match state {
            UcpState::ACCEPTING => {
                self.process_state_accepting(packet);
            }
            UcpState::CONNECTING => {
                self.process_state_connecting(packet).await;
            }
            UcpState::ESTABLISHED => {
                self.process_state_established(packet).await;
            }
            UcpState::NONE => {}
        }
    }

    // Server side of the handshake: the final ACK carries the echoed
    // (seq, timestamp) pair of our SYN-ACK.
    fn process_state_accepting(&self, mut packet: Box<UcpPacket>) {
        if packet.cmd == CMD_ACK && packet.payload == 8 {
            let seq = packet.payload_read_u32();
            let timestamp = packet.payload_read_u32();

            if self.process_an_ack(seq, timestamp) {
                self.state.set(UcpState::ESTABLISHED);
                info!(
                    "{} established, session: {}",
                    self.remote_addr,
                    self.session_id.get()
                );
            }
        }
    }

    async fn process_state_connecting(&self, packet: Box<UcpPacket>) {
        self.process_syn_ack(packet).await;
    }

    async fn process_state_established(&self, packet: Box<UcpPacket>) {
        self.process_una(packet.una);

        match packet.cmd {
            CMD_ACK => {
                self.process_ack(packet);
            }
            CMD_DATA => {
                self.process_data(packet);
            }
            CMD_SYN_ACK => {
                // Duplicate SYN-ACK: re-acknowledge so the peer can finish.
                self.process_syn_ack(packet).await;
            }
            CMD_HEARTBEAT => {
                self.process_heartbeat().await;
            }
            CMD_HEARTBEAT_ACK => {
                self.process_heartbeat_ack();
            }
            _ => {}
        }
    }

    // Drop fully-acknowledged (seq < una) packets from the in-flight queue.
    fn process_una(&self, una: u32) {
        let send_queue = unsafe { &mut *self.send_queue.as_ptr() };

        while !send_queue.is_empty() {
            let diff = send_queue
                .front()
                .map(|packet| (packet.seq - una) as i32)
                .unwrap();

            if diff < 0 {
                send_queue.pop_front();
            } else {
                break;
            }
        }
    }

    // An ACK payload is a sequence of (seq, timestamp) u32 pairs.
    fn process_ack(&self, mut packet: Box<UcpPacket>) {
        if packet.cmd == CMD_ACK && packet.payload % 8 == 0 {
            while packet.payload_remaining() > 0 {
                let seq = packet.payload_read_u32();
                let timestamp = packet.payload_read_u32();
                self.process_an_ack(seq, timestamp);
            }
        }
    }

    /// Queue an acknowledgement for the packet and insert it into recv_queue
    /// in seq order (duplicates and already-delivered packets are dropped),
    /// then advance `una` across any now-contiguous run.
    fn process_data(&self, packet: Box<UcpPacket>) {
        let ack_list = unsafe { &mut *self.ack_list.as_ptr() };
        ack_list.push((packet.seq, packet.timestamp));

        let una = self.una.get();
        let una_diff = (packet.seq - una) as i32;
        if una_diff < 0 {
            // Already delivered to the application; ACK was queued above.
            return;
        }

        // Ordered insert by seq; bail out on duplicate.
        let mut pos = 0;
        let recv_queue = unsafe { &mut *self.recv_queue.as_ptr() };

        for i in 0..recv_queue.len() {
            let seq_diff = (packet.seq - recv_queue[i].seq) as i32;

            if seq_diff == 0 {
                return;
            } else if seq_diff < 0 {
                break;
            } else {
                pos += 1;
            }
        }

        recv_queue.insert(pos, packet);

        // Advance una over the contiguous prefix starting at the insert point.
        for i in pos..recv_queue.len() {
            let una = self.una.get();
            if recv_queue[i].seq == una {
                self.una.set(una + 1);
            } else {
                break;
            }
        }

        self.try_wake_reader();
    }

    /// Client side of the handshake: acknowledge the SYN-ACK and, when our
    /// original SYN is confirmed, move to ESTABLISHED.
    async fn process_syn_ack(&self, mut packet: Box<UcpPacket>) {
        if packet.cmd == CMD_SYN_ACK && packet.payload == 8 {
            let seq = packet.payload_read_u32();
            let timestamp = packet.payload_read_u32();

            let mut ack = self.new_noseq_packet(CMD_ACK);
            ack.payload_write_u32(packet.seq);
            ack.payload_write_u32(packet.timestamp);
            self.send_packet_directly(&mut ack).await;

            match self.state.get() {
                UcpState::CONNECTING => {
                    if self.process_an_ack(seq, timestamp) {
                        self.state.set(UcpState::ESTABLISHED);
                        self.una.set(packet.seq + 1);
                        info!(
                            "{} established, session: {}",
                            self.remote_addr,
                            self.session_id.get()
                        );
                    }
                }
                _ => {}
            }
        }
    }

    async fn process_heartbeat(&self) {
        let mut heartbeat_ack = self.new_noseq_packet(CMD_HEARTBEAT_ACK);
        self.send_packet_directly(&mut heartbeat_ack).await;
    }

    fn process_heartbeat_ack(&self) {
        self.alive_time.set(Instant::now());
    }

    /// Handle one (seq, timestamp) ACK entry: update the RTO from the echoed
    /// timestamp, remove the acked packet, and bump `skip_times` on older
    /// unacked packets for fast retransmit. Returns whether `seq` was found.
    fn process_an_ack(&self, seq: u32, timestamp: u32) -> bool {
        let rtt = self.timestamp() - timestamp;
        self.update_rto(rtt);

        let send_queue = unsafe { &mut *self.send_queue.as_ptr() };
        for i in 0..send_queue.len() {
            if send_queue[i].seq == seq {
                send_queue.remove(i);
                return true;
            } else {
                if send_queue[i].timestamp <= timestamp {
                    send_queue[i].skip_times += 1;
                }
            }
        }

        false
    }

    // Smoothed RTT / RTO update (EWMA of rtt plus 4x deviation).
    fn update_rto(&self, rtt: u32) {
        // The calculation accuracy is milliseconds
        let mut srtt = self.srtt.get();
        if srtt == 0 {
            srtt = rtt;
        }
        srtt = (srtt * 9 + rtt) / 10;

        let mut rttvar = self.rttvar.get();
        let delta = if rtt > srtt { rtt - srtt } else { srtt - rtt };
        rttvar = (rttvar * 3 + delta) / 4;

        let rto = srtt + 4 * rttvar;
        self.rto.set(rto);
        self.srtt.set(srtt);
        self.rttvar.set(rttvar);
    }

    // A packet carrying the next sequence number.
    fn new_packet(&self, cmd: u8) -> Box<UcpPacket> {
        let mut packet = Box::new(UcpPacket::new());
        packet.session_id = self.session_id.get();
        packet.timestamp = self.timestamp();
        packet.window = self.local_window.get();
        packet.seq = self.next_seq();
        packet.una = self.una.get();
        packet.cmd = cmd;
        packet
    }

    // A control packet without a sequence number (ACK/heartbeat).
    fn new_noseq_packet(&self, cmd: u8) -> Box<UcpPacket> {
        let mut packet = Box::new(UcpPacket::new());
        packet.session_id = self.session_id.get();
        packet.timestamp = self.timestamp();
        packet.window = self.local_window.get();
        packet.una = self.una.get();
        packet.cmd = cmd;
        packet
    }

    // Milliseconds since this stream was created.
    fn timestamp(&self) -> u32 {
        (Instant::now() - self.initial_time).as_millis() as u32
    }

    fn next_seq(&self) -> u32 {
        let seq = unsafe { &mut *self.seq.as_ptr() };
        *seq += 1;
        *seq
    }

    // Split `buf` into as many DATA packets as needed and buffer them.
    fn make_packet_send(&self, buf: &[u8]) {
        let buf_len = buf.len();

        let mut pos = 0;
        while pos < buf_len {
            let mut packet = self.new_packet(CMD_DATA);
            let size = min(packet.remaining_load(), buf_len - pos);
            let end_pos = pos + size;

            packet.payload_write_slice(&buf[pos..end_pos]);
            self.send_packet(packet);

            pos = end_pos;
        }
    }

    // Queue a packet for later admission by send_pending_packets.
    fn send_packet(&self, packet: Box<UcpPacket>) {
        let send_buffer = unsafe { &mut *self.send_buffer.as_ptr() };
        send_buffer.push_back(packet);
    }

    // Serialize and write one packet straight to the socket.
    async fn send_packet_directly(&self, packet: &mut Box<UcpPacket>) {
        packet.pack();
        let _ = self
            .socket
            .send_to(packet.packed_buffer(), self.remote_addr)
            .await;
    }
}

/// Public handle to one UCP session; cheap to clone via the inner `Arc`.
pub struct UcpStream {
    inner: Arc<InnerStream>,
}

impl UcpStream {
    /// Client connect: bind an ephemeral UDP socket, start the handshake and
    /// spawn the background send/recv tasks.
    pub async fn connect(server_addr: &str) -> Self {
        let socket = Arc::new(UdpSocket::bind("0.0.0.0:0").await.unwrap());
        let remote_addr = SocketAddr::from_str(server_addr).unwrap();
        let inner = Arc::new(InnerStream::new(socket, remote_addr));
        inner.connecting();

        let sender = inner.clone();
        task::spawn(async move {
            UcpStream::send(sender).await;
        });

        let receiver = inner.clone();
        task::spawn(async move {
            UcpStream::recv(receiver).await;
        });

        UcpStream { inner: inner }
    }

    pub fn shutdown(&self) {
        self.inner.shutdown();
    }

    // Background output pump: tick every 10ms until the stream dies.
    async fn send(inner: Arc<InnerStream>) {
        loop {
            task::sleep(Duration::from_millis(10)).await;
            inner.output().await;

            if !inner.alive() {
                break;
            }
        }
    }

    // Background input pump: read datagrams (5s timeout so liveness is
    // rechecked) and feed them to the state machine.
    async fn recv(inner: Arc<InnerStream>) {
        loop {
            let mut packet = Box::new(UcpPacket::new());
            let result = io::timeout(
                Duration::from_secs(5),
                inner.socket.recv_from(&mut packet.buf),
            )
            .await;

            if !inner.alive() {
                break;
            }

            if let Ok((size, remote_addr)) = result {
                packet.size = size;

                if packet.parse() {
                    inner.input(packet, remote_addr).await;
                } else {
                    error!("recv illgal packet from {}", remote_addr);
                }
            }
        }
    }
}

// async_std's futures-io Read/Write implemented on &UcpStream so shared
// references can be read/written concurrently.
impl Read for &UcpStream {
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context,
        buf: &mut [u8],
    ) -> Poll<std::io::Result<usize>> {
        self.inner.poll_read(cx, buf)
    }
}

impl Write for &UcpStream {
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context,
        buf: &[u8],
    ) -> Poll<std::io::Result<usize>> {
        self.inner.poll_write(cx, buf)
    }

    // Writes go straight to a buffer; nothing to flush at this layer.
    fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<std::io::Result<()>> {
        Poll::Ready(Ok(()))
    }

    fn poll_close(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<std::io::Result<()>> {
        Poll::Ready(Ok(()))
    }
}

type UcpStreamMap = HashMap<SocketAddr, Arc<InnerStream>>;

/// Server side: one UDP socket demultiplexing sessions by peer address.
pub struct UcpListener {
    socket: Arc<UdpSocket>,
    stream_map: UcpStreamMap,
    // Last time dead sessions were swept from the map.
    timestamp: Instant,
}

impl UcpListener {
    pub async fn bind(listen_addr: &str) -> Self {
        let socket = Arc::new(UdpSocket::bind(listen_addr).await.unwrap());
        UcpListener {
            socket: socket,
            stream_map: UcpStreamMap::new(),
            timestamp: Instant::now(),
        }
    }

    /// Block until a new session is opened by a SYN from an unknown peer;
    /// packets for known peers are routed to their existing sessions.
    pub async fn incoming(&mut self) -> UcpStream {
        loop {
            let mut packet = Box::new(UcpPacket::new());
            let result = io::timeout(
                Duration::from_secs(1),
                self.socket.recv_from(&mut packet.buf),
            )
            .await;

            if let Ok((size, remote_addr)) = result {
                packet.size = size;

                if packet.parse() {
                    if let Some(inner) = self.stream_map.get(&remote_addr) {
                        inner.input(packet, remote_addr).await;
                    } else if packet.is_syn() {
                        return self.new_stream(packet, remote_addr).await;
                    } else {
                        error!("unknown ucp session packet from {}", remote_addr);
                    }
                } else {
                    error!("recv illgal packet from {}", remote_addr);
                }
            }

            self.remove_dead_stream();
        }
    }

    // Create a session for a fresh SYN, spawn its output pump and register it.
    // Input is shared through the listener's `incoming` loop, so no per-stream
    // recv task is spawned here.
    async fn new_stream(&mut self, packet: Box<UcpPacket>, remote_addr: SocketAddr) -> UcpStream {
        info!("new ucp client from {}", remote_addr);

        let inner = Arc::new(InnerStream::new(self.socket.clone(), remote_addr));
        inner.input(packet, remote_addr).await;

        let sender = inner.clone();
        task::spawn(async move {
            UcpStream::send(sender).await;
        });

        self.stream_map.insert(remote_addr, inner.clone());
        UcpStream { inner: inner }
    }

    // Sweep dead sessions out of the map, at most once per second.
    fn remove_dead_stream(&mut self) {
        let now = Instant::now();
        if (now - self.timestamp).as_millis() < 1000 {
            return;
        }

        let mut keys = Vec::new();
        for (addr, stream) in self.stream_map.iter() {
            if !stream.alive() {
                keys.push(addr.clone());
            }
        }

        for addr in keys.iter() {
            self.stream_map.remove(addr);
        }

        self.timestamp = now;
    }
}
#[test]
fn percentage_position_bottom_right() {
    use stretch::geometry::{Rect, Size};
    use stretch::node::Node;
    use stretch::style::{Dimension, Style};

    // Child: 55% x 15% of the parent, positioned with percentage
    // end/bottom offsets relative to the parent's size.
    let child = Node::new(
        Style {
            size: Size {
                width: Dimension::Percent(0.55f32),
                height: Dimension::Percent(0.15f32),
                ..Default::default()
            },
            position: Rect {
                end: Dimension::Percent(0.2f32),
                bottom: Dimension::Percent(0.1f32),
                ..Default::default()
            },
            ..Default::default()
        },
        vec![],
    );

    // Fixed 500x500 root containing the single child above.
    let root = Node::new(
        Style {
            size: Size {
                width: Dimension::Points(500f32),
                height: Dimension::Points(500f32),
                ..Default::default()
            },
            ..Default::default()
        },
        vec![&child],
    );

    let layout = root.compute_layout(Size::undefined()).unwrap();

    // Root keeps its fixed size at the origin.
    assert_eq!(layout.size.width, 500f32);
    assert_eq!(layout.size.height, 500f32);
    assert_eq!(layout.location.x, 0f32);
    assert_eq!(layout.location.y, 0f32);

    // Child: 0.55 * 500 = 275 wide, 0.15 * 500 = 75 tall; offset by
    // end 20% (-100) and bottom 10% (-50).
    let child_layout = &layout.children[0usize];
    assert_eq!(child_layout.size.width, 275f32);
    assert_eq!(child_layout.size.height, 75f32);
    assert_eq!(child_layout.location.x, -100f32);
    assert_eq!(child_layout.location.y, -50f32);
}
use amethyst::{
    prelude::{ World, Builder, WorldExt },
    ecs::Entity,
    assets::{ Handle },
    core::transform::Transform
};
use amethyst_gltf::GltfSceneAsset;

use super::behaviour::{CanTransform, RotateDirection};

/// A soldier: its ECS entity handle plus the logical position and
/// (horizontal, vertical) rotation in radians tracked alongside it.
pub struct Soldier {
    entity: Entity,
    position: (f32, f32, f32),
    rotation: (f32, f32),
}

impl Soldier {
    /// Spawn a soldier entity carrying the given glTF scene, placed at
    /// (0, 0, 10) in world space.
    pub fn new(world: &mut World, soldier: Handle<GltfSceneAsset>) -> Soldier {
        let mut transform = Transform::default();
        transform.set_translation_xyz(0.0, 0.0, 10.0);

        let entity = world
            .create_entity()
            .with(soldier)
            .with(transform)
            .build();

        Soldier {
            entity,
            position: (0.0, 0.0, 0.0),
            rotation: (0.0, 0.0),
        }
    }
}

impl CanTransform for Soldier {
    fn set_position(&mut self, pos: (f32, f32, f32)) {
        self.position = pos;
    }

    /// Update one component of the (horizontal, vertical) rotation pair,
    /// leaving the other component unchanged.
    fn set_rotation(&mut self, dir: RotateDirection, rad: f32) {
        match dir {
            RotateDirection::Horizontal => {
                // Replace the horizontal component, keep the vertical one.
                self.rotation = (rad, self.rotation.1);
            }
            RotateDirection::Vertical => {
                // BUG FIX: this previously read `self.rotation.1` for the
                // first component, overwriting the horizontal angle with the
                // stale vertical one. Keep the horizontal component (`.0`)
                // and replace only the vertical component.
                self.rotation = (self.rotation.0, rad);
            }
        };
    }

    fn get_entity(&self) -> Entity {
        self.entity
    }
}
// NOTE(review): this file is written for a pre-1.0 Rust toolchain
// (`#![feature(globs)]`, `non_snake_case_functions`, old `libc` paths) and
// will not compile on modern Rust without porting.
#![feature(globs)]
#![allow(non_snake_case_functions)]
#![allow(dead_code)]
#![allow(non_camel_case_types)]

extern crate libc;
use libc::types::common::c95::c_void;

use rinapi::prelude::*;
mod rinapi;

// Win32-style window procedure: handles messages dispatched to the window.
// On Create it plays a sound asynchronously; on Destroy it posts the quit
// message; everything else is passed to the default handler.
extern "stdcall" fn window_procedure(
    window: Window,
    message: UINT,
    wParam: WPARAM,
    lParam: LPARAM,
) -> LRESULT {
    let normal = PostWindowMessages::Normal;
    match message {
        WindowMessages::Create => {
            MultimediaService::playSound(
                "helloworld.wav".asText().as_ptr(),
                None,
                SoundPlayOptions::FileName | SoundPlayOptions::Async,
            );
            normal
        }
        /*
        WindowMessages::Paint => {
            return PostWindowMessages::Normal;
        }
        ,
        */
        WindowMessages::Destroy => {
            MessageService::postQuit(PostWindowMessages::Normal);
            normal
        }
        _ => {
            // Delegate unhandled messages to the default window procedure.
            window.pass(message, wParam, lParam)
        }
    }
}

// Classic Win32 application skeleton: register a window class, create and
// show the window, then run the message pump until quit.
fn main() {
    let application = QuickService::Application();
    let appName = "RINAPI hello world".asText();
    let new_class_name = "New Class".asText().as_ptr();

    let mut window_class = WindowClass::new(&WindowClassLayout {
        class_style: None,
        window_procedure: window_procedure,
        class_extra_size: 0,
        window_extra_size: 0,
        application: application,
        icon: Some(IconService::load(StandardIcons::Warning)),
        cursor: Some(CursorService::load(StandardCursors::Wait)),
        background: Some(DCService::GetStockObject(StockLogicalObjects::DarkGrayBrush)),
        menu_name: None,
        class_name: new_class_name,
    });

    window_class.RegisterClass();

    // Position/size all left to the system defaults.
    let window = WindowService::CreateWindow(
        Some(new_class_name),
        None,
        WindowStyles::OverLappedWindow,
        CreateWindowOptions::UseDefault,
        CreateWindowOptions::UseDefault,
        CreateWindowOptions::UseDefault,
        CreateWindowOptions::UseDefault,
        None,
        None,
        Some(application),
        None,
    );

    window.show(WindowShowStyleCommands::ShowNormal);
    window.update();

    // Message pump: translate and dispatch until GetMessage signals quit.
    let mut message = Message::new();
    while message.GetMessage(None, Some(0), Some(0)) {
        message.translate();
        message.dispatch();
    }

    //return message.wParam;
    return;
}
use crate::{
    hex::coordinates::{
        cubic::CubicVector,
        direction::{HexagonalDirection, NUM_DIRECTIONS},
        ring::{BigRingIter, RingIter},
        HexagonalVector,
    },
    vector::Vector2ISize,
};
use std::ops::{Mul, MulAssign};

/// A hexagonal-grid coordinate in axial (q, r) form, backed by a
/// `Vector2ISize` (x = q, y = r). Arithmetic (`Add`/`Sub` and their assign
/// forms) is derived component-wise from the inner vector.
#[derive(
    Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Add, AddAssign, Sub, SubAssign,
    Debug,
)]
pub struct AxialVector(Vector2ISize);

impl AxialVector {
    /// Build a vector from its axial components.
    pub fn new(q: isize, r: isize) -> Self {
        Self(Vector2ISize { x: q, y: r })
    }

    /// The q (column) component.
    pub fn q(&self) -> isize {
        self.0.x
    }

    /// The r (row) component.
    pub fn r(&self) -> isize {
        self.0.y
    }

    /// Hex-grid distance to `other`, computed by converting both endpoints
    /// to cubic coordinates and delegating to `CubicVector::distance`.
    pub fn distance(self, other: Self) -> isize {
        CubicVector::from(self).distance(CubicVector::from(other))
    }

    /// Iterator over the ring of hexes at exactly `radius` from `self`.
    pub fn ring_iter(&self, radius: usize) -> RingIter<Self> {
        RingIter::new(radius, *self)
    }

    /// Iterator over a ring of "big hexes" (cells of radius `cell_radius`)
    /// at `radius` big-cell steps from `self`.
    pub fn big_ring_iter(&self, cell_radius: usize, radius: usize) -> BigRingIter<Self> {
        BigRingIter::new(cell_radius, radius, *self)
    }
}

// Scalar multiplication, delegated to the inner vector.
impl Mul<isize> for AxialVector {
    type Output = Self;

    fn mul(self, rhs: isize) -> Self::Output {
        Self(self.0 * rhs)
    }
}

impl MulAssign<isize> for AxialVector {
    fn mul_assign(&mut self, rhs: isize) {
        self.0 *= rhs
    }
}

// Commuted form so `isize * AxialVector` also works.
impl Mul<AxialVector> for isize {
    type Output = AxialVector;

    fn mul(self, rhs: AxialVector) -> Self::Output {
        rhs * self
    }
}

impl HexagonalVector for AxialVector {}

// Don't use constructor and lazy_static so that the compiler can actually optimize the use
// of directions.
// The six axial unit directions, listed in ring-walk order; indices pair up
// so that DIRECTIONS[i] + DIRECTIONS[i + 3] == zero (verified by the
// `test_axial_directions_have_opposite` test below).
const DIRECTIONS: [AxialVector; NUM_DIRECTIONS] = [
    AxialVector(Vector2ISize { x: 1, y: 0 }),
    AxialVector(Vector2ISize { x: 1, y: -1 }),
    AxialVector(Vector2ISize { x: 0, y: -1 }),
    AxialVector(Vector2ISize { x: -1, y: 0 }),
    AxialVector(Vector2ISize { x: -1, y: 1 }),
    AxialVector(Vector2ISize { x: 0, y: 1 }),
];

impl HexagonalDirection for AxialVector {
    // Direct table lookup; panics if `direction >= NUM_DIRECTIONS`.
    fn direction(direction: usize) -> Self {
        DIRECTIONS[direction]
    }
}

#[test]
fn test_new_axial_vector() {
    assert_eq!(
        AxialVector::new(1, -3),
        AxialVector(Vector2ISize { x: 1, y: -3 })
    )
}

#[test]
fn test_axial_vector_q() {
    assert_eq!(AxialVector::new(1, -3).q(), 1);
}

#[test]
fn test_axial_vector_r() {
    assert_eq!(AxialVector::new(1, -3).r(), -3);
}

#[test]
fn test_axial_vector_addition() {
    assert_eq!(
        AxialVector::new(1, -3) + AxialVector::new(-10, 30),
        AxialVector::new(-9, 27)
    );
}

#[test]
fn test_axial_vector_subtraction() {
    assert_eq!(
        AxialVector::new(1, -3) - AxialVector::new(-10, 30),
        AxialVector::new(11, -33)
    );
}

#[test]
fn test_axial_vector_distance() {
    // Distance is symmetric.
    let a = AxialVector::new(1, -3);
    let b = AxialVector::new(-2, 5);
    assert_eq!(a.distance(b), 8);
    assert_eq!(b.distance(a), 8);
}

#[test]
fn test_axial_directions_are_unique() {
    // Pairwise inequality over all direction indices.
    for dir1 in 0..NUM_DIRECTIONS - 1 {
        for dir2 in dir1 + 1..NUM_DIRECTIONS {
            assert_ne!(DIRECTIONS[dir1], DIRECTIONS[dir2])
        }
    }
}

#[test]
fn test_axial_directions_have_opposite() {
    // DIRECTIONS[i] and DIRECTIONS[i + 3] must cancel out.
    for dir in 0..NUM_DIRECTIONS / 2 {
        assert_eq!(
            DIRECTIONS[dir] + DIRECTIONS[dir + NUM_DIRECTIONS / 2],
            AxialVector::default()
        );
    }
}

#[test]
fn test_axial_neighbor() {
    assert_eq!(AxialVector::new(-1, 1).neighbor(0), AxialVector::new(0, 1));
}

/// Shared driver for the ring-iterator tests: walks the ring at `radius`
/// around the origin and checks, in lockstep, that (1) `peek` always agrees
/// with the following `next`, (2) the yielded hexes match `expected` in
/// order, (3) every yielded hex is exactly `radius` away from the center, and
/// (4) `size_hint` is exact.
// NOTE(review): `&[AxialVector]` would be more idiomatic than `&Vec<_>` here.
#[cfg(test)]
fn do_test_axial_ring_iter(radius: usize, expected: &Vec<AxialVector>) {
    let center = AxialVector::default();
    let mut iter = center.ring_iter(radius);
    let mut peeked = iter.peek().cloned();
    assert!(peeked.is_some());
    let mut i = 0;
    loop {
        let next = iter.next();
        assert_eq!(next, peeked);
        peeked = iter.peek().cloned();
        if i < expected.len() {
            assert_eq!(next, Some(expected[i]));
            assert_eq!(expected[i].distance(center), radius as isize);
        } else {
            // Past the expected elements the iterator must be exhausted.
            assert_eq!(next, None);
            break;
        }
        i += 1;
    }
    assert_eq!(peeked, None);
    // A fused-style check: further calls keep returning None.
    assert_eq!(iter.next(), None);
    assert_eq!(iter.size_hint(), (expected.len(), Some(expected.len())));
}

#[test]
fn test_axial_ring_iter0() {
    // Radius 0 is just the center hex.
    do_test_axial_ring_iter(0, &vec![AxialVector::default()]);
}

#[test]
fn test_axial_ring_iter1() {
    do_test_axial_ring_iter(
        1,
        &vec![
            AxialVector::new(-1, 1),
            AxialVector::new(0, 1),
            AxialVector::new(1, 0),
            AxialVector::new(1, -1),
            AxialVector::new(0, -1),
            AxialVector::new(-1, 0),
        ],
    );
}

#[test]
fn test_axial_ring_iter2() {
    do_test_axial_ring_iter(
        2,
        &vec![
            AxialVector::new(-2, 2),
            AxialVector::new(-1, 2),
            AxialVector::new(0, 2),
            AxialVector::new(1, 1),
            AxialVector::new(2, 0),
            AxialVector::new(2, -1),
            AxialVector::new(2, -2),
            AxialVector::new(1, -2),
            AxialVector::new(0, -2),
            AxialVector::new(-1, -1),
            AxialVector::new(-2, 0),
            AxialVector::new(-2, 1),
        ],
    );
}
/// Walk-through of Rust fundamentals: variables and mutability, data types,
/// functions, and control flow. Each section prints its results.
fn main() {
    //
    // ─── VARIABLES AND MUTABILITY ───────────────────────────────────────────────────
    //

    // Variables are immutable by default
    let immutable_variable = 5;
    println!("The value of immutable_variable is: {}", immutable_variable);
    // immutable_variable = 6; // Since the binding is immutable, this would throw an error

    // This variable will be mutable
    let mut mutable_variable = 5;
    println!("The value of mutable_variable is: {}", mutable_variable);
    // This will mutate the variable
    mutable_variable = 6;
    println!("The value of mutable_variable is: {}", mutable_variable);

    // Constants can only be declared once in the scope.
    const MAX_POINTS: u32 = 100_000;
    println!("The value of constant MAX_POINTS is: {}", MAX_POINTS);

    // Shadowing: each `let` re-declares the name, replacing the previous
    // binding (unlike `mut`, which mutates a single binding in place).
    let shadowed_variable = 5;
    let shadowed_variable = shadowed_variable + 1;
    let shadowed_variable = shadowed_variable * 2;
    println!("The value of shadowed variable is {}", shadowed_variable);

    // Shadowing also allows the type to change: `spaces` goes from &str to usize.
    let spaces = " ";
    let spaces = spaces.len();
    println!("The number of spaces in the spaces variable is: {}", spaces);

    //
    // ─── DATA TYPES ─────────────────────────────────────────────────────────────────
    //

    // We need to provide the type to parse to; this would error without the u32 annotation.
    let parsed_number_from_string: u32 = "42".parse().expect("Not a number!");
    println!("parsed_number_from_string is: {}", parsed_number_from_string);

    // Integer literals
    // let integer_literal_decimal = 922_222;
    // let integer_literal_hex = 0xff;
    // let integer_literal_octal = 0o77;
    // let integer_literal_binary = 0b1111_0000;

    // Numeric operations (note: `%` binds tighter than `-`, like `*` and `/`).
    let calc = 5.0 + 3.0 - 3.5 / 2.2 * 4.0 % 5.0;
    println!("numeric operations calc is: {}", calc);

    // Boolean type
    let boolean_type: bool = true;
    println!("boolean type value {}", boolean_type );

    // Character type (a char is a Unicode scalar value, not a byte)
    let heart_eyed_cat = '😻';
    println!("Character type: {}", heart_eyed_cat );

    // Compound types

    // Tuple type
    //let tuple: (i32, f64, u8) = (500, 6.4, 1);

    // Array type: fixed length, indexed access
    let array = [1, 2, 3, 4, 5];
    let index = 2;
    let element = array[index];
    println!("The value array element is: {}", element);

    //
    // ─── FUNCTIONS ──────────────────────────────────────────────────────────────────
    //

    println!("Return value of the function {}", another_function(5));

    //
    // ─── CONTROL FLOW ───────────────────────────────────────────────────────────────
    //

    let test_number = 3;

    // Basic if / else-if.
    // NOTE(review): because `!= 0` is tested first, the `% 3 == 0` branch can
    // only run when the number is 0 (and 0 % 3 == 0), so its message prints
    // exactly when the number is zero — confirm this ordering is intentional.
    if test_number != 0 {
        println!("number was something other than zero");
    } else if test_number % 3 == 0 {
        println!("number is divisible by 3");
    }

    // Conditional let statement: `if` is an expression, so it can yield a value.
    let conditional_number = if test_number != 0 { 5 } else { 0 };
    println!("conditional_ number is: {}", conditional_number);

    // Loop statement: runs until an explicit `break`.
    let mut loop_variable = 0;
    loop {
        loop_variable = loop_variable + 1;
        println!("loop variable: {}", loop_variable);
        if loop_variable == 2 {
            break;
        }
    }

    // While statement
    let mut while_variable = 0;
    while while_variable < 5 {
        println!("while variable: {}", while_variable);
        while_variable = while_variable + 1;
    }

    // For loop over an array iterator
    let for_array = [10,20,30,40,50];
    for array_element in for_array.iter() {
        println!("array element: {}", array_element)
    }
}

/// Prints its argument and returns `(x + 2) * 2`; the final block expression
/// (no semicolon) is the function's return value.
fn another_function(x: i32) -> i32 {
    println!("The passed value is: {}", x);
    {
        let y = x + 2;
        y * 2
    }
}
/// Demonstrates mutability, constants, and shadowing.
fn main() {
    // A `mut` binding can be reassigned in place.
    let mut x = 5;
    println!("The value of x is: {}", x);
    x = 6;
    println!("The value of x is: {}", x);

    // A constant can be set globally
    const MAX_POINTS: u32 = 100_000;
    println!("Max points: {}", MAX_POINTS);

    // Shadowing. Differs from mut vars because 1) we use 'let' each time,
    // 2) we can change it a few times with let, but then it is immutable
    // when we don't use let, and 3) We can change type with shadowing.
    let x = 5; // This 'x' 'shadows' the first one
    let x = x + 1;
    let x = x * 2;
    println!("The value of x is: {}", x);

    // Changing type with shadowing: &str becomes usize.
    let spaces = " ";
    let spaces = spaces.len();
    println!("Spaces: {}", spaces);
}
/*
Primitive Types
Integer: u8, i8 u16, i16, u32, i32, u64, i64, u128, i128 (number of bits)
Floats: f32, f64
Boolean: bool
Characters: char
Tuples
Arrays
*/

/// Demonstrates Rust's primitive scalar types by printing an example of each.
pub fn run() {
    // Default integer type is i32
    let a = 5;

    // Default float type is f64
    let b = 3.5;

    // Explicit type
    let c: i64 = 9843754938;

    println!("{:?}", (a, b, c));

    // Find maximum sizes. The associated constants `i32::MAX` / `i64::MAX`
    // replace the deprecated `std::i32::MAX` / `std::i64::MAX` module
    // constants; the printed values are identical.
    println!("Max i32: {}", i32::MAX);
    println!("Max i64: {}", i64::MAX);

    // boolean
    let is_active = true;

    // get boolean from expression
    let is_greater = 10 > 15;

    println!("{:?}", (is_active, is_greater));

    // char (a full Unicode scalar value)
    let x = 'a';

    // emoji via a unicode escape
    let face = '\u{1F600}';

    println!("{:?}", (x, face));
}
use std::io::*;
use std::str::FromStr;

/// Reads a single whitespace-delimited token from stdin and parses it as `T`.
///
/// Panics on I/O failure or if the token does not parse.
fn read<T: FromStr>() -> T {
    let stdin = stdin();
    let stdin = stdin.lock();
    let token: String = stdin
        .bytes()
        .map(|c| c.expect("failed to read char") as char)
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect();
    token.parse().ok().expect("failed to parse token")
}

/// Prints the expected total length of a uniformly random tour over the `n`
/// input points.
///
/// Each of the n! orderings is equally likely and a given pair (i, j) is
/// adjacent in 2 * (n-1)! of them, so the expectation is
/// `sum_of_pairwise_distances * 2 / n`. Computing the factor `2/n` directly
/// (instead of `2*(n-1)! / n!`) fixes the usize overflow the factorial-based
/// version hit for n >= 21.
fn main() {
    let n: usize = read();
    let v: Vec<(i32, i32)> = (0..n).map(|_| (read(), read())).collect();

    // Sum of Euclidean distances over all unordered pairs.
    let mut ret = 0.0;
    for i in 0..n {
        for j in i + 1..n {
            ret += dist(&v[i], &v[j]);
        }
    }

    println!("{}", 2.0 * ret / n as f64);
}

/// Euclidean distance between two integer lattice points.
fn dist(a: &(i32, i32), b: &(i32, i32)) -> f64 {
    (((a.0 - b.0).pow(2) + (a.1 - b.1).pow(2)) as f64).sqrt()
}

/// Recursive factorial. No longer used by `main` (it overflows `usize` for
/// n >= 21, which is why the expectation above is computed as 2/n instead);
/// kept so the public surface of this file is unchanged.
#[allow(dead_code)]
fn factorial(n: usize) -> usize {
    if n == 0 {
        1
    } else {
        n * factorial(n - 1)
    }
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// NOTE(review): this file is generated by AutoRust — prefer regenerating over
// hand-editing so fixes are not lost.

/// An asset-conversion job and its current state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Conversion {
    pub id: String,
    pub settings: ConversionSettings,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output: Option<conversion::Output>,
    pub error: Error,
    pub status: ConversionStatus,
    #[serde(rename = "creationTime")]
    pub creation_time: String,
}

/// Types nested under [`Conversion`].
pub mod conversion {
    use super::*;

    /// Result location of a finished conversion.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Output {
        #[serde(rename = "outputAssetUri", default, skip_serializing_if = "Option::is_none")]
        pub output_asset_uri: Option<String>,
    }
}

/// One page of conversions plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConversionList {
    pub conversions: Vec<Conversion>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// Where and how to read the conversion input.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConversionInputSettings {
    #[serde(rename = "storageContainerUri")]
    pub storage_container_uri: String,
    #[serde(rename = "storageContainerReadListSas", default, skip_serializing_if = "Option::is_none")]
    pub storage_container_read_list_sas: Option<String>,
    #[serde(rename = "blobPrefix", default, skip_serializing_if = "Option::is_none")]
    pub blob_prefix: Option<String>,
    #[serde(rename = "relativeInputAssetPath")]
    pub relative_input_asset_path: String,
}

/// Where to write the conversion output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConversionOutputSettings {
    #[serde(rename = "storageContainerUri")]
    pub storage_container_uri: String,
    #[serde(rename = "storageContainerWriteSas", default, skip_serializing_if = "Option::is_none")]
    pub storage_container_write_sas: Option<String>,
    #[serde(rename = "blobPrefix", default, skip_serializing_if = "Option::is_none")]
    pub blob_prefix: Option<String>,
    #[serde(rename = "outputAssetFilename", default, skip_serializing_if = "Option::is_none")]
    pub output_asset_filename: Option<String>,
}

/// Request body for creating a conversion.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreateConversionSettings {
    pub settings: ConversionSettings,
}

/// Input and output locations for a conversion.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConversionSettings {
    #[serde(rename = "inputLocation")]
    pub input_location: ConversionInputSettings,
    #[serde(rename = "outputLocation")]
    pub output_location: ConversionOutputSettings,
}

/// Lifecycle states of a conversion job.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ConversionStatus {
    NotStarted,
    Running,
    Cancelled,
    Failed,
    Succeeded,
}

/// Standard error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    pub error: Error,
}

/// Service error detail; may nest further errors in `details`/`inner_error`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    pub code: String,
    pub message: String,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<Error>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    // NOTE(review): `Option<Box<Error>>` would be the conventional shape;
    // `Box<Option<Error>>` is what the generator emitted and serializes the
    // same, so it is left as-is.
    #[serde(rename = "innerError", default, skip_serializing_if = "Option::is_none")]
    pub inner_error: Box<Option<Error>>,
}

/// Runtime properties of a rendering session.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SessionProperties {
    pub id: String,
    #[serde(rename = "arrInspectorPort", default, skip_serializing_if = "Option::is_none")]
    pub arr_inspector_port: Option<i64>,
    #[serde(rename = "handshakePort", default, skip_serializing_if = "Option::is_none")]
    pub handshake_port: Option<i64>,
    #[serde(rename = "elapsedTimeMinutes", default, skip_serializing_if = "Option::is_none")]
    pub elapsed_time_minutes: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub hostname: Option<String>,
    #[serde(rename = "maxLeaseTimeMinutes", default, skip_serializing_if = "Option::is_none")]
    pub max_lease_time_minutes: Option<i64>,
    pub size: SessionSize,
    pub status: SessionStatus,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub teraflops: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<Error>,
    #[serde(rename = "creationTime", default, skip_serializing_if = "Option::is_none")]
    pub creation_time: Option<String>,
}

/// Request body for creating a session.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreateSessionSettings {
    #[serde(rename = "maxLeaseTimeMinutes")]
    pub max_lease_time_minutes: i64,
    pub size: SessionSize,
}

/// Request body for updating a session's lease time.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateSessionSettings {
    #[serde(rename = "maxLeaseTimeMinutes")]
    pub max_lease_time_minutes: i64,
}

/// Lifecycle states of a session.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SessionStatus {
    Error,
    Expired,
    Starting,
    Ready,
    Stopped,
}

/// Available session hardware sizes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SessionSize {
    Standard,
    Premium,
}

/// One page of sessions plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SessionsList {
    pub sessions: Vec<SessionProperties>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
use std::fmt::{self, Debug, Formatter};
use std::future::Future;
use std::pin::Pin;
use std::str;
use std::task::{Context, Poll};

use base64::Engine;
use futures::future::BoxFuture;
use futures::TryFutureExt;
use http::header::{CONTENT_TYPE, LOCATION};
use http::uri::{Parts, PathAndQuery, Uri};
use rand::RngCore;
use serde::{Deserialize, Serialize};
use tower::ServiceExt;

use crate::db::ConnectionRef;
use crate::util;
use crate::util::consts::APPLICATION_WWW_FORM_URLENCODED;
use crate::util::HttpService;

/// The `application/x-www-form-urlencoded` body a subscriber sends to a hub;
/// the variant is encoded as the `hub.mode` field.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "hub.mode")]
#[serde(rename_all = "lowercase")]
pub enum Form<S = String> {
    Subscribe {
        #[serde(rename = "hub.callback")]
        #[serde(with = "http_serde::uri")]
        callback: Uri,
        #[serde(rename = "hub.topic")]
        topic: S,
        #[serde(rename = "hub.secret")]
        secret: S,
    },
    Unsubscribe {
        #[serde(rename = "hub.callback")]
        #[serde(with = "http_serde::uri")]
        callback: Uri,
        #[serde(rename = "hub.topic")]
        topic: S,
    },
}

/// The verification-of-intent query a hub sends back to the callback URL,
/// again discriminated by `hub.mode`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "hub.mode")]
#[serde(rename_all = "lowercase")]
pub enum Verify<S = String> {
    Subscribe {
        #[serde(rename = "hub.topic")]
        topic: S,
        #[serde(rename = "hub.challenge")]
        challenge: S,
        // Sent as a decimal string, hence the custom deserializer.
        #[serde(rename = "hub.lease_seconds")]
        #[serde(deserialize_with = "crate::util::deserialize_from_str")]
        lease_seconds: u64,
    },
    Unsubscribe {
        #[serde(rename = "hub.topic")]
        topic: S,
        #[serde(rename = "hub.challenge")]
        challenge: S,
    },
}

/// Future resolving when the hub has responded to a (un)subscribe request.
pub struct ResponseFuture<'a, E> {
    // TODO: Use TAIT once it's stable
    // <https://github.com/rust-lang/rust/issues/63063>
    inner: BoxFuture<'a, Result<(), E>>,
}

// Secrets are 32 base64 characters backed by an inline (non-heap) byte array.
const SECRET_LEN: usize = 32;
type Secret = string::String<[u8; SECRET_LEN]>;

impl<E> Future for ResponseFuture<'_, E> {
    type Output = Result<(), E>;

    // Plain delegation to the boxed inner future.
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        self.inner.as_mut().poll(cx)
    }
}

impl<E> Debug for ResponseFuture<'_, E> {
    // The boxed future is opaque, so only the type name is shown.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_struct("ResponseFuture").finish()
    }
}

/// Sends a WebSub subscription request for `topic` to `hub`.
///
/// First records the subscription (obtaining its `id` and a fresh secret)
/// via the database connection, then POSTs a `hub.mode=subscribe` form whose
/// callback URL embeds the id. Database errors are returned immediately;
/// HTTP-level failures surface through the returned future.
pub fn subscribe<'a, 's: 'a, 'b: 'a, C, S, B>(
    callback: &Uri,
    hub: String,
    topic: String,
    client: S,
    conn: C,
) -> Result<ResponseFuture<'a, S::Error>, C::Error>
where
    C: ConnectionRef,
    S: HttpService<B> + Send + 's,
    S::Future: Send,
    B: From<Vec<u8>> + Send + 'b,
{
    let (id, secret) = match create_subscription(&hub, &topic, conn) {
        Ok((id, secret)) => (id, secret),
        Err(e) => return Err(e),
    };

    log::info!("Subscribing to topic {} at hub {} ({})", topic, hub, id);

    let body = serde_urlencoded::to_string(Form::Subscribe {
        callback: make_callback(callback.clone(), id),
        topic: &*topic,
        secret: &*secret,
    })
    .unwrap();

    Ok(send_request(hub, topic, body, client))
}

/// Sends a WebSub unsubscription request for subscription `id`.
///
/// Deletes the local subscription row first, then POSTs a
/// `hub.mode=unsubscribe` form using the same id-bearing callback URL.
pub fn unsubscribe<'a, 's: 'a, 'b: 'a, C, S, B>(
    callback: &Uri,
    id: u64,
    hub: String,
    topic: String,
    client: S,
    mut conn: C,
) -> Result<ResponseFuture<'a, S::Error>, C::Error>
where
    C: ConnectionRef,
    S: HttpService<B> + Send + 's,
    S::Future: Send,
    B: From<Vec<u8>> + Send + 'b,
{
    log::info!("Unsubscribing from topic {} at hub {} ({})", topic, hub, id);

    conn.delete_subscriptions(id)?;

    let callback = make_callback(callback.clone(), id);
    let body = serde_urlencoded::to_string(Form::Unsubscribe {
        callback,
        topic: &topic,
    })
    .unwrap();

    Ok(send_request(hub, topic, body, client))
}

/// POSTs `body` to the hub as a form submission.
///
/// Non-success responses are logged (including the Location header of a
/// redirect) but deliberately do not fail the future — only transport errors
/// propagate through `S::Error`.
fn send_request<'a, 's: 'a, 'b: 'a, S, B>(
    hub: String,
    topic: String,
    body: String,
    client: S,
) -> ResponseFuture<'a, S::Error>
where
    S: HttpService<B> + Send + 's,
    S::Future: Send,
    B: From<Vec<u8>> + Send + 'b,
{
    let req = http::Request::post(&hub)
        .header(CONTENT_TYPE, APPLICATION_WWW_FORM_URLENCODED)
        .body(B::from(body.into_bytes()))
        .unwrap();
    let inner = Box::pin(client.into_service().oneshot(req).map_ok(move |res| {
        let status = res.status();
        if status.is_success() {
            return;
        }
        if status.is_redirection() {
            // TODO: more proper handling.
            if let Some(to) = res.headers().get(LOCATION) {
                let to = String::from_utf8_lossy(to.as_bytes());
                log::warn!("Topic {} at hub {} redirects to {}", topic, hub, to);
            }
        }
        // Note: this also logs for redirects, in addition to the line above.
        log::warn!(
            "Topic {} at hub {} returned HTTP status code {}",
            topic,
            hub,
            status
        );
    }));
    ResponseFuture { inner }
}

/// Generates a fresh secret and records a subscription row, returning the
/// new row id together with the secret.
fn create_subscription<C>(hub: &str, topic: &str, mut conn: C) -> Result<(u64, Secret), C::Error>
where
    C: ConnectionRef,
{
    let mut rng = rand::thread_rng();
    let secret = gen_secret(&mut rng);
    let id = conn.create_subscription(hub, topic, &secret)?;
    Ok((id, secret))
}

/// Appends the encoded subscription id to the callback prefix.
fn make_callback(prefix: Uri, id: u64) -> Uri {
    let id = id.to_le_bytes();
    let id = util::callback_id::encode(&id);
    let mut parts = Parts::from(prefix);
    // `subscriber::prepare_callback_prefix` ensures that `path_and_query` is `Some`.
    let path = format!("{}{}", parts.path_and_query.unwrap(), id);
    parts.path_and_query = Some(PathAndQuery::try_from(path).unwrap());
    parts.try_into().unwrap()
}

/// Fills a 24-byte random buffer and base64url-encodes it (no padding) into
/// an inline 32-byte string, avoiding any heap allocation for the secret.
fn gen_secret<R: RngCore>(mut rng: R) -> Secret {
    let mut ret = [0_u8; SECRET_LEN];

    // 24 random bytes encode to exactly SECRET_LEN (32) base64 characters.
    let mut rand = [0_u8; SECRET_LEN * 6 / 8];
    rng.fill_bytes(&mut rand);

    let result = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode_slice(
        // Passing the whole array by-value would be inefficient
        #[allow(clippy::needless_borrow)]
        &rand,
        &mut ret,
    );
    // The result can safely be ignored since the only possible error is `OutputSliceTooSmall`,
    // which won't happen here, but we are matching against it just in case `base64` crate will add
    // another error variant.
    debug_assert!(match result {
        Ok(len) => len == ret.len(),
        Err(base64::EncodeSliceError::OutputSliceTooSmall) => false,
    });

    unsafe {
        // We cannot assume in unsafe code that the safe code of `base64` crate produces a valid
        // UTF-8 string.
        str::from_utf8(&ret).unwrap();
        // The `unchecked` is still required because `[u8; 32]` doesn't implement
        // `string::StableAsRef`.
        //
        // TODO: Use `string::TryFrom` once the `StableAsRef` implementation lands.
        // cf. <https://github.com/carllerche/string/pull/28>
        //
        // SAFETY:
        //
        // `[u8; 32]` satisfies the requirements of `StableAsRef` trait... maybe. At least, `string`
        // crate itself implements it for `[u8; N]` where `N <= 16`. This seems to be a reasonable
        // assumption to put on a standard library to keep holding in the past, present and future.
        // See also the discussion on trusting the impl of primitive types in the Rustonomicon:
        // <https://doc.rust-lang.org/1.67.1/nomicon/safe-unsafe-meaning.html>
        string::String::from_utf8_unchecked(ret)
    }
}
use std::fs::File; use std::io::{prelude::*, BufReader}; use std::path::PathBuf; use std::time::{Duration, Instant}; use cadical::{self, TimeoutTerminator}; #[allow(dead_code)] fn bench<T>(func: impl FnOnce() -> T) -> (Duration, T) { let t_start = Instant::now(); let res = func(); let t_end = Instant::now(); (t_end - t_start, res) } #[test] fn test_terminator() { let mut solver = cadical::new().unwrap().finish(); let mut pb = PathBuf::from(env!("CARGO_MANIFEST_DIR")); pb.push("tests"); pb.push("prime65537.cnf"); let f = File::open(pb.as_path()).unwrap(); let mut buffer = String::new(); let mut reader = BufReader::new(f); // skip first line for _ in 0..1 { reader.read_line(&mut buffer).unwrap(); } buffer.clear(); reader.read_to_string(&mut buffer).unwrap(); for lit_s in buffer.split_ascii_whitespace() { solver = solver.add_lit(lit_s.parse().unwrap()); } // measure how much time it takes to solving this formula // let (elapsed, res) = bench(|| solver.solve()); // eprintln!("elapsed: {:?}\n{:?}", elapsed, res); // return; // then adjust timeout here to make sure it terminated before solved let t = TimeoutTerminator::new(Duration::from_millis(10)); solver.set_terminator(Some(t)); let mut solver = match solver.solve() { cadical::Result::Unknown(s) => { // terminated before it got solved, thus unknown s } _ => { assert!(false); unreachable!(); } }; // remove Terminator, this will drop previous terminator solver.set_terminator(cadical::NoneTerminator); let _solver = match solver.solve() { cadical::Result::Unsat(s) => s, _ => { assert!(false); unreachable!(); } }; }
// Copyright 2021 Contributors to the Parsec project.
// SPDX-License-Identifier: Apache-2.0
//! Pkcs11 context and initialization types

/// Directly get the PKCS #11 operation from the context structure and check for null pointers.
macro_rules! get_pkcs11 {
    ($pkcs11:expr, $func_name:ident) => {
        ($pkcs11
            .impl_
            .function_list
            .$func_name
            .ok_or(crate::error::Error::NullFunctionPointer)?)
    };
}

mod general_purpose;
mod info;
mod locking;
mod session_management;
mod slot_token_management;

use cryptoki_sys::{CK_FALSE, CK_TRUE};
pub use general_purpose::*;
pub use info::*;
pub use locking::*;

use crate::error::{Error, Result, Rv};
use crate::mechanism::{MechanismInfo, MechanismType};
use crate::session::Session;
use crate::slot::{Slot, SlotInfo, TokenInfo};
use derivative::Derivative;
use log::error;
use std::mem;
use std::path::Path;
use std::ptr;
use std::sync::Arc;

#[derive(Derivative)]
#[derivative(Debug)]
// Implementation of Pkcs11 class that can be enclosed in a single Arc
pub(crate) struct Pkcs11Impl {
    // Even if this field is never read, it is needed for the pointers in function_list to remain
    // valid.
    #[derivative(Debug = "ignore")]
    _pkcs11_lib: cryptoki_sys::Pkcs11,
    pub(crate) function_list: cryptoki_sys::_CK_FUNCTION_LIST,
}

impl Pkcs11Impl {
    // Private finalize call: invokes C_Finalize with a null reserved pointer,
    // erroring if the library did not export the function.
    #[inline(always)]
    fn finalize(&self) -> Result<()> {
        unsafe {
            Rv::from(self
                .function_list
                .C_Finalize
                .ok_or(Error::NullFunctionPointer)?(
                ptr::null_mut()
            ))
            .into_result()
        }
    }
}

impl Drop for Pkcs11Impl {
    // Finalizes the library when the last Arc clone is dropped; a failure
    // here can only be logged since Drop cannot return an error.
    fn drop(&mut self) {
        if let Err(e) = self.finalize() {
            error!("Failed to finalize: {}", e);
        }
    }
}

/// Main PKCS11 context. Should usually be unique per application.
#[derive(Clone, Debug)]
pub struct Pkcs11 {
    pub(crate) impl_: Arc<Pkcs11Impl>,
    // Guards against double initialization; see `initialize` below.
    // NOTE(review): clones made via `Clone` each carry their own copy of this
    // flag — confirm that cloning an initialized context behaves as intended.
    initialized: bool,
}

impl Pkcs11 {
    /// Instantiate a new context from the path of a PKCS11 dynamic library implementation.
    pub fn new<P>(filename: P) -> Result<Self>
    where
        P: AsRef<Path>,
    {
        unsafe {
            let pkcs11_lib =
                cryptoki_sys::Pkcs11::new(filename.as_ref()).map_err(Error::LibraryLoading)?;
            // C_GetFunctionList writes a pointer to the library's function
            // table into `list`; the table itself is then copied out so it
            // lives as long as this context.
            let mut list = mem::MaybeUninit::uninit();
            Rv::from(pkcs11_lib.C_GetFunctionList(list.as_mut_ptr())).into_result()?;
            let list_ptr = *list.as_ptr();
            Ok(Pkcs11 {
                impl_: Arc::new(Pkcs11Impl {
                    _pkcs11_lib: pkcs11_lib,
                    function_list: *list_ptr,
                }),
                initialized: false,
            })
        }
    }

    /// Initialize the PKCS11 library.
    ///
    /// Errors with `AlreadyInitialized` on a second call; the actual C_Initialize
    /// call is delegated to the free `initialize` function (general_purpose module).
    pub fn initialize(&mut self, init_args: CInitializeArgs) -> Result<()> {
        if !self.initialized {
            initialize(self, init_args)
        } else {
            Err(Error::AlreadyInitialized)
        }
    }

    /// Check whether the PKCS11 library has been initialized
    pub fn is_initialized(&self) -> bool {
        self.initialized
    }

    /// Finalize the PKCS11 library. Indicates that the application no longer needs to use PKCS11.
    /// The library is also automatically finalized on drop.
    // Consuming `self` drops this handle; C_Finalize itself runs in
    // `Pkcs11Impl::drop` once the last Arc clone is gone.
    pub fn finalize(self) {}

    /// Returns the information about the library
    pub fn get_library_info(&self) -> Result<Info> {
        get_library_info(self)
    }

    /// Get all slots available with a token
    pub fn get_slots_with_token(&self) -> Result<Vec<Slot>> {
        slot_token_management::get_slots(self, CK_TRUE)
    }

    /// Get all slots that have an initialized token
    pub fn get_slots_with_initialized_token(&self) -> Result<Vec<Slot>> {
        slot_token_management::get_slots_with_initialized_token(self)
    }

    /// Get all slots
    pub fn get_all_slots(&self) -> Result<Vec<Slot>> {
        slot_token_management::get_slots(self, CK_FALSE)
    }

    /// Initialize a token
    ///
    /// Currently will use an empty label for all tokens.
    pub fn init_token(&self, slot: Slot, pin: &str, label: &str) -> Result<()> {
        slot_token_management::init_token(self, slot, pin, label)
    }

    /// Returns the slot info
    pub fn get_slot_info(&self, slot: Slot) -> Result<SlotInfo> {
        slot_token_management::get_slot_info(self, slot)
    }

    /// Returns information about a specific token
    pub fn get_token_info(&self, slot: Slot) -> Result<TokenInfo> {
        slot_token_management::get_token_info(self, slot)
    }

    /// Get all mechanisms support by a slot
    pub fn get_mechanism_list(&self, slot: Slot) -> Result<Vec<MechanismType>> {
        slot_token_management::get_mechanism_list(self, slot)
    }

    /// Get detailed information about a mechanism for a slot
    pub fn get_mechanism_info(&self, slot: Slot, type_: MechanismType) -> Result<MechanismInfo> {
        slot_token_management::get_mechanism_info(self, slot, type_)
    }

    /// Open a new Read-Only session
    ///
    /// For a Read-Write session, use `open_rw_session`
    ///
    /// Note: No callback is set when opening the session.
    pub fn open_ro_session(&self, slot_id: Slot) -> Result<Session> {
        session_management::open_session(self, slot_id, false)
    }

    /// Open a new Read/Write session
    ///
    /// Note: No callback is set when opening the session.
    pub fn open_rw_session(&self, slot_id: Slot) -> Result<Session> {
        session_management::open_session(self, slot_id, true)
    }

    /// Check whether a given PKCS11 spec-defined function is supported by this implementation
    pub fn is_fn_supported(&self, function: Function) -> bool {
        is_fn_supported(self, function)
    }
}
use std::time::Duration;

use super::AggregateTarget;
use crate::{
    bson::{doc, Document},
    bson_util,
    cmap::StreamDescription,
    concern::{ReadConcern, ReadConcernLevel},
    error::{ErrorKind, WriteFailure},
    operation::{
        test::{self, handle_response_test},
        Aggregate, Operation,
    },
    options::{AggregateOptions, Hint},
    Namespace,
};

/// Shared driver: builds an `aggregate` command from the given target,
/// pipeline and options, serializes it, and asserts the resulting document
/// equals `expected_body` (both sides sorted so key order is irrelevant).
fn build_test(
    target: impl Into<AggregateTarget>,
    pipeline: Vec<Document>,
    options: Option<AggregateOptions>,
    mut expected_body: Document,
) {
    let target = target.into();

    let mut aggregate = Aggregate::new(target.clone(), pipeline, options);

    let cmd = aggregate.build(&StreamDescription::new_testing()).unwrap();
    assert_eq!(cmd.name.as_str(), "aggregate");
    assert_eq!(cmd.target_db.as_str(), target.db_name());

    let cmd_bytes = aggregate.serialize_command(cmd).unwrap();
    let mut cmd_doc = bson::from_slice(&cmd_bytes).unwrap();

    // Sort both documents so the comparison ignores field ordering.
    bson_util::sort_document(&mut expected_body);
    bson_util::sort_document(&mut cmd_doc);

    assert_eq!(cmd_doc, expected_body);
}

/// Options (hint, bypassDocumentValidation, readConcern) must all be
/// reflected in the built command.
#[test]
fn build() {
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };

    let pipeline = vec![doc! { "$match": { "x": 3 }}];

    let options = AggregateOptions::builder()
        .hint(Hint::Keys(doc! { "x": 1, "y": 2 }))
        .bypass_document_validation(true)
        .read_concern(ReadConcern::from(ReadConcernLevel::Available))
        .build();

    let expected_body = doc! {
        "aggregate": "test_coll",
        "$db": "test_db",
        "pipeline": bson_util::to_bson_array(&pipeline),
        "cursor": {},
        "hint": {
            "x": 1,
            "y": 2,
        },
        "bypassDocumentValidation": true,
        "readConcern": { "level": "available" },
    };

    build_test(ns, pipeline, Some(options), expected_body);
}

/// `batchSize` must appear in the cursor sub-document for ordinary
/// pipelines, but must be omitted when the pipeline ends in `$out`/`$merge`.
#[test]
fn build_batch_size() {
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };

    let pipeline = Vec::new();

    let mut expected_body = doc! {
        "aggregate": "test_coll",
        "$db": "test_db",
        "pipeline": [],
        "cursor": {},
    };

    // No options and default options should produce the same command.
    build_test(ns.clone(), pipeline.clone(), None, expected_body.clone());

    build_test(
        ns.clone(),
        pipeline.clone(),
        Some(AggregateOptions::default()),
        expected_body.clone(),
    );

    let batch_size_options = AggregateOptions::builder().batch_size(5).build();
    expected_body.insert("cursor", doc! { "batchSize": 5 });
    build_test(
        ns.clone(),
        pipeline,
        Some(batch_size_options.clone()),
        expected_body.clone(),
    );

    // With a $out stage, batchSize must be dropped from the cursor document.
    let out_pipeline = vec![doc! { "$out": "cat" }];
    expected_body.insert("cursor", Document::new());
    expected_body.insert("pipeline", bson_util::to_bson_array(&out_pipeline));
    build_test(
        ns.clone(),
        out_pipeline,
        Some(batch_size_options.clone()),
        expected_body.clone(),
    );

    // Same for a $merge stage.
    let merge_pipeline = vec![doc! {
        "$merge": {
            "into": "out",
        }
    }];
    expected_body.insert("pipeline", bson_util::to_bson_array(&merge_pipeline));
    build_test(ns, merge_pipeline, Some(batch_size_options), expected_body);
}

/// A full namespace targets the collection name; a bare database name
/// targets the whole database (`"aggregate": 1`).
#[test]
fn build_target() {
    let pipeline = Vec::new();

    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };
    let expected_body = doc! {
        "aggregate": "test_coll",
        "$db": "test_db",
        "pipeline": [],
        "cursor": {},
    };
    build_test(ns.clone(), pipeline.clone(), None, expected_body);

    let expected_body = doc! {
        "aggregate": 1,
        "$db": "test_db",
        "pipeline": [],
        "cursor": {}
    };
    build_test(ns.db, pipeline, None, expected_body);
}

/// Only `max_time` maps to the wire-level `maxTimeMS`; `max_await_time` is a
/// client-side cursor option and must not appear in the command.
#[test]
fn build_max_await_time() {
    let options = AggregateOptions::builder()
        .max_await_time(Duration::from_millis(5))
        .max_time(Duration::from_millis(10))
        .build();

    let body = doc! {
        "aggregate": 1,
        "$db": "test_db",
        "cursor": {},
        "maxTimeMS": 10i32,
        "pipeline": []
    };

    build_test("test_db".to_string(), Vec::new(), Some(options), body);
}

/// Delegates to the shared selection-criteria test harness.
#[test]
fn op_selection_criteria() {
    test::op_selection_criteria(|selection_criteria| {
        let options = AggregateOptions {
            selection_criteria,
            ..Default::default()
        };
        Aggregate::new("".to_string(), Vec::new(), Some(options))
    });
}

/// `max_await_time` must be carried through to the cursor specification
/// produced from the server response (and absent when not configured).
#[test]
fn handle_max_await_time() {
    let response = doc! {
        "ok": 1,
        "cursor": {
            "id": 123,
            "ns": "a.b",
            "firstBatch": []
        }
    };

    let aggregate = Aggregate::empty();
    let spec = handle_response_test(&aggregate, response.clone()).unwrap();
    assert!(spec.max_time().is_none());

    let max_await = Duration::from_millis(123);
    let options = AggregateOptions::builder()
        .max_await_time(max_await)
        .build();
    let aggregate = Aggregate::new(Namespace::empty(), Vec::new(), Some(options));
    let spec = handle_response_test(&aggregate, response).unwrap();
    assert_eq!(spec.max_time(), Some(max_await));
}

/// A `writeConcernError` in an otherwise-ok response must surface as a
/// `WriteConcernError`.
#[test]
fn handle_write_concern_error() {
    let response = doc! {
        "ok": 1.0,
        "cursor": {
            "id": 0,
            "ns": "test.test",
            "firstBatch": [],
        },
        "writeConcernError": {
            "code": 64,
            "codeName": "WriteConcernFailed",
            "errmsg": "Waiting for replication timed out",
            "errInfo": {
                "wtimeout": true
            }
        }
    };

    let aggregate = Aggregate::new(
        Namespace::empty(),
        vec![doc! { "$merge": { "into": "a" } }],
        None,
    );

    let error = handle_response_test(&aggregate, response).unwrap_err();
    match *error.kind {
        ErrorKind::Write(WriteFailure::WriteConcernError(_)) => {}
        ref e => panic!("should have gotten WriteConcernError, got {:?} instead", e),
    }
}

/// Garbled responses and responses missing required cursor fields must both
/// produce errors.
#[test]
fn handle_invalid_response() {
    let aggregate = Aggregate::empty();

    let garbled = doc! { "asdfasf": "ASdfasdf" };
    handle_response_test(&aggregate, garbled).unwrap_err();

    let missing_cursor_field = doc! {
        "ok": 1.0,
        "cursor": {
            "ns": "test.test",
            "firstBatch": [],
        }
    };
    handle_response_test(&aggregate, missing_cursor_field).unwrap_err();
}
frame_support::decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin, system=self {
        // NOTE(review): `integrity_test` is deliberately declared twice —
        // presumably this is a compile-fail (UI test) fixture verifying that
        // `decl_module!` rejects a duplicated reserved function; confirm
        // before "fixing" the duplication.
        fn integrity_test() {}
        fn integrity_test() {}
    }
}
// Register API for FTSR2 (EXTI falling trigger selection register 2),
// exposing bit fields FT35..FT38 at bit positions 3..6.
// NOTE(review): svd2rust-style generated code — regenerate rather than
// hand-edit.
#[doc = "Register `FTSR2` reader"]
pub type R = crate::R<FTSR2_SPEC>;
#[doc = "Register `FTSR2` writer"]
pub type W = crate::W<FTSR2_SPEC>;
#[doc = "Field `FT35` reader - FT35"]
pub type FT35_R = crate::BitReader;
#[doc = "Field `FT35` writer - FT35"]
pub type FT35_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FT36` reader - FT36"]
pub type FT36_R = crate::BitReader;
#[doc = "Field `FT36` writer - FT36"]
pub type FT36_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FT37` reader - FT37"]
pub type FT37_R = crate::BitReader;
#[doc = "Field `FT37` writer - FT37"]
pub type FT37_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FT38` reader - FT38"]
pub type FT38_R = crate::BitReader;
#[doc = "Field `FT38` writer - FT38"]
pub type FT38_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 3 - FT35"]
    #[inline(always)]
    pub fn ft35(&self) -> FT35_R {
        FT35_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - FT36"]
    #[inline(always)]
    pub fn ft36(&self) -> FT36_R {
        FT36_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - FT37"]
    #[inline(always)]
    pub fn ft37(&self) -> FT37_R {
        FT37_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - FT38"]
    #[inline(always)]
    pub fn ft38(&self) -> FT38_R {
        FT38_R::new(((self.bits >> 6) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 3 - FT35"]
    #[inline(always)]
    #[must_use]
    pub fn ft35(&mut self) -> FT35_W<FTSR2_SPEC, 3> {
        FT35_W::new(self)
    }
    #[doc = "Bit 4 - FT36"]
    #[inline(always)]
    #[must_use]
    pub fn ft36(&mut self) -> FT36_W<FTSR2_SPEC, 4> {
        FT36_W::new(self)
    }
    #[doc = "Bit 5 - FT37"]
    #[inline(always)]
    #[must_use]
    pub fn ft37(&mut self) -> FT37_W<FTSR2_SPEC, 5> {
        FT37_W::new(self)
    }
    #[doc = "Bit 6 - FT38"]
    #[inline(always)]
    #[must_use]
    pub fn ft38(&mut self) -> FT38_W<FTSR2_SPEC, 6> {
        FT38_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "EXTI falling trigger selection register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ftsr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ftsr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FTSR2_SPEC;
impl crate::RegisterSpec for FTSR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ftsr2::R`](R) reader structure"]
impl crate::Readable for FTSR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ftsr2::W`](W) writer structure"]
impl crate::Writable for FTSR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FTSR2 to value 0"]
impl crate::Resettable for FTSR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// SPDX-License-Identifier: Apache-2.0

//! AES-128-GCM wrapper over the project's raw OpenSSL EVP bindings, used by
//! the `cipher_bench` harness.
//!
//! NOTE(review): several things a production wrapper would do are absent here
//! and look intentional for benchmarking raw update throughput — confirm:
//!  * `EVP_EncryptFinal_ex`/`EVP_DecryptFinal_ex` are never called, so the
//!    GCM authentication tag is never produced or verified;
//!  * the `EVP_CIPHER_CTX` is never freed (no `Drop` impl) — the raw context
//!    leaks when `Aes128GcmCtx` is dropped;
//!  * the IV length is never set via `EVP_CTRL_GCM_SET_IVLEN`, so OpenSSL's
//!    default (12 bytes) is assumed.
use crate::evp;
use cipher_bench::{Aead, AeadBuilder};
use std::os::raw::c_int;
use std::ptr;

/// Builder holding the nonce until a key arrives via
/// `for_encryption`/`for_decryption`.
pub struct Aes128GcmCtxBuilder {
    // Consumed (taken) by `build`; must be set via `nonce()` before building,
    // otherwise `build` panics on the `unwrap`.
    iv: Option<Vec<u8>>,
}

impl Aes128GcmCtxBuilder {
    pub fn new() -> Self {
        Self { iv: None }
    }

    /// Allocates an EVP context and initializes it for AES-128-GCM in the
    /// requested direction. Panics if `nonce()` was not called first.
    fn build(&mut self, key: &[u8], for_encryption: bool) -> Box<dyn Aead> {
        let ctx = unsafe {
            let ctx: *mut evp::EVP_CIPHER_CTX = evp::EVP_CIPHER_CTX_new();
            let cipher = evp::EVP_aes_128_gcm();
            // `take()` leaves `None` behind, so the same builder can be
            // re-primed with a fresh nonce (the tests rely on this).
            let iv = self.iv.take().unwrap();
            // NOTE(review): the return code of EVP_CipherInit_ex is discarded;
            // an init failure would surface later as garbage output.
            let _ = evp::EVP_CipherInit_ex(
                ctx,
                cipher,
                ptr::null_mut::<evp::ENGINE>(),
                key.as_ptr() as _,
                iv.as_ptr() as _,
                for_encryption as _, // 1 = encrypt, 0 = decrypt
            );
            ctx
        };
        Box::new(Aes128GcmCtx { ctx })
    }
}

impl AeadBuilder for Aes128GcmCtxBuilder {
    fn nonce(&mut self, iv: &mut Self) -> &mut Self {
        self.iv.replace(iv.to_vec());
        self
    }
    fn for_encryption(&mut self, key: &[u8]) -> Box<dyn Aead> {
        self.build(key, true)
    }
    fn for_decryption(&mut self, key: &[u8]) -> Box<dyn Aead> {
        self.build(key, false)
    }
}

/// Owns a raw OpenSSL cipher context pointer (never freed — see module note).
pub struct Aes128GcmCtx {
    ctx: *mut evp::EVP_CIPHER_CTX,
}

impl Aead for Aes128GcmCtx {
    /// Single-shot update; caller must size `ctext` >= `ptext` (GCM is a
    /// stream construction, so output length equals input length here).
    fn encrypt(&mut self, ptext: &[u8], ctext: &mut [u8]) {
        let mut outl = ctext.len() as c_int;
        unsafe {
            evp::EVP_EncryptUpdate(
                self.ctx,
                ctext.as_mut_ptr() as *mut _,
                &mut outl,
                ptext.as_ptr() as _,
                ptext.len() as _,
            );
        }
    }

    /// Mirror of `encrypt`; no tag check is performed (see module note).
    fn decrypt(&mut self, ctext: &[u8], ptext: &mut [u8]) {
        let mut outl = ptext.len() as c_int;
        unsafe {
            evp::EVP_DecryptUpdate(
                self.ctx,
                ptext.as_mut_ptr() as *mut _,
                &mut outl,
                ctext.as_ptr() as _,
                ctext.len() as _,
            );
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use cipher_bench::AeadAlgorithm;
    use rand::prelude::*;
    use std::convert::TryInto;

    // Encrypt-then-decrypt with random key/nonce/data must return the input
    // (tag is never checked, so this passes even without finalization).
    #[test]
    fn roundtrip() {
        let mut rng = rand::thread_rng();
        let mut key_bytes = vec![0u8; AeadAlgorithm::Aes128Gcm.key_len()];
        rng.fill(key_bytes.as_mut_slice());
        let mut nonce_bytes = vec![0u8; AeadAlgorithm::Aes128Gcm.nonce_len()];
        rng.fill(nonce_bytes.as_mut_slice());
        let mut data_bytes = vec![0u8; 1024];
        rng.fill(data_bytes.as_mut_slice());
        let mut ptext = vec![0u8; 1024];
        ptext.copy_from_slice(data_bytes.as_slice());
        let mut ctext = vec![0u8; 1024];
        let mut builder = Aes128GcmCtxBuilder::new();
        let mut ctx = builder.nonce(&nonce_bytes).for_encryption(&key_bytes);
        ctx.encrypt(&ptext, &mut ctext);
        let mut ctx = builder.nonce(&nonce_bytes).for_decryption(&key_bytes);
        ctx.decrypt(&ctext, &mut ptext);
        assert_eq!(ptext, data_bytes);
    }
}
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![warn(clippy::implicit_return)] fn test_end_of_fn() -> bool { if true { // no error! return true; } true } #[allow(clippy::needless_bool)] fn test_if_block() -> bool { if true { true } else { false } } #[allow(clippy::match_bool)] #[rustfmt::skip] fn test_match(x: bool) -> bool { match x { true => false, false => { true }, } } #[allow(clippy::never_loop)] fn test_loop() -> bool { loop { break true; } } #[allow(clippy::never_loop)] fn test_loop_with_block() -> bool { loop { { break true; } } } #[allow(clippy::never_loop)] fn test_loop_with_nests() -> bool { loop { if true { break true; } else { let _ = true; } } } fn test_closure() { #[rustfmt::skip] let _ = || { true }; let _ = || true; } fn main() { let _ = test_end_of_fn(); let _ = test_if_block(); let _ = test_match(true); let _ = test_loop(); let _ = test_loop_with_block(); let _ = test_loop_with_nests(); test_closure(); }
use structopt::StructOpt; #[derive(Debug, Clone)] pub struct CleanUpConfig { pub delay_test_results: u32, pub delay_spans: u32, pub delay_reports: u32, pub schedule: u32, } impl Default for CleanUpConfig { fn default() -> Self { CleanUpConfig { delay_test_results: 40 * 24 * 60 * 60 * 1000, delay_spans: 7 * 24 * 60 * 60 * 1000, delay_reports: 14 * 24 * 60 * 60 * 1000, schedule: 60 * 60 * 1000, } } } #[derive(Debug, Clone)] pub struct Config { pub host: String, pub port: u16, pub db_url: String, pub cleanup: CleanUpConfig, } impl Default for Config { fn default() -> Self { Config { host: "0.0.0.0".to_string(), port: 7878, db_url: "127.0.0.1:5042".to_string(), cleanup: CleanUpConfig::default(), } } } impl Config { pub fn load() -> Config { let config = merge_configs(); match config { Ok(config) => config, Err(err) => panic!("{:?}", err), } } } #[derive(Debug, Clone, Deserialize, Default)] #[serde(rename = "cleanup")] pub struct CleanUpConfigLoader { pub delay_test_results: Option<u32>, pub delay_spans: Option<u32>, pub delay_reports: Option<u32>, pub schedule: Option<u32>, } #[derive(Debug, Clone, Deserialize, Default)] pub struct ConfigLoader { pub host: Option<String>, pub port: Option<u16>, pub db_nb_connection: Option<usize>, pub db_url: Option<String>, pub cleanup: Option<CleanUpConfigLoader>, } #[derive(Debug, Clone, Deserialize, StructOpt)] pub struct ConfigLoaderCmd { #[structopt( short = "h", long = "host", env = "HOST", help = "Listen on the specified host, by default 0.0.0.0" )] pub host: Option<String>, #[structopt( short = "p", long = "port", env = "PORT", help = "Listen on the specified host, by default 7878" )] pub port: Option<u16>, #[structopt( long = "db-url", env = "DATABASE_URL", help = "URL to connect to the database" )] pub db_url: Option<String>, } // fn load_config_from_toml() -> ConfigLoader { // let contents = File::open("config.toml").and_then(|mut file| { // let mut contents = String::new(); // file.read_to_string(&mut contents).map(|_| 
contents) // }); // let config: Option<ConfigLoader> = contents // .ok() // .and_then(|contents| toml::from_str(&contents).ok()); // config.unwrap_or_else(ConfigLoader::default) // } fn load_config_from_hocon() -> ConfigLoader { hocon::HoconLoader::new() .load_file("ikrelln.conf") .and_then(hocon::HoconLoader::resolve) .unwrap_or_else(|_| ConfigLoader::default()) } fn merge_configs() -> Result<Config, String> { let from_args = ConfigLoaderCmd::from_args(); let from_hocon = load_config_from_hocon(); let cleanup_from_hocon = from_hocon.cleanup; let default = Config::default(); Ok(Config { port: from_args.port.or(from_hocon.port).unwrap_or(default.port), host: from_args.host.or(from_hocon.host).unwrap_or(default.host), db_url: from_args .db_url .or(from_hocon.db_url) .ok_or("missing DATABASE_URL parameter")?, cleanup: CleanUpConfig { delay_test_results: cleanup_from_hocon .clone() .and_then(|cleanup| cleanup.delay_test_results) .unwrap_or(default.cleanup.delay_test_results), delay_spans: cleanup_from_hocon .clone() .and_then(|cleanup| cleanup.delay_spans) .unwrap_or(default.cleanup.delay_spans), delay_reports: cleanup_from_hocon .clone() .and_then(|cleanup| cleanup.delay_reports) .unwrap_or(default.cleanup.delay_reports), schedule: cleanup_from_hocon .clone() .and_then(|cleanup| cleanup.schedule) .unwrap_or(default.cleanup.schedule), }, }) }
use std::sync::Arc; use vulkano::buffer::{BufferUsage, CpuAccessibleBuffer}; use vulkano::command_buffer::{AutoCommandBufferBuilder, CommandBuffer}; use vulkano::descriptor::descriptor_set::PersistentDescriptorSet; use vulkano::descriptor::PipelineLayoutAbstract; use vulkano::device::{Device, Queue}; use vulkano::pipeline::ComputePipeline; use vulkano::sync::GpuFuture; pub fn compute_shader_multiply(device: Arc<Device>, queue: Arc<Queue>) { let multi_data = 0..65536; let multi_buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false, multi_data) .expect("failed to create par_buffer"); let shader = cs_multiply::Shader::load(device.clone()).expect("failed to create shader module"); let multiplypline = Arc::new( ComputePipeline::new(device.clone(), &shader.main_entry_point(), &()) .expect("failed to create multiplypline"), ); let multi_layout = multiplypline.layout().descriptor_set_layout(0).unwrap(); let multi_set = Arc::new( PersistentDescriptorSet::start(multi_layout.clone()) .add_buffer(multi_buffer.clone()) .unwrap() .build() .unwrap(), ); let multi_cmd_buffer = AutoCommandBufferBuilder::new(device.clone(), queue.family()) .unwrap() .dispatch([1024, 1, 1], multiplypline.clone(), multi_set.clone(), ()) .unwrap() .build() .unwrap(); let multi_finished = multi_cmd_buffer.execute(queue.clone()).unwrap(); multi_finished .then_signal_fence_and_flush() .unwrap() .wait(None) .unwrap(); let multi_data = multi_buffer.read().unwrap(); for (n, val) in multi_data.iter().enumerate() { assert_eq!(*val, n as u32 * 12); } println!("MULTI YAY OKAY!"); } mod cs_multiply { vulkano_shaders::shader! { ty: "compute", path: "shaders/multiply.comp.glsl", } }
#![feature(test)] extern crate test; use sudoku::{Sudoku, BruteForce}; use self::test::Bencher; use std::fs::File; use std::io::BufReader; mod sudoku; #[bench] fn bench_solve_wicked(b: &mut Bencher) { let file = File::open("../samples/wicked.txt").unwrap(); let original = Sudoku::new(BufReader::new(file)); // We measure the time needed to solve the sudoku b.iter(|| { let mut sudoku = original.clone(); sudoku.fast_solve(); if !sudoku.is_completed() { sudoku.brute_force(); } }); }
use specs::*; #[derive(Copy, Clone, Debug, Default, Component)] #[storage(NullStorage)] pub struct IsPlayer; #[derive(Copy, Clone, Debug, Default, Component)] #[storage(NullStorage)] pub struct IsMissile; #[derive(Copy, Clone, Debug, Default, Component)] #[storage(NullStorage)] pub struct IsPowerup; #[derive(Copy, Clone, Debug, Default, Component)] #[storage(NullStorage)] pub struct IsSpectating; #[derive(Copy, Clone, Debug, Default, Component)] #[storage(NullStorage)] pub struct IsDead; #[derive(Copy, Clone, Debug, Default, Component)] #[storage(NullStorage)] pub struct HitMarker;
extern crate byteorder; extern crate csv; extern crate gilrs; extern crate i2cdev; extern crate image; extern crate rust_pigpio; extern crate serde; extern crate serde_json; extern crate robot; //use rust_pigpio::*; use std::fs; use std::process::Command; use std::str; use std::sync::mpsc::{self, TryRecvError}; use std::time::Duration; use std::time::Instant; use std::{thread, time}; use gilrs::Axis::{DPadX, DPadY, LeftStickX, LeftStickY, LeftZ, RightStickX, RightStickY, RightZ}; use gilrs::{Button, Event, EventType, Gilrs}; use serde::{Deserialize, Serialize}; use serde_json::Result; use robot::camera::*; use robot::context::*; use robot::control::*; use robot::hmc5883l::*; use robot::motor::*; use robot::pixel::*; use robot::servo::*; use robot::ssd1327::*; use robot::vl53l0x::*; #[derive(PartialEq)] enum Rotation { StartLeft, StartRight, } #[derive(PartialEq)] enum Activities { Waiting, Searching, MoveTowards, MoveAway, Complete, Done, Finished, Test, } const NONE: i32 = -1; const RED: i32 = 0; const BLUE: i32 = 1; const YELLOW: i32 = 2; const GREEN: i32 = 3; const PURPLE: i32 = 4; const CYAN: i32 = 5; const ALL: i32 = 6; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Calibrate { pub red_lower: [f64; 4], pub red_upper: [f64; 4], pub green_lower: [f64; 4], pub green_upper: [f64; 4], pub blue_lower: [f64; 4], pub blue_upper: [f64; 4], pub yellow_lower: [f64; 4], pub yellow_upper: [f64; 4], } fn _test() { //// Test compass //let mut compass = HMC5883L::new("/dev/i2c-1").unwrap(); //println!("Compass started"); // Test distance sensors let mut leftfront = VL53L0X::new("/dev/i2c-5").unwrap(); println!("left front started"); let mut leftback = VL53L0X::new("/dev/i2c-6").unwrap(); println!("left back started"); let mut back = VL53L0X::new("/dev/i2c-7").unwrap(); println!("back started"); let mut front = VL53L0X::new("/dev/i2c-8").unwrap(); println!("front started"); let mut rightfront = VL53L0X::new("/dev/i2c-10").unwrap(); println!("right front started"); 
let mut rightback = VL53L0X::new("/dev/i2c-9").unwrap(); println!("right back started"); loop { //println!( //"\x1B[HCurrent Heading {:.*} ", //1, //compass.read_degrees().unwrap() //); println!("Left Back Distance {:.*} ", 1, leftback.read()); println!("Left Front Distance {:.*} ", 1, leftfront.read()); println!("Back Distance {:.*} ", 1, back.read()); println!("Front Distance {:.*} ", 1, front.read()); println!("Right Back Distance {:.*} ", 1, rightback.read()); println!("Right Front Distance {:.*} ", 1, rightfront.read()); } } fn _test2() { let mut cam = build_camera(); loop { let colour = cam.get_colour(true); //print_colour(colour); } } fn _test3() { let mut pixel = build_pixel(); loop { pixel.red(); pixel.render(); println!("Red"); thread::sleep(time::Duration::from_millis(1000)); pixel.green(); pixel.render(); println!("Green"); thread::sleep(time::Duration::from_millis(1000)); pixel.blue(); pixel.render(); println!("Blue"); thread::sleep(time::Duration::from_millis(1000)); pixel.yellow(); pixel.render(); println!("Yellow"); thread::sleep(time::Duration::from_millis(1000)); } } fn _test4() { let mut display = SSD1327::new("/dev/i2c-3"); display.begin().unwrap(); display.clear(); display.draw_text(4, 4, "Canyon...", LT_GREY).unwrap(); let tiny = image::open("The Canyons of Mars Menu Item.jpg").unwrap(); display.draw_image(0, 16, tiny).unwrap(); display.update_all().unwrap(); let mut pixel = build_pixel(); pixel.red(); pixel.render(); println!("Red"); } fn _test5() { let mut control = build_control(); control.init(); control.gear = 1; control.speed(800, 800, 800, 800); thread::sleep(time::Duration::from_millis(5000)); control.gear = 2; control.speed(800, 800, 800, 800); thread::sleep(time::Duration::from_millis(5000)); control.gear = 3; control.speed(800, 800, 800, 800); thread::sleep(time::Duration::from_millis(5000)); control.gear = 4; control.speed(800, 800, 800, 800); thread::sleep(time::Duration::from_millis(5000)); control.stop(); } const MINDIST: u16 = 
300; const MAXDIST: u16 = 500; const SPEED: i32 = 350; fn get_deceleration(distance: u16, min: u16) -> f64 { if distance < min { return 0.0; } let distance_togo = distance - min; let mut decel = 1.0; if distance_togo < min { decel = ((distance_togo as f64) / min as f64); if decel < 0.4 { decel = 0.4; } } //println!("decel is: {:?}", decel); return decel; } //fn _calc_target(original: f32, heading: f32) -> f32 { //let mut target = 0.0; //if heading > original { //// heading 270 > original 5 //// Is the distance between (360 - heading + original) (A) //// 360 - 270 + 5 = 95 //let a = 360.0 - heading + original; //// Greater than distance between heading - original (B) //// 270 - 5 = 265 //let b = heading - original; //if a < b { //target = a; //} else { //target = -b; //} //} else { //// heading 5 < original 270 //// Is the distance between (360 - original + heading) (A) //// 360 - 270 + 5 = 95 //let a = 360.0 - heading + original; //// Greater than distance between original - heading (B) //// 270 - 5 = 265 //let b = heading - original; //if a < b { //target = -a; //} else { //target = b; //} //} //return target; //} //fn _align(original: f32, compass: &mut HMC5883L, control: &mut Control, cam: &mut Camera, gear: i32) { //control.stop(); //let mut heading = compass.read_degrees().unwrap(); //let mut diff = calc_target(original, heading); //if diff < 0.5 && diff > -0.5 { //return; //} //if diff > 0.0 { //while diff > 1.0 { //heading = compass.read_degrees().unwrap(); //diff = calc_target(original, heading); //println!( //"Original {:#?}° Current {:#?}° Diff {:#?}", //original, heading, diff //); //control.turn_left(SPEED, gear); //cam.discard_video(); //} //} else { //while diff < -1.0 { //heading = compass.read_degrees().unwrap(); //diff = calc_target(original, heading); //println!( //"Original {:#?}° Current {:#?}° Diff {:#?}", //original, heading, diff //); //control.turn_right(SPEED, gear); //cam.discard_video(); //} //} //control.stop(); //} fn do_canyon(context: 
&mut Context) { let interval = time::Duration::from_millis(50); //let mut compass = HMC5883L::new("/dev/i2c-1").unwrap(); // Distance sensors let mut front = try_open_tof("/dev/i2c-8"); let mut leftfront = try_open_tof("/dev/i2c-5"); let mut rightfront = try_open_tof("/dev/i2c-10"); let mut back = try_open_tof("/dev/i2c-7"); println!("front started"); println!("left front started"); println!("right front started"); println!("back started"); set_continous(&mut front); set_continous(&mut leftfront); set_continous(&mut rightfront); set_continous(&mut back); let mut control = build_control(); control.init(); let mut distance: u16 = 0; let mut direction = "Forward"; let mut prev_dir = "None"; let mut left_rear_speed: i32; let mut right_rear_speed: i32; let mut left_front_speed: i32; let mut right_front_speed: i32; let mut quit = false; let mut running = false; let mut gear = 1; control.set_gear(gear); control.set_bias(0); let mut decel = 0.0; context.pixel.all_on(); context.pixel.render(); context.display.clear(); context .display .draw_text(4, 4, "Press start...", WHITE) .unwrap(); context.display.update_all().unwrap(); let mut current_colour = NONE; let mut previous_colour = NONE; while !quit { while let Some(event) = context.gilrs.next_event() { match event { Event { id: _, event: EventType::ButtonPressed(Button::Start, _), .. } => { // Start button -> running context.pixel.all_off(); context.pixel.render(); context.display.clear(); context .display .draw_text(4, 4, "Running ", WHITE) .unwrap(); context.display.update().unwrap(); running = true; } Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. 
} => { println!("Mode...."); // Mode to exit quit = true; break; } _ => (), }; } if running { let diff: i32 = 0; let front_dist = get_distance(&mut front, true); let right_dist = get_distance(&mut rightfront, true); let left_dist = get_distance(&mut leftfront, true); let back_dist = get_distance(&mut back, true); if direction == "Forward" { decel = get_deceleration(front_dist, MINDIST); if front_dist < MINDIST && right_dist < MAXDIST { direction = "Left"; } } if direction == "Left" { decel = get_deceleration(left_dist, MINDIST); if left_dist < MINDIST && back_dist < MAXDIST { direction = "Forward"; } if left_dist < MINDIST && front_dist < MAXDIST { direction = "Back"; } } if direction == "Back" { decel = get_deceleration(back_dist, MINDIST); if back_dist < MINDIST && left_dist < MAXDIST { control.stop(); direction = "Right"; } if back_dist < MINDIST && right_dist < MAXDIST { control.stop(); direction = "Left"; } } if direction == "Right" { decel = get_deceleration(right_dist, MINDIST); if right_dist < MINDIST { direction = "Back"; } } println!( "Direction {:#?} Fr {:#?}mm Lf {:#?}mm Rt {:#?}mm Bk {:#?}mm Decel {:?} ", direction, front_dist, left_dist, right_dist, back_dist, decel ); if direction == "Forward" { let bias = 40; left_rear_speed = SPEED + bias; right_rear_speed = SPEED * -1; left_front_speed = SPEED + bias; right_front_speed = SPEED * -1; current_colour = GREEN; } else if direction == "Back" { let bias = 40; left_rear_speed = (SPEED + bias) * -1; right_rear_speed = SPEED; left_front_speed = (SPEED + bias) * -1; right_front_speed = SPEED; current_colour = RED; } else if direction == "Right" { // Strafe Right let bias = 30; left_front_speed = SPEED * -1; left_rear_speed = SPEED - bias; right_front_speed = SPEED * -1; right_rear_speed = SPEED - bias; current_colour = PURPLE; } else if direction == "Left" { // Strafe Left let bias = 10; left_front_speed = SPEED; left_rear_speed = (SPEED + bias) * -1; right_front_speed = SPEED; right_rear_speed = (SPEED + 
bias) * -1; current_colour = CYAN; } else { left_rear_speed = 0; right_rear_speed = 0; left_front_speed = 0; right_front_speed = 0; current_colour = NONE; } left_rear_speed = ((left_rear_speed as f64) * decel) as i32; right_rear_speed = ((right_rear_speed as f64) * decel) as i32; left_front_speed = ((left_front_speed as f64) * decel) as i32; right_front_speed = ((right_front_speed as f64) * decel) as i32; if current_colour != previous_colour { if current_colour == RED { context.pixel.red(); } else if current_colour == GREEN { context.pixel.green(); } else if current_colour == BLUE { context.pixel.blue(); } else if current_colour == YELLOW { context.pixel.yellow(); } else if current_colour == PURPLE { context.pixel.purple(); } else if current_colour == CYAN { context.pixel.cyan(); } else if current_colour == ALL { context.pixel.white(); } else if current_colour == NONE { context.pixel.all_off(); } context.pixel.render(); previous_colour = current_colour; } println!( "Speeds lf {:?}, lr {:#?}, rf {:#?}, rr {:#?}", left_front_speed, left_rear_speed, right_front_speed, right_rear_speed ); control.speed( left_rear_speed, right_rear_speed, left_front_speed, right_front_speed, ); } } context.pixel.all_off(); context.pixel.render(); control.stop(); context.display.clear(); } fn print_colour(context: &mut Context, colour: i32) -> &str { match colour { RED => { println!("Found Red!"); context.pixel.red(); context.pixel.render(); return "Red"; } BLUE => { println!("Found Blue!"); context.pixel.blue(); context.pixel.render(); return "Blue"; } YELLOW => { println!("Found Yellow!"); context.pixel.yellow(); context.pixel.render(); return "Yellow"; } GREEN => { println!("Found Green!"); context.pixel.green(); context.pixel.render(); return "Green"; } _ => { println!("Found Unknown"); context.pixel.all_off(); context.pixel.render(); return "unknown"; } } } fn do_hubble(context: &mut Context, mut locations: [f32; 4], mut order: [i32; 4]){ const MINIMUM : u16 = 200; let interval = 
time::Duration::from_millis(50); // Distance sensors let mut front = try_open_tof("/dev/i2c-8"); let mut back = try_open_tof("/dev/i2c-7"); println!("front started"); println!("back started"); println!("right back started"); println!("left back started"); set_continous(&mut front); set_continous(&mut back); let mut front_dist = get_distance(&mut front, true); let mut back_dist = get_distance(&mut back, true); let mut leftback = try_open_tof("/dev/i2c-6"); let mut rightfront = try_open_tof("/dev/i2c-10"); set_continous(&mut leftback); set_continous(&mut rightfront); let mut right_dist = get_distance(&mut rightfront, true); let mut left_dist = get_distance(&mut leftback, true); let mut control = build_control(); control.init(); let mut distance: u16 = 0; let mut direction = "Start"; let mut prev_dir = "None"; let mut left_rear_speed: i32; let mut right_rear_speed: i32; let mut left_front_speed: i32; let mut right_front_speed: i32; let mut quit = false; let mut running = false; let mut gear = 1; control.set_gear(gear); control.set_bias(0); let mut decel = 0.0; context.pixel.all_on(); context.pixel.render(); context.display.clear(); context .display .draw_text(4, 4, "Press start...", WHITE) .unwrap(); context.display.update_all().unwrap(); let mut current_colour = NONE; let mut previous_colour = NONE; while !quit { while let Some(event) = context.gilrs.next_event() { match event { Event { id: _, event: EventType::ButtonPressed(Button::Start, _), .. } => { // Start button -> running context.pixel.all_off(); context.pixel.render(); context.display.clear(); context .display .draw_text(4, 4, "Running ", WHITE) .unwrap(); context.display.update().unwrap(); running = true; } Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. 
} => { println!("Mode...."); // Mode to exit quit = true; break; } _ => (), }; } if running { let diff: i32 = 0; front_dist = get_distance(&mut front, true); right_dist = get_distance(&mut rightfront, true); left_dist = get_distance(&mut leftback, true); back_dist = get_distance(&mut back, true); // Hubble if direction == "Start" { decel = get_deceleration(front_dist, MINIMUM); if front_dist < MINIMUM { direction = "Left"; } } if direction == "Left" { decel = get_deceleration(left_dist, MINIMUM); if left_dist < MINIMUM { direction = "Back"; } } if direction == "Back" { decel = get_deceleration(back_dist, MINIMUM); if back_dist < MINIMUM { direction = "Right"; } } if direction == "Right" { decel = get_deceleration(right_dist, MINIMUM); if right_dist < MINIMUM { direction = "Forward"; } } if direction == "Forward" { decel = get_deceleration(front_dist, MINIMUM); if front_dist < MINIMUM { direction = "Stop"; } } println!( "Direction {:#?} Fr {:#?}mm Lf {:#?}mm Rt {:#?}mm Bk {:#?}mm Decel {:?} ", direction, front_dist, left_dist, right_dist, back_dist, decel ); if direction == "Start" || direction == "Forward" { let bias = 40; left_rear_speed = SPEED + bias; right_rear_speed = SPEED * -1; left_front_speed = SPEED + bias; right_front_speed = SPEED * -1; current_colour = GREEN; } else if direction == "Back" { let bias = 40; left_rear_speed = (SPEED + bias) * -1; right_rear_speed = SPEED; left_front_speed = (SPEED + bias) * -1; right_front_speed = SPEED; current_colour = RED; } else if direction == "Right" { // Strafe Right let bias = 30; left_front_speed = SPEED * -1; left_rear_speed = SPEED - bias; right_front_speed = SPEED * -1; right_rear_speed = SPEED - bias; current_colour = PURPLE; } else if direction == "Left" { // Strafe Left let bias = 10; left_front_speed = SPEED; left_rear_speed = (SPEED + bias) * -1; right_front_speed = SPEED; right_rear_speed = (SPEED + bias) * -1; current_colour = CYAN; } else { left_rear_speed = 0; right_rear_speed = 0; left_front_speed = 
0; right_front_speed = 0; current_colour = NONE; } left_rear_speed = ((left_rear_speed as f64) * decel) as i32; right_rear_speed = ((right_rear_speed as f64) * decel) as i32; left_front_speed = ((left_front_speed as f64) * decel) as i32; right_front_speed = ((right_front_speed as f64) * decel) as i32; if current_colour != previous_colour { if current_colour == RED { context.pixel.red(); } else if current_colour == GREEN { context.pixel.green(); } else if current_colour == BLUE { context.pixel.blue(); } else if current_colour == YELLOW { context.pixel.yellow(); } else if current_colour == PURPLE { context.pixel.purple(); } else if current_colour == CYAN { context.pixel.cyan(); } else if current_colour == ALL { context.pixel.white(); } else if current_colour == NONE { context.pixel.all_off(); } context.pixel.render(); previous_colour = current_colour; } //println!( //"Speeds lf {:?}, lr {:#?}, rf {:#?}, rr {:#?}", //left_front_speed, left_rear_speed, right_front_speed, right_rear_speed //); control.speed( left_rear_speed, right_rear_speed, left_front_speed, right_front_speed, ); } } context.pixel.all_off(); context.pixel.render(); control.stop(); context.display.clear(); } use std::sync::{Arc, Mutex}; fn _do_hubble(context: &mut Context, mut locations: [f32; 4], mut order: [i32; 4]) { const DRIVING_SPEED: i32 = 1000; const TURNING_SPEED: i32 = 400; const MIN_DIST: u16 = 100; const MAX_DIST: u16 = 600; let mut left_rear_speed: i32; let mut right_rear_speed: i32; let mut left_front_speed: i32; let mut right_front_speed: i32; let interval = time::Duration::from_millis(2000); context.pixel.all_on(); let mut control = build_control(); control.init(); let mut pos = 0; //let mut compass = HMC5883L::new("/dev/i2c-1").unwrap(); let mut front = try_open_tof("/dev/i2c-8"); let mut back = try_open_tof("/dev/i2c-7"); println!("front started"); println!("back started"); set_continous(&mut front); set_continous(&mut back); context.display.clear(); context .display .draw_text(4, 4, 
"Press Left(E)...", WHITE) .unwrap(); context.display.update_all().unwrap(); let mut gear = 4; control.set_gear(gear); control.set_bias(0); let mut running = false; let mut quit = false; //let mut heading = compass.read_degrees().unwrap(); //let mut target = compass.read_degrees().unwrap(); let colour: i32 = NONE; let shared = Arc::new(colour); let (command_tx, command_rx) = mpsc::channel(); let t = thread::spawn(move || { let mut col = Arc::clone(&shared); println!("Thread Starting"); let mut cam = build_camera(); load_calibration(&mut cam); cam.discard_video(); loop { let value = cam.get_colour(false); match command_rx.try_recv() { Ok("X") | Err(TryRecvError::Disconnected) => { println!("Terminating."); break; } Ok(&_) | Err(TryRecvError::Empty) => {} } } }); while !quit { while let Some(event) = context.gilrs.next_event() { match event { Event { id: _, event: EventType::ButtonPressed(Button::East, _), .. } => { println!("East Pressed"); // Start button -> running context.pixel.all_off(); context .display .draw_text(4, 4, " ", WHITE) .unwrap(); context.display.update().unwrap(); running = true; } // Needs gear changing here Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. 
} => { println!("Mode"); // Mode to exit let _ = command_tx.send("X"); quit = true; break; } _ => (), }; } // Main State running or not //if running { //let mut front_dist = get_distance(&mut front, true); //let mut back_dist = get_distance(&mut back, true); //let colour = cam.get_colour(true); //heading = compass.read_degrees().unwrap(); //// first time through && locations[index] == 0.0 //if order[0] == NONE || order[1] == NONE || order[2] == NONE || order[3] == NONE { //if colour == RED || colour == BLUE || colour == YELLOW || colour == GREEN { //print_colour(context, colour); //let index = colour as usize; //println!("Index {}", index); //if locations[index] == 0.0 { //control.stop(); //println!("Heading {}", heading); //locations[index] = heading; //order[pos] = colour; //print_colour(context, colour); //pos = pos + 1; //} //} //control.turn_left(TURNING_SPEED, gear); //} else { //println!("Locations: {:#?}", locations); //running = false; //for i in RED..GREEN { //let index = i as usize; //println!("Searching for {:#?}", print_colour(context, colour)); //align(locations[index], &mut compass, &mut control, &mut cam, 4); //loop { //cam.discard_video(); //front_dist = get_distance(&mut front, true); //back_dist = get_distance(&mut back, true); //if front_dist < MIN_DIST { //control.stop(); //break; //} //let decel = get_deceleration(front_dist); //let bias = 50; //left_rear_speed = DRIVING_SPEED + bias; //right_rear_speed = DRIVING_SPEED * -1; //left_front_speed = DRIVING_SPEED + bias; //right_front_speed = DRIVING_SPEED * -1; //left_rear_speed = ((left_rear_speed as f64) * decel) as i32; //right_rear_speed = ((right_rear_speed as f64) * decel) as i32; //left_front_speed = ((left_front_speed as f64) * decel) as i32; //right_front_speed = ((right_front_speed as f64) * decel) as i32; ////println!( ////"Forward Speeds lf {:?}, lr {:#?}, rf {:#?}, rr {:#?}", ////left_front_speed, left_rear_speed, right_front_speed, right_rear_speed ////); //control.speed( 
//left_rear_speed, //right_rear_speed, //left_front_speed, //right_front_speed, //); //} //println!("Backward"); //loop { //cam.discard_video(); //front_dist = get_distance(&mut front, true); //back_dist = get_distance(&mut back, true); //if front_dist > MAX_DIST { //control.stop(); //break; //} //let decel = get_deceleration(back_dist); //let bias = 50; //left_rear_speed = (DRIVING_SPEED + bias) * -1; //right_rear_speed = DRIVING_SPEED; //left_front_speed = (DRIVING_SPEED + bias) * -1; //right_front_speed = DRIVING_SPEED; //left_rear_speed = ((left_rear_speed as f64) * decel) as i32; //right_rear_speed = ((right_rear_speed as f64) * decel) as i32; //left_front_speed = ((left_front_speed as f64) * decel) as i32; //right_front_speed = ((right_front_speed as f64) * decel) as i32; ////println!( ////"Backward Speeds lf {:?}, lr {:#?}, rf {:#?}, rr {:#?}", ////left_front_speed, left_rear_speed, right_front_speed, right_rear_speed ////); //control.speed( //left_rear_speed, //right_rear_speed, //left_front_speed, //right_front_speed, //); //} //} //} //} } control.stop(); context.display.clear(); context.pixel.all_off(); thread::sleep(interval); } fn do_straight(context: &mut Context) { let interval = time::Duration::from_millis(2000); let mut control = build_control(); control.init(); control.set_gear(2); // Distance sensors let mut left = try_open_tof("/dev/i2c-5"); let mut right = try_open_tof("/dev/i2c-10"); println!("left front started"); println!("right front started"); set_continous(&mut left); set_continous(&mut right); context.pixel.all_on(); context.pixel.render(); context.display.clear(); context .display .draw_text(4, 4, "Press start...", WHITE) .unwrap(); context.display.update_all().unwrap(); let mut target: i32 = 0; let mut quit = false; let mut running = false; while !quit { while let Some(event) = context.gilrs.next_event() { match event { Event { id: _, event: EventType::ButtonPressed(Button::Start, _), .. 
} => { println!("Select Pressed"); // Start button -> running context.pixel.all_off(); target = get_distance(&mut left, true) as i32 - get_distance(&mut right, true) as i32; context .display .draw_text(4, 4, " ", WHITE) .unwrap(); context.display.update().unwrap(); println!("Target {:?}", target); running = true; } Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. } => { println!("Mode...."); // Mode to exit quit = true; break; } _ => (), }; } if running { let mut left_rear_speed: i32 = 1000; let mut right_rear_speed: i32 = -1000; let mut left_front_speed: i32 = 1000; let mut right_front_speed: i32 = -1000; let right_dist: i32 = get_distance(&mut right, true) as i32; let left_dist: i32 = get_distance(&mut left, true) as i32; println!( "Target {:#?}mm, Right {:#?}mm, Left {:#?}mm ", target, right_dist, left_dist ); let difference: i32 = (target - (left_dist - right_dist)) * 5; if difference > 15 { // turn right context.pixel.right_red(); context.pixel.render(); println!("Turn Right {:04} ", difference); left_front_speed = left_front_speed; //+ difference; left_rear_speed = left_rear_speed; //+ difference; right_front_speed = right_front_speed + difference; right_rear_speed = right_rear_speed + difference; } else if difference < -15 { // turn left context.pixel.left_red(); context.pixel.render(); println!("Turn Left {:04} ", -difference); left_front_speed = left_front_speed + difference; left_rear_speed = left_rear_speed + difference; right_front_speed = right_front_speed; //+ difference; right_rear_speed = right_rear_speed; //+ difference; } else { //println!("Straight"); context.pixel.all_off(); context.pixel.render(); } { //if left_rear_speed != 0 || right_rear_speed != 0 || left_front_speed != 0 || right_front_speed != 0 { //println!(" {0}, {1}, {2}, {3} ", left_rear_speed, right_rear_speed, left_front_speed, right_front_speed ); //} } control.speed( left_rear_speed, right_rear_speed, left_front_speed, right_front_speed, ); } } control.stop(); 
context.display.clear(); context.pixel.all_off(); thread::sleep(interval); } fn do_wheels_rc(context: &mut Context) { const DEADZONE: i32 = 50; let mut control = build_control(); control.init(); let servo = build_servo(21); let mut gear = 1; let mut quit = false; let mut left_stick_y = 0; let mut right_stick_y = 0; let mut current_colour = NONE; let mut previous_colour = NONE; while !quit { while let Some(event) = context.gilrs.next_event() { match event { Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. } => { println!("Mode Pressed"); quit = true; break; } Event { id: _, event: EventType::ButtonPressed(Button::DPadUp, _), .. } => { println!("DPad Up Pressed"); servo.set_pulse_width(2500); } Event { id: _, event: EventType::ButtonPressed(Button::DPadDown, _), .. } => { println!("DPad Up Pressed"); servo.set_pulse_width(500); } Event { id: _, event: EventType::ButtonPressed(Button::North, _), .. } => { gear = 1; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::ButtonPressed(Button::West, _), .. } => { gear = 2; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::ButtonPressed(Button::East, _), .. } => { gear = 3; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::ButtonPressed(Button::South, _), .. } => { gear = 4; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::AxisChanged(LeftStickY, value, _), .. } => { //println!("Left Stick Y {:?}", value); left_stick_y = (value * 1000.0) as i32; } Event { id: _, event: EventType::AxisChanged(RightStickY, value, _), .. 
} => { //println!("Right Stick Y {:?}", value); right_stick_y = (value * 1000.0) as i32; } _ => { break; } }; let mut left_rear_speed: i32; let mut right_rear_speed: i32; let mut left_front_speed: i32; let mut right_front_speed: i32; if left_stick_y > DEADZONE && right_stick_y > DEADZONE { // Forward left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = GREEN; } else if left_stick_y < -DEADZONE && right_stick_y < -DEADZONE { // Backwards left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = RED; } else if left_stick_y > DEADZONE && right_stick_y < -DEADZONE { // Turn Sharp Right left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = YELLOW; } else if left_stick_y < -DEADZONE && right_stick_y > DEADZONE { // Turn Sharp Left left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = BLUE; } else if left_stick_y > DEADZONE && right_stick_y == 0 { // Turn Right left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = YELLOW; } else if left_stick_y == 0 && right_stick_y > DEADZONE { // Turn Left left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = BLUE; } else { left_rear_speed = 0; right_rear_speed = 0; left_front_speed = 0; right_front_speed = 0; current_colour = NONE; } left_front_speed = left_front_speed / gear; right_front_speed = right_front_speed / gear; left_rear_speed = left_rear_speed / gear; right_rear_speed = right_rear_speed / gear; if left_rear_speed != 0 || right_rear_speed != 0 
|| left_front_speed != 0 || right_front_speed != 0 { println!( "Speed left rear: {0}, right rear: {1}, left front: {2} right front: {3}", left_rear_speed, right_rear_speed, left_front_speed, right_front_speed ); } if current_colour != previous_colour { if current_colour == RED { context.pixel.red(); } else if current_colour == GREEN { context.pixel.green(); } else if current_colour == BLUE { context.pixel.blue(); } else if current_colour == YELLOW { context.pixel.yellow(); } else if current_colour == PURPLE { context.pixel.purple(); } else if current_colour == CYAN { context.pixel.cyan(); } else if current_colour == ALL { context.pixel.white(); } else if current_colour == NONE { context.pixel.all_off(); } context.pixel.render(); previous_colour = current_colour; } control.speed( left_rear_speed, right_rear_speed, left_front_speed, right_front_speed, ); } } control.stop(); context.display.clear(); } fn do_mecanum_rc(context: &mut Context) { const DEADZONE: i32 = 200; let mut control = build_control(); control.init(); let servo = build_servo(21); let mut gear = 3; control.set_gear(gear); let mut left_stick_x = 0; let mut left_stick_y = 0; let mut right_stick_y = 0; let mut right_stick_x = 0; let mut current_colour = NONE; let mut previous_colour = NONE; let mut dpad = 0; let mut quit = false; while !quit { while let Some(event) = context.gilrs.next_event() { match event { Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. } => { println!("Mode Pressed"); quit = true; break; } Event { id: _, event: EventType::ButtonPressed(Button::DPadUp, _), .. } => { println!("DPad Up Pressed"); servo.set_pulse_width(2500); } Event { id: _, event: EventType::ButtonPressed(Button::DPadDown, _), .. } => { println!("DPad Up Pressed"); servo.set_pulse_width(500); } Event { id: _, event: EventType::ButtonPressed(Button::North, _), .. 
} => { gear = 1; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::ButtonPressed(Button::West, _), .. } => { gear = 2; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::ButtonPressed(Button::East, _), .. } => { gear = 3; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::ButtonPressed(Button::South, _), .. } => { gear = 4; context .display .draw_text(4, 4, &gear.to_string(), LT_GREY) .unwrap(); context.display.update().unwrap(); println!(" {0} ", gear); } Event { id: _, event: EventType::AxisChanged(LeftStickY, value, _), .. } => { //println!("Left Stick Y {:?}", value); left_stick_y = (value * 1000.0) as i32; } Event { id: _, event: EventType::AxisChanged(LeftStickX, value, _), .. } => { //println!("Left Stick X {:?}", value); left_stick_x = (value * 1000.0) as i32; } Event { id: _, event: EventType::AxisChanged(RightStickY, value, _), .. } => { //println!("Right Stick Y {:?}", value); right_stick_y = (value * 1000.0) as i32; } Event { id: _, event: EventType::AxisChanged(RightStickX, value, _), .. 
} => { //println!("Right Stick X {:?}", value); right_stick_x = (value * 1000.0) as i32; } _ => { break; } }; let mut left_rear_speed: i32; let mut right_rear_speed: i32; let mut left_front_speed: i32; let mut right_front_speed: i32; if left_stick_y > DEADZONE && right_stick_y > DEADZONE { // Forward left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = GREEN; } else if left_stick_y < -DEADZONE && right_stick_y < -DEADZONE { // Backwards left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = RED; } else if left_stick_y > DEADZONE && right_stick_y < -DEADZONE { // Turn Right left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = YELLOW; } else if left_stick_y < -DEADZONE && right_stick_y > DEADZONE { // Turn Left left_front_speed = left_stick_y; left_rear_speed = left_stick_y; right_front_speed = -right_stick_y; right_rear_speed = -right_stick_y; current_colour = BLUE; } else if left_stick_x < -DEADZONE && right_stick_x < -DEADZONE { // Strafe left left_front_speed = -left_stick_x; left_rear_speed = left_stick_x; right_front_speed = -right_stick_x; right_rear_speed = right_stick_x; current_colour = PURPLE; } else if left_stick_x > DEADZONE && right_stick_x > DEADZONE { // Strafe Right left_front_speed = -left_stick_x; left_rear_speed = left_stick_x; right_front_speed = -right_stick_x; right_rear_speed = right_stick_x; current_colour = CYAN; } else { left_rear_speed = 0; right_rear_speed = 0; left_front_speed = 0; right_front_speed = 0; current_colour = NONE; } if left_rear_speed != 0 || right_rear_speed != 0 || left_front_speed != 0 || right_front_speed != 0 { println!( "Stick left XY: {0},{1} right X:Y {2},{3}", left_stick_x, left_stick_y, right_stick_x, right_stick_y ); 
println!( "Speed left rear: {0}, right rear: {1}, left front: {2} right front: {3}", left_rear_speed, right_rear_speed, left_front_speed, right_front_speed ); } if current_colour != previous_colour { if current_colour == RED { context.pixel.red(); } else if current_colour == GREEN { context.pixel.green(); } else if current_colour == BLUE { context.pixel.blue(); } else if current_colour == YELLOW { context.pixel.yellow(); } else if current_colour == PURPLE { context.pixel.purple(); } else if current_colour == CYAN { context.pixel.cyan(); } else if current_colour == ALL { context.pixel.white(); } else if current_colour == NONE { context.pixel.all_off(); } context.pixel.render(); previous_colour = current_colour; } control.set_gear(gear); control.speed( left_rear_speed, right_rear_speed, left_front_speed, right_front_speed, ); } } control.stop(); context.pixel.all_off(); context.display.clear(); } fn try_open_tof(filename: &'static str) -> Option<VL53L0X> { let front = match VL53L0X::new(filename) { Ok(front) => front, Err(e) => { println!("Failed to open front TOF {:?} ", e); return None; } }; println!("Success {:?}", filename); return Some(front); } fn get_distance(tof: &mut Option<VL53L0X>, continous: bool) -> u16 { let dist: u16; if continous { match tof { None => dist = 0, Some(ref mut tof) => { dist = tof.read_continous(); } } } else { match tof { None => dist = 0, Some(ref mut tof) => { dist = tof.read(); } } } return dist; } fn set_continous(tof: &mut Option<VL53L0X>) { match tof { None => (), Some(ref mut tof) => match tof.start_continuous() { Ok(()) => { println!("Set continuous"); } Err(e) => { println!("Failed to set continuous {:?}", e); } }, } } fn do_run_tests(context: &mut Context) { //let mut cam = build_camera(); //load_calibration( &mut cam ); // Test compass //let mut compass = HMC5883L::new("/dev/i2c-1").unwrap(); //println!("Compass started"); // Test distance sensors group 1 (not always present) let mut front = try_open_tof("/dev/i2c-8"); let 
mut leftfront = try_open_tof("/dev/i2c-5"); let mut rightfront = try_open_tof("/dev/i2c-10"); // Test distance sensors group 2 let mut back = try_open_tof("/dev/i2c-7"); let mut leftback = try_open_tof("/dev/i2c-6"); let mut rightback = try_open_tof("/dev/i2c-9"); //let mut heading = compass.read_degrees().unwrap(); set_continous(&mut back); set_continous(&mut leftback); set_continous(&mut rightback); let mut bk_dist = get_distance(&mut back, true); let mut lb_dist = get_distance(&mut leftback, true); let mut rb_dist = get_distance(&mut rightback, true); set_continous(&mut front); let mut ft_dist = get_distance(&mut front, true); set_continous(&mut leftfront); set_continous(&mut rightfront); let mut lf_dist = get_distance(&mut leftfront, true); let mut rf_dist = get_distance(&mut rightfront, true); let mut colour_visible = 0; context.pixel.all_on(); context.pixel.render(); let interval = Duration::from_millis(200); let mut now = Instant::now(); let (command_tx, command_rx) = mpsc::channel(); let (data_tx, data_rx) = mpsc::channel(); let t = thread::spawn(move || { println!("Thread Starting"); let colour: i32 = 0; let mut cam = build_camera(); load_calibration(&mut cam); loop { cam.discard_video(); match command_rx.try_recv() { Ok("X") | Err(TryRecvError::Disconnected) => { println!("Terminating."); break; } Ok("F") => { let colour = cam.get_colour(false); if colour == RED { let _ = data_tx.send("0"); } if colour == BLUE { let _ = data_tx.send("1"); } if colour == YELLOW { let _ = data_tx.send("2"); } if colour == GREEN { let _ = data_tx.send("3"); } } Ok(&_) | Err(TryRecvError::Empty) => {} } } }); let mut quit = false; while !quit { match data_rx.try_recv() { Ok("0") => { print_colour(context, RED); colour_visible = RED; } Ok("1") => { print_colour(context, BLUE); colour_visible = BLUE; } Ok("2") => { print_colour(context, YELLOW); colour_visible = YELLOW; } Ok("3") => { print_colour(context, GREEN); colour_visible = GREEN; } Ok(_) | Err(_) => {} } while let 
Some(event) = context.gilrs.next_event() { context.gilrs.update(&event); match event { Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. } => { //println!("Mode Pressed"); quit = true; break; } Event { id: _, event: EventType::ButtonPressed(Button::North, _), .. } => { //println!("North Pressed"); //let colour_visible = cam.get_colour( false ); let _ = command_tx.send("F"); //heading = compass.read_degrees().unwrap(); bk_dist = get_distance(&mut back, true); lb_dist = get_distance(&mut leftback, true); rb_dist = get_distance(&mut rightback, true); ft_dist = get_distance(&mut front, true); lf_dist = get_distance(&mut leftfront, true); rf_dist = get_distance(&mut rightfront, true); context.display.clear(); //context //.display //.draw_text(0, 8, "Head: ", WHITE) //.unwrap(); //context //.display //.draw_text(56, 8, &format!("{:5.2} ", heading), WHITE) //.unwrap(); context.display.draw_text(0, 16, "LB:", WHITE).unwrap(); context .display .draw_text(56, 16, &format!("{:5.2} ", lb_dist), WHITE) .unwrap(); context.display.draw_text(0, 24, "RB:", WHITE).unwrap(); context .display .draw_text(56, 24, &format!("{:5.2} ", rb_dist), WHITE) .unwrap(); context.display.draw_text(0, 40, "Back:", WHITE).unwrap(); context .display .draw_text(56, 40, &format!("{:5.2} ", bk_dist), WHITE) .unwrap(); context.display.draw_text(0, 32, "Front:", WHITE).unwrap(); context .display .draw_text(56, 32, &format!("{:5.2} ", ft_dist), WHITE) .unwrap(); context.display.draw_text(0, 48, "LF:", WHITE).unwrap(); context .display .draw_text(56, 48, &format!("{:5.2} ", lf_dist), WHITE) .unwrap(); context.display.draw_text(0, 56, "RF:", WHITE).unwrap(); context .display .draw_text(56, 56, &format!("{:5.2} ", rf_dist), WHITE) .unwrap(); context.display.draw_text(0, 64, "Colour:", WHITE).unwrap(); context.display.update().unwrap(); break; } _ => { break; } }; } //heading = compass.read_degrees().unwrap(); ft_dist = get_distance(&mut front, true); bk_dist = get_distance(&mut back, true); 
lb_dist = get_distance(&mut leftback, true); rb_dist = get_distance(&mut rightback, true); lf_dist = get_distance(&mut leftfront, true); rf_dist = get_distance(&mut rightfront, true); //println!("Current Heading {:5.2} ", heading); println!("Left Back Distance {:5.2} ", lb_dist); println!("Back Distance {:5.2} ", bk_dist); println!("Front Distance {:5.2} ", ft_dist); println!("Right Back Distance {:5.2} ", rb_dist); println!("Left Front Distance {:5.2} ", lf_dist); println!("Right Front Distance {:5.2} ", rf_dist); } context.pixel.all_off(); context.pixel.render(); } fn load_calibration(cam: &mut Camera) { let file = fs::File::open("calibrate.json").expect("file should open read only"); let mut calibrate: Calibrate = serde_json::from_reader(file).expect("file should be proper JSON"); println!("Calibrate {:?}", calibrate); cam.set_red_lower(&mut calibrate.red_lower); cam.set_red_upper(&mut calibrate.red_upper); cam.set_green_lower(&mut calibrate.green_lower); cam.set_green_upper(&mut calibrate.green_upper); cam.set_blue_lower(&mut calibrate.blue_lower); cam.set_blue_upper(&mut calibrate.blue_upper); cam.set_yellow_lower(&mut calibrate.yellow_lower); cam.set_yellow_upper(&mut calibrate.yellow_upper); cam.dump_bounds(); } fn do_calibrate(context: &mut Context) { const DIST: u16 = 300; context.pixel.all_off(); context.pixel.render(); //let mut compass = HMC5883L::new("/dev/i2c-1").unwrap(); let mut front = try_open_tof("/dev/i2c-8"); let mut leftfront = try_open_tof("/dev/i2c-5"); let mut rightfront = try_open_tof("/dev/i2c-10"); let mut back = try_open_tof("/dev/i2c-7"); println!("front started"); println!("left front started"); println!("right front started"); println!("back started"); set_continous(&mut front); set_continous(&mut leftfront); set_continous(&mut rightfront); set_continous(&mut back); let mut control = build_control(); control.init(); let mut distance: u16 = 0; let mut direction = "Front"; let mut diff: i32 = 0; //let original = 
compass.read_degrees().unwrap(); //let mut heading = compass.read_degrees().unwrap(); let mut left_rear_speed: i32 = 0; let mut right_rear_speed: i32 = 0; let mut left_front_speed: i32 = 0; let mut right_front_speed: i32 = 0; let mut quit = false; let mut running = false; let mut direction = "North"; let mut gear = 1; control.set_gear(gear); control.set_bias(0); let mut decel = 0.0; //println!( //"Init Original {:#?}°, Heading {:#?}° Diff {:#?} Decel {:?}", //original, heading, diff, decel //); context.pixel.all_on(); context.pixel.render(); context.display.clear(); context .display .draw_text(4, 4, "Press a key...", WHITE) .unwrap(); context.display.update_all().unwrap(); while !quit { while let Some(event) = context.gilrs.next_event() { context.gilrs.update(&event); match event { Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. } => { println!("Mode Pressed"); quit = true; running = false; break; } Event { id: _, event: EventType::ButtonPressed(Button::North, _), .. } => { direction = "North"; running = true; } Event { id: _, event: EventType::ButtonPressed(Button::South, _), .. } => { direction = "South"; running = true; } Event { id: _, event: EventType::ButtonPressed(Button::West, _), .. } => { direction = "West"; running = true; } Event { id: _, event: EventType::ButtonPressed(Button::East, _), .. } => { direction = "East"; running = true; } Event { id: _, event: EventType::ButtonPressed(Button::Start, _), .. 
} => { direction = "Align"; running = true; } _ => { // Swallow event } }; } if running { //println!("Direction {:?}",direction); let front_distance = get_distance(&mut front, true); let left_distance = get_distance(&mut leftfront, true); let right_distance = get_distance(&mut rightfront, true); let back_distance = get_distance(&mut back, true); let mut distance = 0 as u16; //heading = compass.read_degrees().unwrap(); let diff = 0; if direction == "Align" { control.stop(); context.pixel.all_off(); context.pixel.render(); //align(original, &mut compass, &mut control, 4); direction = "None" } if direction == "North" { let bias = 40; distance = front_distance; decel = get_deceleration(distance, DIST); left_rear_speed = SPEED + bias; right_rear_speed = SPEED * -1; left_front_speed = SPEED + bias; right_front_speed = SPEED * -1; } if direction == "South" { let bias = 40; distance = back_distance; decel = get_deceleration(distance, DIST); left_rear_speed = (SPEED + bias) * -1; right_rear_speed = SPEED; left_front_speed = (SPEED + bias) * -1; right_front_speed = SPEED; } if direction == "West" { let bias = 10; distance = left_distance; //if distance > DIST { //distance = distance - DIST; //} decel = get_deceleration(distance, DIST); left_front_speed = SPEED; left_rear_speed = (SPEED + bias) * -1; right_front_speed = SPEED; right_rear_speed = (SPEED + bias) * -1; } if direction == "East" { let bias = 30; distance = right_distance; decel = get_deceleration(distance, DIST); left_front_speed = SPEED * -1; left_rear_speed = SPEED - bias; right_front_speed = SPEED * -1; right_rear_speed = SPEED - bias; } if direction == "North" || direction == "South" || direction == "West" || direction == "East" { left_rear_speed = ((left_rear_speed as f64) * decel) as i32; right_rear_speed = ((right_rear_speed as f64) * decel) as i32; left_front_speed = ((left_front_speed as f64) * decel) as i32; right_front_speed = ((right_front_speed as f64) * decel) as i32; control.set_gear(gear); 
control.set_bias(0); control.speed( left_rear_speed, right_rear_speed, left_front_speed, right_front_speed, ); //println!( //"Direction {:?} Org {:#?}° Head {:#?}° Decel {:#?},Dist {:?}", //direction, original, heading, decel, distance //); println!( "Speeds lf {:?}, lr {:#?}, rf {:#?}, rr {:#?}", left_front_speed, left_rear_speed, right_front_speed, right_rear_speed ); if distance < MINDIST { control.stop(); running = false; println!("Done"); } } } } control.stop(); context.pixel.all_off(); context.pixel.render(); } //use std::sync::Arc; //use std::sync::Mutex; fn _do_calibrate(context: &mut Context) { context.pixel.all_off(); context.pixel.render(); let (command_tx, command_rx) = mpsc::channel(); let (data_tx, data_rx) = mpsc::channel(); let t = thread::spawn(move || { println!("Thread Starting"); let colour: i32 = 0; let mut cam = build_camera(); load_calibration(&mut cam); loop { cam.discard_video(); println!("Thread running"); match command_rx.try_recv() { Ok("X") | Err(TryRecvError::Disconnected) => { println!("Terminating."); break; } Ok("F") => { let colour = cam.get_colour(false); if colour == RED { let _ = data_tx.send("0"); } if colour == BLUE { let _ = data_tx.send("1"); } if colour == YELLOW { let _ = data_tx.send("2"); } if colour == GREEN { let _ = data_tx.send("3"); } println!("Colour {:?}", colour); } Ok(&_) | Err(TryRecvError::Empty) => {} } } }); let mut quit = false; while !quit { match data_rx.try_recv() { Ok("0") => { print_colour(context, RED); } Ok("1") => { print_colour(context, BLUE); } Ok("2") => { print_colour(context, YELLOW); } Ok("3") => { print_colour(context, GREEN); } Ok(_) | Err(_) => {} } while let Some(event) = context.gilrs.next_event() { context.gilrs.update(&event); match event { Event { id: _, event: EventType::ButtonPressed(Button::Mode, _), .. } => { //println!("Mode Pressed"); { let _ = command_tx.send("X"); } quit = true; break; } Event { id: _, event: EventType::ButtonPressed(Button::Start, _), .. 
} => { //println!("Mode Pressed"); { let _ = command_tx.send("F"); } break; } _ => { break; } } } } context.pixel.all_off(); context.pixel.render(); } fn show_menu(context: &mut Context, menu: i8) { context.display.clear(); context.display.draw_text(20, 42, "Forest", WHITE).unwrap(); context .display .draw_text(20, 50, "Fighters", WHITE) .unwrap(); context .display .draw_text(20, 58, "Ready...", WHITE) .unwrap(); context.display.update_all().unwrap(); context.display.clear(); context .display .draw_text(4, 4, "Forest Fighters", LT_GREY) .unwrap(); if menu == 0 { let tiny = image::open("The Canyons of Mars Menu Item.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(4, 108, "Canyons of Mars", WHITE) .unwrap(); } else if menu == 1 { let tiny = image::open("Hubble Telescope Item Menu.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(12, 108, "Hubble T'scope", WHITE) .unwrap(); } else if menu == 2 { let tiny = image::open("Blast Off Menu Item.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(40, 108, "Blast Off", WHITE) .unwrap(); } else if menu == 3 { let tiny = image::open("Large Wheels Menu Item.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(4, 108, "Large Wheels RC", WHITE) .unwrap(); } else if menu == 4 { let tiny = image::open("Mecanum Wheels Menu Item.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(28, 108, "Mecanum RC", WHITE) .unwrap(); } else if menu == 5 { let tiny = image::open("Exit Menu Item.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context.display.draw_text(56, 108, "EXIT", WHITE).unwrap(); } else if menu == 6 { let tiny = image::open("Shutdown Menu Item.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(32, 108, "SHUTDOWN", WHITE) .unwrap(); } else if menu == 7 { let 
tiny = image::open("RunTests.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(32, 108, "Run Tests", WHITE) .unwrap(); } else if menu == 8 { let tiny = image::open("Calibrate.jpg").unwrap(); context.display.draw_image(0, 16, tiny).unwrap(); context .display .draw_text(32, 108, "Calibrate", WHITE) .unwrap(); } context.display.update_all().unwrap(); } fn main() { // Uncomment to test //_test(); // sensors //_test2(); // camera //_test3(); // pixels //_test4(); // display //_test5(); // Motors //return; // A list of locations, colours are updated when found. let locations = [0.0, 0.0, 0.0, 0.0]; let order = [NONE, NONE, NONE, NONE]; //let mut pixel = build_pixel(); //let mut gilrs = Gilrs::new().unwrap(); //let mut display = SSD1327::new("/dev/i2c-3"); let mut context = Context::new("/dev/i2c-3"); context.display.begin().unwrap(); context.display.clear(); context.display.draw_text(20, 42, "Forest", WHITE).unwrap(); context .display .draw_text(20, 50, "Fighters", WHITE) .unwrap(); context .display .draw_text(20, 58, "Ready...", WHITE) .unwrap(); context.display.update_all().unwrap(); let mut menu: i8 = 0; let mut prev: i8 = -1; let mut quit = false; while !quit { if menu > 8 { menu = 0; } else if menu < 0 { menu = 8; } if menu != prev { prev = menu; show_menu(&mut context, menu); } while let Some(event) = context.gilrs.next_event() { match event { Event { id: _, event: EventType::ButtonPressed(Button::DPadRight, _), .. } => { menu = menu + 1; } Event { id: _, event: EventType::ButtonPressed(Button::DPadLeft, _), .. } => { menu = menu - 1; } Event { id: _, event: EventType::ButtonPressed(Button::Select, _), .. 
} => { if menu == 0 { context.display.clear(); context .display .draw_text(4, 4, "Canyon...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); do_canyon(&mut context); prev = -1; } if menu == 1 { context.display.clear(); context .display .draw_text(4, 4, "Hubble...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); do_hubble(&mut context, locations, order); prev = -1; } if menu == 2 { context.display.clear(); context .display .draw_text(4, 4, "Blast Off...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); do_straight(&mut context); prev = -1; } if menu == 3 { context.display.clear(); context .display .draw_text(4, 4, "Wheels RC...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); do_wheels_rc(&mut context); prev = -1; } if menu == 4 { context.display.clear(); context .display .draw_text(4, 4, "Mecanum RC...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); do_mecanum_rc(&mut context); prev = -1; } if menu == 5 { context.display.clear(); context .display .draw_text(4, 4, "Exiting...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); quit = true; break; } if menu == 6 { context.display.clear(); context .display .draw_text(4, 4, "Shutdown...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); Command::new("halt").spawn().expect("halt command failed"); quit = true; break; } if menu == 7 { context.display.clear(); context .display .draw_text(4, 4, "Run Tests...", LT_GREY) .unwrap(); context.display.update_all().unwrap(); do_run_tests(&mut context); prev = -1; break; } if menu == 8 { context.display.clear(); context .display .draw_text(4, 4, "Calibrate", LT_GREY) .unwrap(); context.display.update_all().unwrap(); do_calibrate(&mut context); prev = -1; break; } } _ => (), }; } } context.display.clear(); context.display.update_all().unwrap(); thread::sleep(time::Duration::from_millis(2000)); }
use std::iter::FromIterator; pub struct SimpleLinkedList<T> { head: Option<Box<Node<T>>>, length: usize, } struct Node<T> { value: T, next: Option<Box<Node<T>>>, } impl<T> SimpleLinkedList<T> { pub fn new() -> Self { SimpleLinkedList { head: None, length: 0, } } pub fn is_empty(&self) -> bool { self.head.is_none() } pub fn len(&self) -> usize { self.length } pub fn push(&mut self, element: T) { let mut temp = Box::new(Node{value: element, next: None}); if self.head.is_some() { temp.next = std::mem::take(&mut self.head); } self.head = Some(temp); self.length += 1 } pub fn pop(&mut self) -> Option<T> { let temp = std::mem::take(&mut self.head); if temp.is_none() { return None; } else { let node = *temp.unwrap(); self.head = node.next; self.length -= 1; return Some(node.value); } } pub fn peek(&self) -> Option<&T> { if self.head.is_none() { return None; } else { let node_ref = &self.head.as_ref().unwrap(); return Some(&node_ref.value) } } #[must_use] pub fn rev(&mut self) -> SimpleLinkedList<T> { let mut prev = None; let mut curr = std::mem::take(&mut self.head); while let Some(mut node) = curr.take() { let next = node.next.take(); node.next = prev.take(); prev = Some(node); curr = next; } SimpleLinkedList { head: prev, length: self.length, } } } impl<T> FromIterator<T> for SimpleLinkedList<T> { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { let mut l = SimpleLinkedList::new(); for i in iter { l.push(i) } l } } // In general, it would be preferable to implement IntoIterator for SimpleLinkedList<T> // instead of implementing an explicit conversion to a vector. This is because, together, // FromIterator and IntoIterator enable conversion between arbitrary collections. 
// Given that implementation, converting to a vector is trivial: // // let vec: Vec<_> = simple_linked_list.into_iter().collect(); // // The reason this exercise's API includes an explicit conversion to Vec<T> instead // of IntoIterator is that implementing that interface is fairly complicated, and // demands more of the student than we expect at this point in the track. impl<T> From<SimpleLinkedList<T>> for Vec<T> { fn from(mut linked_list: SimpleLinkedList<T>) -> Vec<T> { let mut v = Vec::new(); let mut linked_list_rev = linked_list.rev(); while let Some(i) = linked_list_rev.pop() { v.push(i) } v } }
struct Artwork { name: String, } fn main() { let art1 = Artwork { name: "Boy with Apple".to_string(), }; }
use crate::things::thing::Thing; pub struct Scenery { pub thing: Thing, } impl Scenery {}
mod sqlstate_detail;

use std::fmt::Display;

use serde::{Deserialize, Serialize};

use self::sqlstate_detail::{SqlStateCategory, SqlStateClass, SqlStateDetail};

/// SQLSTATE.
///
/// Errors for specific SQL part (`SQL/JRT`, for example) are omitted.
///
/// See: <https://en.wikipedia.org/wiki/SQLSTATE>
#[allow(missing_docs)]
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize, new)]
pub enum SqlState {
    SuccessfulCompletion,
    Warning,
    WarningCursorOperationConflict,
    WarningDisconnectError,
    WarningNullValueEliminatedInSetFunction,
    WarningStringDataRightTruncation,
    WarningInsufficientItemDescriptorAreas,
    WarningPrivilegeNotRevoked,
    WarningPrivilegeNotGranted,
    WarningSearchConditionTooLongForInformationSchema,
    WarningQueryExpressionTooLongForInformationSchema,
    WarningDefaultValueTooLongForInformationSchema,
    WarningResultSetsReturned,
    WarningAdditionalResultSetsReturned,
    WarningAttemptToReturnTooManyResultSets,
    WarningStatementTooLongForInformationSchema,
    WarningInvalidNumberOfConditions,
    WarningArrayDataRightTruncation,
    NoData,
    NoDataNoAdditionalResultSetsReturned,
    DynamicSQLError,
    DynamicSQLErrorUsingClauseDoesNotMatchDynamicParameterSpecifications,
    DynamicSQLErrorUsingClauseDoesNotMatchTargetSpecifications,
    DynamicSQLErrorCursorSpecificationCannotBeExecuted,
    DynamicSQLErrorUsingClauseRequiredForDynamicParameters,
    DynamicSQLErrorPreparedStatementNotACursorSpecification,
    DynamicSQLErrorRestrictedDataTypeAttributeViolation,
    DynamicSQLErrorUsingClauseRequiredForResultFields,
    DynamicSQLErrorInvalidDescriptorCount,
    DynamicSQLErrorInvalidDescriptorIndex,
    DynamicSQLErrorDataTypeTransformFunctionViolation,
    DynamicSQLErrorUndefinedDATAValue,
    DynamicSQLErrorInvalidDATATarget,
    DynamicSQLErrorInvalidLEVELValue,
    DynamicSQLErrorInvalidDatetimeIntervalCode,
    ConnectionException,
    ConnectionExceptionSQLclientUnableToEstablishSQLconnection,
    ConnectionExceptionConnectionNameInUse,
    ConnectionExceptionConnectionDoesNotExist,
    ConnectionExceptionSQLserverRejectedEstablishmentOfSQLconnection,
    ConnectionExceptionConnectionFailure,
    ConnectionExceptionTransactionResolutionUnknown,
    ConnectionExceptionDatabaseNotOpen,
    ConnectionExceptionDatabaseAlreadyOpen,
    TriggeredActionException,
    FeatureNotSupported,
    FeatureNotSupportedMultipleServerTransactions,
    InvalidTargetTypeSpecification,
    InvalidSchemaNameListSpecification,
    LocatorException,
    LocatorExceptionInvalidSpecification,
    InvalidGrantor,
    InvalidSQLinvokedProcedureReference,
    InvalidRoleSpecification,
    InvalidTransformGroupNameSpecification,
    TargetTableDisagreesWithCursorSpecification,
    AttemptToAssignToNonupdatableColumn,
    AttemptToAssignToOrderingColumn,
    ProhibitedStatementEncounteredDuringTriggerExecution,
    ProhibitedStatementEncounteredDuringTriggerExecutionModifyTableModifiedByDataChangeDeltaTable,
    DiagnosticsException,
    DiagnosticsExceptionMaximumNumberOfStackedDiagnosticsAreasExceeded,
    CardinalityViolation,
    DataException,
    DataExceptionStringDataRightTruncation,
    DataExceptionNullValueNoIndicatorParameter,
    DataExceptionNumericValueOutOfRange,
    DataExceptionNullValueNotAllowed,
    DataExceptionErrorInAssignment,
    DataExceptionInvalidIntervalFormat,
    DataExceptionInvalidDatetimeFormat,
    DataExceptionDatetimeFieldOverflow,
    DataExceptionInvalidTimeZoneDisplacementValue,
    DataExceptionEscapeCharacterConflict,
    DataExceptionInvalidUseOfEscapeCharacter,
    DataExceptionInvalidEscapeOctet,
    DataExceptionNullValueInArrayTarget,
    DataExceptionZerolengthCharacterString,
    DataExceptionMostSpecificTypeMismatch,
    DataExceptionSequenceGeneratorLimitExceeded,
    DataExceptionIntervalValueOutOfRange,
    DataExceptionMultisetValueOverflow,
    DataExceptionInvalidIndicatorParameterValue,
    DataExceptionSubstringError,
    DataExceptionDivisionByZero,
    DataExceptionInvalidPrecedingOrFollowingSizeInWindowFunction,
    DataExceptionInvalidArgumentForNTILEFunction,
    DataExceptionIntervalFieldOverflow,
    DataExceptionInvalidArgumentForNthValueFunction,
    DataExceptionInvalidCharacterValueForCast,
    DataExceptionInvalidEscapeCharacter,
    DataExceptionInvalidRegularExpression,
    DataExceptionNullRowNotPermittedInTable,
    DataExceptionInvalidArgumentForNaturalLogarithm,
    DataExceptionInvalidArgumentForPowerFunction,
    DataExceptionInvalidArgumentForWidthBucketFunction,
    DataExceptionInvalidRowVersion,
    DataExceptionInvalidQueryRegularExpression,
    DataExceptionInvalidQueryOptionFlag,
    DataExceptionAttemptToReplaceAZerolengthString,
    DataExceptionInvalidQueryReplacementString,
    DataExceptionInvalidRowCountInFetchFirstClause,
    DataExceptionInvalidRowCountInResultOffsetClause,
    DataExceptionCharacterNotInRepertoire,
    DataExceptionIndicatorOverflow,
    DataExceptionInvalidParameterValue,
    DataExceptionUnterminatedCString,
    DataExceptionInvalidEscapeSequence,
    DataExceptionStringDataLengthMismatch,
    DataExceptionTrimError,
    DataExceptionNoncharacterInUCSString,
    DataExceptionNullValueSubstitutedForMutatorSubjectParameter,
    DataExceptionArrayElementError,
    DataExceptionArrayDataRightTruncation,
    DataExceptionInvalidRepeatArgumentInASampleClause,
    DataExceptionInvalidSampleSize,
    DataExceptionIllegalConversion,
    DataExceptionIllegalComparison,
    DataExceptionIllegalOperation,
    IntegrityConstraintViolation,
    IntegrityConstraintViolationRestrictViolation,
    IntegrityConstraintNotNullViolation,
    IntegrityConstraintUniqueViolation,
    InvalidCursorState,
    InvalidTransactionState,
    InvalidTransactionStateActiveSQLtransaction,
    InvalidTransactionStateBranchTransactionAlreadyActive,
    InvalidTransactionStateInappropriateAccessModeForBranchTransaction,
    InvalidTransactionStateInappropriateIsolationLevelForBranchTransaction,
    InvalidTransactionStateNoActiveSQLtransactionForBranchTransaction,
    InvalidTransactionStateReadonlySQLtransaction,
    InvalidTransactionStateSchemaAndDataStatementMixingNotSupported,
    InvalidTransactionStateHeldCursorRequiresSameIsolationLevel,
    InvalidSQLStatementName,
    TriggeredDataChangeViolation,
    TriggeredDataChangeViolationModifyTableModifiedByDataChangeDeltaTable,
    InvalidAuthorizationSpecification,
    DependentPrivilegeDescriptorsStillExist,
    InvalidCharacterSetName,
    InvalidTransactionTermination,
    InvalidConnectionName,
    SQLRoutineException,
    SQLRoutineExceptionModifyingSQLdataNotPermitted,
    SQLRoutineExceptionProhibitedSQLstatementAttempted,
    SQLRoutineExceptionReadingSQLdataNotPermitted,
    SQLRoutineExceptionFunctionExecutedNoReturnStatement,
    InvalidCollationName,
    InvalidSQLStatementIdentifier,
    InvalidSQLDescriptorName,
    InvalidCursorName,
    InvalidConditionNumber,
    CursorSensitivityException,
    CursorSensitivityExceptionRequestRejected,
    CursorSensitivityExceptionRequestFailed,
    ExternalRoutineException,
    ExternalRoutineExceptionContainingSQLNotPermitted,
    ExternalRoutineExceptionModifyingSQLdataNotPermitted,
    ExternalRoutineExceptionProhibitedSQLstatementAttempted,
    ExternalRoutineExceptionReadingSQLdataNotPermitted,
    ExternalRoutineInvocationException,
    ExternalRoutineInvocationExceptionNullValueNotAllowed,
    SavepointException,
    SavepointExceptionInvalidSpecification,
    SavepointExceptionTooMany,
    AmbiguousCursorName,
    InvalidCatalogName,
    InvalidSchemaName,
    TransactionRollback,
    TransactionRollbackSerializationFailure,
    TransactionRollbackIntegrityConstraintViolation,
    TransactionRollbackStatementCompletionUnknown,
    TransactionRollbackTriggeredActionException,
    TransactionRollbackDeadlock,
    SyntaxErrorOrAccessRuleViolation,
    SyntaxErrorOrAccessRuleViolationSyntaxError,
    WithCheckOptionViolation,
    ReservedForISO9579,
    IoError,
    NameError,
    NameErrorNotFound,
    NameErrorAmbiguous,
    NameErrorDuplicate,
    NameErrorTooLong,
    DdlError,
    SystemError,
}

impl SqlState {
    /// 5 bytes characters called "SQLSTATE" (2-character class code followed
    /// by a 3-character subclass code).
    pub fn sqlstate(&self) -> String {
        self.detail().sqlstate()
    }

    /// See [SqlStateCategory](sqlstate_detail::SqlStateCategory)
    pub fn category(&self) -> SqlStateCategory {
        self.detail().category()
    }

    /// Maps this variant to its `(class, subclass code, subclass text)`
    /// triple. The class table and the `SqlStateDetail` are rebuilt on every
    /// call; variant-to-code mapping is data, kept inline in one big match.
    #[allow(non_snake_case)]
    fn detail(&self) -> SqlStateDetail {
        use SqlState::*;

        // Standard SQLSTATE classes (2-character codes) and their class text.
        let class00 = SqlStateClass::new("00", "successful completion");
        let class01 = SqlStateClass::new("01", "warning");
        let class02 = SqlStateClass::new("02", "no data");
        let class07 = SqlStateClass::new("07", "dynamic SQL error");
        let class08 = SqlStateClass::new("08", "connection exception");
        let class09 = SqlStateClass::new("09", "triggered action exception");
        let class0A = SqlStateClass::new("0A", "feature not supported");
        let class0D = SqlStateClass::new("0D", "invalid target type specification");
        let class0E = SqlStateClass::new("0E", "invalid schema name list specification");
        let class0F = SqlStateClass::new("0F", "locator exception");
        let class0L = SqlStateClass::new("0L", "invalid grantor");
        let class0M = SqlStateClass::new("0M", "invalid SQL-invoked procedure reference");
        let class0P = SqlStateClass::new("0P", "invalid role specification");
        let class0S = SqlStateClass::new("0S", "invalid transform group name specification");
        let class0T = SqlStateClass::new("0T", "target table disagrees with cursor specification");
        let class0U = SqlStateClass::new("0U", "attempt to assign to non-updatable column");
        let class0V = SqlStateClass::new("0V", "attempt to assign to ordering column");
        let class0W = SqlStateClass::new(
            "0W",
            "prohibited statement encountered during trigger execution",
        );
        let class0Z = SqlStateClass::new("0Z", "diagnostics exception");
        let class21 = SqlStateClass::new("21", "cardinality violation");
        let class22 = SqlStateClass::new("22", "data exception");
        let class23 = SqlStateClass::new("23", "integrity constraint violation");
        let class24 = SqlStateClass::new("24", "invalid cursor state");
        let class25 = SqlStateClass::new("25", "invalid transaction state");
        let class26 = SqlStateClass::new("26", "invalid SQL statement name");
        let class27 = SqlStateClass::new("27", "triggered data change violation");
        let class28 = SqlStateClass::new("28", "invalid authorization specification");
        let class2B = SqlStateClass::new("2B", "dependent privilege descriptors still exist");
        let class2C = SqlStateClass::new("2C", "invalid character set name");
        let class2D = SqlStateClass::new("2D", "invalid transaction termination");
        let class2E = SqlStateClass::new("2E", "invalid connection name");
        let class2F = SqlStateClass::new("2F", "SQL routine exception");
        let class2H = SqlStateClass::new("2H", "invalid collation name");
        let class30 = SqlStateClass::new("30", "invalid SQL statement identifier");
        let class33 = SqlStateClass::new("33", "invalid SQL descriptor name");
        let class34 = SqlStateClass::new("34", "invalid cursor name");
        let class35 = SqlStateClass::new("35", "invalid condition number");
        let class36 = SqlStateClass::new("36", "cursor sensitivity exception");
        let class38 = SqlStateClass::new("38", "external routine exception");
        let class39 = SqlStateClass::new("39", "external routine invocation exception");
        let class3B = SqlStateClass::new("3B", "savepoint exception");
        let class3C = SqlStateClass::new("3C", "ambiguous cursor name");
        let class3D = SqlStateClass::new("3D", "invalid catalog name");
        let class3F = SqlStateClass::new("3F", "invalid schema name");
        let class40 = SqlStateClass::new("40", "transaction rollback");
        let class42 = SqlStateClass::new("42", "syntax error or access rule violation");
        let class44 = SqlStateClass::new("44", "with check option violation");
        let classHZ = SqlStateClass::new("HZ", "Reserved for ISO9579 (RDA)");

        // apllodb's original error classes (class codes must start with a
        // letter in [I-Z], the implementation-defined range).
        let classIO = SqlStateClass::new("IO", "io error");
        let classNM = SqlStateClass::new("NM", "general name error");
        let classSC = SqlStateClass::new("SC", "DDL error");
        let classSY = SqlStateClass::new("SY", "general system error");

        match self {
            SuccessfulCompletion => SqlStateDetail::new(class00, "000", "(no subclass)"),

            Warning => SqlStateDetail::new(class01, "000", "(no subclass)"),
            WarningCursorOperationConflict => {
                SqlStateDetail::new(class01, "001", "cursor operation conflict")
            }
            WarningDisconnectError => SqlStateDetail::new(class01, "002", "disconnect error"),
            WarningNullValueEliminatedInSetFunction => {
                SqlStateDetail::new(class01, "003", "null value eliminated in set function")
            }
            WarningStringDataRightTruncation => {
                SqlStateDetail::new(class01, "004", "string data, right truncation")
            }
            WarningInsufficientItemDescriptorAreas => {
                SqlStateDetail::new(class01, "005", "insufficient item descriptor areas")
            }
            WarningPrivilegeNotRevoked => {
                SqlStateDetail::new(class01, "006", "privilege not revoked")
            }
            WarningPrivilegeNotGranted => {
                SqlStateDetail::new(class01, "007", "privilege not granted")
            }
            WarningSearchConditionTooLongForInformationSchema => SqlStateDetail::new(
                class01,
                "009",
                "search condition too long for information schema",
            ),
            WarningQueryExpressionTooLongForInformationSchema => SqlStateDetail::new(
                class01,
                "00A",
                "query expression too long for information schema",
            ),
            WarningDefaultValueTooLongForInformationSchema => SqlStateDetail::new(
                class01,
                "00B",
                "default value too long for information schema",
            ),
            WarningResultSetsReturned => {
                SqlStateDetail::new(class01, "00C", "result sets returned")
            }
            WarningAdditionalResultSetsReturned => {
                SqlStateDetail::new(class01, "00D", "additional result sets returned")
            }
            WarningAttemptToReturnTooManyResultSets => {
                SqlStateDetail::new(class01, "00E", "attempt to return too many result sets")
            }
            WarningStatementTooLongForInformationSchema => {
                SqlStateDetail::new(class01, "00F", "statement too long for information schema")
            }
            WarningInvalidNumberOfConditions => {
                SqlStateDetail::new(class01, "012", "invalid number of conditions")
            }
            WarningArrayDataRightTruncation => {
                SqlStateDetail::new(class01, "02F", "array data, right truncation")
            }

            NoData => SqlStateDetail::new(class02, "000", "(no subclass)"),
            NoDataNoAdditionalResultSetsReturned => {
                SqlStateDetail::new(class02, "001", "no additional result sets returned")
            }

            DynamicSQLError => SqlStateDetail::new(class07, "000", "(no subclass)"),
            DynamicSQLErrorUsingClauseDoesNotMatchDynamicParameterSpecifications => {
                SqlStateDetail::new(
                    class07,
                    "001",
                    "using clause does not match dynamic parameter specifications",
                )
            }
            DynamicSQLErrorUsingClauseDoesNotMatchTargetSpecifications => SqlStateDetail::new(
                class07,
                "002",
                "using clause does not match target specifications",
            ),
            DynamicSQLErrorCursorSpecificationCannotBeExecuted => {
                SqlStateDetail::new(class07, "003", "cursor specification cannot be executed")
            }
            DynamicSQLErrorUsingClauseRequiredForDynamicParameters => SqlStateDetail::new(
                class07,
                "004",
                "using clause required for dynamic parameters",
            ),
            DynamicSQLErrorPreparedStatementNotACursorSpecification => SqlStateDetail::new(
                class07,
                "005",
                "prepared statement not a cursor specification",
            ),
            DynamicSQLErrorRestrictedDataTypeAttributeViolation => {
                SqlStateDetail::new(class07, "006", "restricted data type attribute violation")
            }
            DynamicSQLErrorUsingClauseRequiredForResultFields => {
                SqlStateDetail::new(class07, "007", "using clause required for result fields")
            }
            DynamicSQLErrorInvalidDescriptorCount => {
                SqlStateDetail::new(class07, "008", "invalid descriptor count")
            }
            DynamicSQLErrorInvalidDescriptorIndex => {
                SqlStateDetail::new(class07, "009", "invalid descriptor index")
            }
            DynamicSQLErrorDataTypeTransformFunctionViolation => {
                SqlStateDetail::new(class07, "00B", "data type transform function violation")
            }
            DynamicSQLErrorUndefinedDATAValue => {
                SqlStateDetail::new(class07, "00C", "undefined DATA value")
            }
            DynamicSQLErrorInvalidDATATarget => {
                SqlStateDetail::new(class07, "00D", "invalid DATA target")
            }
            DynamicSQLErrorInvalidLEVELValue => {
                SqlStateDetail::new(class07, "00E", "invalid LEVEL value")
            }
            DynamicSQLErrorInvalidDatetimeIntervalCode => {
                SqlStateDetail::new(class07, "00F", "invalid DATETIME_INTERVAL_CODE")
            }

            ConnectionException => SqlStateDetail::new(class08, "000", "(no subclass)"),
            ConnectionExceptionSQLclientUnableToEstablishSQLconnection => SqlStateDetail::new(
                class08,
                "001",
                "SQL-client unable to establish SQL-connection",
            ),
            ConnectionExceptionConnectionNameInUse => {
                SqlStateDetail::new(class08, "002", "connection name in use")
            }
            ConnectionExceptionConnectionDoesNotExist => {
                SqlStateDetail::new(class08, "003", "connection does not exist")
            }
            ConnectionExceptionSQLserverRejectedEstablishmentOfSQLconnection => {
                SqlStateDetail::new(
                    class08,
                    "004",
                    "SQL-server rejected establishment of SQL-connection",
                )
            }
            ConnectionExceptionConnectionFailure => {
                SqlStateDetail::new(class08, "006", "connection failure")
            }
            ConnectionExceptionTransactionResolutionUnknown => {
                SqlStateDetail::new(class08, "007", "transaction resolution unknown")
            }
            // "I.." subclasses are apllodb-specific extensions.
            ConnectionExceptionDatabaseNotOpen => {
                SqlStateDetail::new(class08, "I00", "database not open")
            }
            ConnectionExceptionDatabaseAlreadyOpen => {
                SqlStateDetail::new(class08, "I01", "database already open")
            }

            TriggeredActionException => SqlStateDetail::new(class09, "000", "(no subclass)"),

            FeatureNotSupported => SqlStateDetail::new(class0A, "000", "(no subclass)"),
            FeatureNotSupportedMultipleServerTransactions => {
                SqlStateDetail::new(class0A, "001", "multiple server transactions")
            }

            InvalidTargetTypeSpecification => SqlStateDetail::new(class0D, "000", "(no subclass)"),

            InvalidSchemaNameListSpecification => {
                SqlStateDetail::new(class0E, "000", "(no subclass)")
            }

            LocatorException => SqlStateDetail::new(class0F, "000", "(no subclass)"),
            LocatorExceptionInvalidSpecification => {
                SqlStateDetail::new(class0F, "001", "invalid specification")
            }

            InvalidGrantor => SqlStateDetail::new(class0L, "000", "(no subclass)"),

            InvalidSQLinvokedProcedureReference => {
                SqlStateDetail::new(class0M, "000", "(no subclass)")
            }

            InvalidRoleSpecification => SqlStateDetail::new(class0P, "000", "(no subclass)"),

            InvalidTransformGroupNameSpecification => {
                SqlStateDetail::new(class0S, "000", "(no subclass)")
            }

            TargetTableDisagreesWithCursorSpecification => {
                SqlStateDetail::new(class0T, "000", "(no subclass)")
            }

            AttemptToAssignToNonupdatableColumn => {
                SqlStateDetail::new(class0U, "000", "(no subclass)")
            }

            AttemptToAssignToOrderingColumn => SqlStateDetail::new(class0V, "000", "(no subclass)"),

            ProhibitedStatementEncounteredDuringTriggerExecution => {
                SqlStateDetail::new(class0W, "000", "(no subclass)")
            }
            ProhibitedStatementEncounteredDuringTriggerExecutionModifyTableModifiedByDataChangeDeltaTable => {
                SqlStateDetail::new(
                    class0W,
                    "001",
                    "modify table modified by data change delta table",
                )
            }

            DiagnosticsException => SqlStateDetail::new(class0Z, "000", "(no subclass)"),
            DiagnosticsExceptionMaximumNumberOfStackedDiagnosticsAreasExceeded => {
                SqlStateDetail::new(
                    class0Z,
                    "001",
                    "maximum number of stacked diagnostics areas exceeded",
                )
            }

            CardinalityViolation => SqlStateDetail::new(class21, "000", "(no subclass)"),

            DataException => SqlStateDetail::new(class22, "000", "(no subclass)"),
            DataExceptionStringDataRightTruncation => {
                SqlStateDetail::new(class22, "001", "string data, right truncation")
            }
            DataExceptionNullValueNoIndicatorParameter => {
                SqlStateDetail::new(class22, "002", "null value, no indicator parameter")
            }
            DataExceptionNumericValueOutOfRange => {
                SqlStateDetail::new(class22, "003", "numeric value out of range")
            }
            DataExceptionNullValueNotAllowed => {
                SqlStateDetail::new(class22, "004", "null value not allowed")
            }
            DataExceptionErrorInAssignment => {
                SqlStateDetail::new(class22, "005", "error in assignment")
            }
            DataExceptionInvalidIntervalFormat => {
                SqlStateDetail::new(class22, "006", "invalid interval format")
            }
            DataExceptionInvalidDatetimeFormat => {
                SqlStateDetail::new(class22, "007", "invalid datetime format")
            }
            DataExceptionDatetimeFieldOverflow => {
                SqlStateDetail::new(class22, "008", "datetime field overflow")
            }
            DataExceptionInvalidTimeZoneDisplacementValue => {
                SqlStateDetail::new(class22, "009", "invalid time zone displacement value")
            }
            DataExceptionEscapeCharacterConflict => {
                SqlStateDetail::new(class22, "00B", "escape character conflict")
            }
            DataExceptionInvalidUseOfEscapeCharacter => {
                SqlStateDetail::new(class22, "00C", "invalid use of escape character")
            }
            DataExceptionInvalidEscapeOctet => {
                SqlStateDetail::new(class22, "00D", "invalid escape octet")
            }
            DataExceptionNullValueInArrayTarget => {
                SqlStateDetail::new(class22, "00E", "null value in array target")
            }
            DataExceptionZerolengthCharacterString => {
                SqlStateDetail::new(class22, "00F", "zero-length character string")
            }
            DataExceptionMostSpecificTypeMismatch => {
                SqlStateDetail::new(class22, "00G", "most specific type mismatch")
            }
            DataExceptionSequenceGeneratorLimitExceeded => {
                SqlStateDetail::new(class22, "00H", "sequence generator limit exceeded")
            }
            DataExceptionIntervalValueOutOfRange => {
                SqlStateDetail::new(class22, "00P", "interval value out of range")
            }
            DataExceptionMultisetValueOverflow => {
                SqlStateDetail::new(class22, "00Q", "multiset value overflow")
            }
            DataExceptionInvalidIndicatorParameterValue => {
                SqlStateDetail::new(class22, "010", "invalid indicator parameter value")
            }
            DataExceptionSubstringError => SqlStateDetail::new(class22, "011", "substring error"),
            DataExceptionDivisionByZero => SqlStateDetail::new(class22, "012", "division by zero"),
            DataExceptionInvalidPrecedingOrFollowingSizeInWindowFunction => SqlStateDetail::new(
                class22,
                "013",
                "invalid preceding or following size in window function",
            ),
            DataExceptionInvalidArgumentForNTILEFunction => {
                SqlStateDetail::new(class22, "014", "invalid argument for NTILE function")
            }
            DataExceptionIntervalFieldOverflow => {
                SqlStateDetail::new(class22, "015", "interval field overflow")
            }
            DataExceptionInvalidArgumentForNthValueFunction => {
                SqlStateDetail::new(class22, "016", "invalid argument for NTH_VALUE function")
            }
            DataExceptionInvalidCharacterValueForCast => {
                SqlStateDetail::new(class22, "018", "invalid character value for cast")
            }
            DataExceptionInvalidEscapeCharacter => {
                SqlStateDetail::new(class22, "019", "invalid escape character")
            }
            DataExceptionInvalidRegularExpression => {
                SqlStateDetail::new(class22, "01B", "invalid regular expression")
            }
            DataExceptionNullRowNotPermittedInTable => {
                SqlStateDetail::new(class22, "01C", "null row not permitted in table")
            }
            DataExceptionInvalidArgumentForNaturalLogarithm => {
                SqlStateDetail::new(class22, "01E", "invalid argument for natural logarithm")
            }
            DataExceptionInvalidArgumentForPowerFunction => {
                SqlStateDetail::new(class22, "01F", "invalid argument for power function")
            }
            DataExceptionInvalidArgumentForWidthBucketFunction => {
                SqlStateDetail::new(class22, "01G", "invalid argument for width bucket function")
            }
            DataExceptionInvalidRowVersion => {
                SqlStateDetail::new(class22, "01H", "invalid row version")
            }
            DataExceptionInvalidQueryRegularExpression => {
                SqlStateDetail::new(class22, "01S", "invalid Query regular expression")
            }
            DataExceptionInvalidQueryOptionFlag => {
                SqlStateDetail::new(class22, "01T", "invalid Query option flag")
            }
            DataExceptionAttemptToReplaceAZerolengthString => {
                SqlStateDetail::new(class22, "01U", "attempt to replace a zero-length string")
            }
            DataExceptionInvalidQueryReplacementString => {
                SqlStateDetail::new(class22, "01V", "invalid Query replacement string")
            }
            DataExceptionInvalidRowCountInFetchFirstClause => {
                SqlStateDetail::new(class22, "01W", "invalid row count in fetch first clause")
            }
            DataExceptionInvalidRowCountInResultOffsetClause => {
                SqlStateDetail::new(class22, "01X", "invalid row count in result offset clause")
            }
            DataExceptionCharacterNotInRepertoire => {
                SqlStateDetail::new(class22, "021", "character not in repertoire")
            }
            DataExceptionIndicatorOverflow => {
                SqlStateDetail::new(class22, "022", "indicator overflow")
            }
            DataExceptionInvalidParameterValue => {
                SqlStateDetail::new(class22, "023", "invalid parameter value")
            }
            DataExceptionUnterminatedCString => {
                SqlStateDetail::new(class22, "024", "unterminated C string")
            }
            DataExceptionInvalidEscapeSequence => {
                SqlStateDetail::new(class22, "025", "invalid escape sequence")
            }
            DataExceptionStringDataLengthMismatch => {
                SqlStateDetail::new(class22, "026", "string data, length mismatch")
            }
            DataExceptionTrimError => SqlStateDetail::new(class22, "027", "trim error"),
            DataExceptionNoncharacterInUCSString => {
                SqlStateDetail::new(class22, "029", "noncharacter in UCS string")
            }
            DataExceptionNullValueSubstitutedForMutatorSubjectParameter => SqlStateDetail::new(
                class22,
                "02D",
                "null value substituted for mutator subject parameter",
            ),
            DataExceptionArrayElementError => {
                SqlStateDetail::new(class22, "02E", "array element error")
            }
            DataExceptionArrayDataRightTruncation => {
                SqlStateDetail::new(class22, "02F", "array data, right truncation")
            }
            DataExceptionInvalidRepeatArgumentInASampleClause => {
                SqlStateDetail::new(class22, "02G", "invalid repeat argument in a sample clause")
            }
            DataExceptionInvalidSampleSize => {
                SqlStateDetail::new(class22, "02H", "invalid sample size")
            }
            // "I.." subclasses are apllodb-specific extensions.
            DataExceptionIllegalConversion => {
                SqlStateDetail::new(class22, "I00", "illegal data conversion")
            }
            DataExceptionIllegalComparison => {
                SqlStateDetail::new(class22, "I01", "illegal data comparison")
            }
            DataExceptionIllegalOperation => {
                SqlStateDetail::new(class22, "I02", "illegal operation to data")
            }

            IntegrityConstraintViolation => SqlStateDetail::new(class23, "000", "(no subclass)"),
            IntegrityConstraintViolationRestrictViolation => {
                SqlStateDetail::new(class23, "001", "restrict violation")
            }
            IntegrityConstraintNotNullViolation => {
                SqlStateDetail::new(class23, "I00", "not null violation")
            }
            IntegrityConstraintUniqueViolation => {
                SqlStateDetail::new(class23, "I01", "unique violation")
            }

            InvalidCursorState => SqlStateDetail::new(class24, "000", "(no subclass)"),

            InvalidTransactionState => SqlStateDetail::new(class25, "000", "(no subclass)"),
            InvalidTransactionStateActiveSQLtransaction => {
                SqlStateDetail::new(class25, "001", "active SQL-transaction")
            }
            InvalidTransactionStateBranchTransactionAlreadyActive => {
                SqlStateDetail::new(class25, "002", "branch transaction already active")
            }
            InvalidTransactionStateInappropriateAccessModeForBranchTransaction => {
                SqlStateDetail::new(
                    class25,
                    "003",
                    "inappropriate access mode for branch transaction",
                )
            }
            InvalidTransactionStateInappropriateIsolationLevelForBranchTransaction => {
                SqlStateDetail::new(
                    class25,
                    "004",
                    "inappropriate isolation level for branch transaction",
                )
            }
            InvalidTransactionStateNoActiveSQLtransactionForBranchTransaction => {
                SqlStateDetail::new(
                    class25,
                    "005",
                    "no active SQL-transaction for branch transaction",
                )
            }
            InvalidTransactionStateReadonlySQLtransaction => {
                SqlStateDetail::new(class25, "006", "read-only SQL-transaction")
            }
            InvalidTransactionStateSchemaAndDataStatementMixingNotSupported => SqlStateDetail::new(
                class25,
                "007",
                "schema and data statement mixing not supported",
            ),
            InvalidTransactionStateHeldCursorRequiresSameIsolationLevel => {
                SqlStateDetail::new(class25, "008", "held cursor requires same isolation level")
            }

            InvalidSQLStatementName => SqlStateDetail::new(class26, "000", "(no subclass)"),

            TriggeredDataChangeViolation => SqlStateDetail::new(class27, "000", "(no subclass)"),
            TriggeredDataChangeViolationModifyTableModifiedByDataChangeDeltaTable => {
                SqlStateDetail::new(
                    class27,
                    "001",
                    "modify table modified by data change delta table",
                )
            }

            InvalidAuthorizationSpecification => {
                SqlStateDetail::new(class28, "000", "(no subclass)")
            }

            DependentPrivilegeDescriptorsStillExist => {
                SqlStateDetail::new(class2B, "000", "(no subclass)")
            }

            InvalidCharacterSetName => SqlStateDetail::new(class2C, "000", "(no subclass)"),

            InvalidTransactionTermination => SqlStateDetail::new(class2D, "000", "(no subclass)"),

            InvalidConnectionName => SqlStateDetail::new(class2E, "000", "(no subclass)"),

            SQLRoutineException => SqlStateDetail::new(class2F, "000", "(no subclass)"),
            SQLRoutineExceptionModifyingSQLdataNotPermitted => {
                SqlStateDetail::new(class2F, "002", "modifying SQL-data not permitted")
            }
            SQLRoutineExceptionProhibitedSQLstatementAttempted => {
                SqlStateDetail::new(class2F, "003", "prohibited SQL-statement attempted")
            }
            SQLRoutineExceptionReadingSQLdataNotPermitted => {
                SqlStateDetail::new(class2F, "004", "reading SQL-data not permitted")
            }
            SQLRoutineExceptionFunctionExecutedNoReturnStatement => {
                SqlStateDetail::new(class2F, "005", "function executed no return statement")
            }

            InvalidCollationName => SqlStateDetail::new(class2H, "000", "(no subclass)"),

            InvalidSQLStatementIdentifier => SqlStateDetail::new(class30, "000", "(no subclass)"),

            InvalidSQLDescriptorName => SqlStateDetail::new(class33, "000", "(no subclass)"),

            InvalidCursorName => SqlStateDetail::new(class34, "000", "(no subclass)"),

            InvalidConditionNumber => SqlStateDetail::new(class35, "000", "(no subclass)"),

            CursorSensitivityException => SqlStateDetail::new(class36, "000", "(no subclass)"),
            CursorSensitivityExceptionRequestRejected => {
                SqlStateDetail::new(class36, "001", "request rejected")
            }
            CursorSensitivityExceptionRequestFailed => {
                SqlStateDetail::new(class36, "002", "request failed")
            }

            ExternalRoutineException => SqlStateDetail::new(class38, "000", "(no subclass)"),
            ExternalRoutineExceptionContainingSQLNotPermitted => {
                SqlStateDetail::new(class38, "001", "containing SQL not permitted")
            }
            ExternalRoutineExceptionModifyingSQLdataNotPermitted => {
                SqlStateDetail::new(class38, "002", "modifying SQL-data not permitted")
            }
            ExternalRoutineExceptionProhibitedSQLstatementAttempted => {
                SqlStateDetail::new(class38, "003", "prohibited SQL-statement attempted")
            }
            ExternalRoutineExceptionReadingSQLdataNotPermitted => {
                SqlStateDetail::new(class38, "004", "reading SQL-data not permitted")
            }

            ExternalRoutineInvocationException => {
                SqlStateDetail::new(class39, "000", "(no subclass)")
            }
            ExternalRoutineInvocationExceptionNullValueNotAllowed => {
                SqlStateDetail::new(class39, "004", "null value not allowed")
            }

            SavepointException => SqlStateDetail::new(class3B, "000", "(no subclass)"),
            SavepointExceptionInvalidSpecification => {
                SqlStateDetail::new(class3B, "001", "invalid specification")
            }
            SavepointExceptionTooMany => SqlStateDetail::new(class3B, "002", "too many"),

            AmbiguousCursorName => SqlStateDetail::new(class3C, "000", "(no subclass)"),

            InvalidCatalogName => SqlStateDetail::new(class3D, "000", "(no subclass)"),

            InvalidSchemaName => SqlStateDetail::new(class3F, "000", "(no subclass)"),

            TransactionRollback => SqlStateDetail::new(class40, "000", "(no subclass)"),
            TransactionRollbackSerializationFailure => {
                SqlStateDetail::new(class40, "001", "serialization failure")
            }
            TransactionRollbackIntegrityConstraintViolation => {
                SqlStateDetail::new(class40, "002", "integrity constraint violation")
            }
            TransactionRollbackStatementCompletionUnknown => {
                SqlStateDetail::new(class40, "003", "statement completion unknown")
            }
            TransactionRollbackTriggeredActionException => {
                SqlStateDetail::new(class40, "004", "triggered action exception")
            }
            TransactionRollbackDeadlock => SqlStateDetail::new(class40, "I00", "deadlock detected"),

            SyntaxErrorOrAccessRuleViolation => {
                SqlStateDetail::new(class42, "000", "(no subclass)")
            }
            SyntaxErrorOrAccessRuleViolationSyntaxError => {
                SqlStateDetail::new(class42, "I00", "syntax error")
            }

            WithCheckOptionViolation => SqlStateDetail::new(class44, "000", "(no subclass)"),

            ReservedForISO9579 => SqlStateDetail::new(classHZ, "???", ""),

            // apllodb-original classes below.
            IoError => SqlStateDetail::new(classIO, "000", "(no subclass)"),

            NameError => SqlStateDetail::new(classNM, "000", "(no subclass)"),
            NameErrorNotFound => SqlStateDetail::new(classNM, "001", "not found by name"),
            NameErrorAmbiguous => SqlStateDetail::new(classNM, "002", "ambiguous name"),
            NameErrorDuplicate => SqlStateDetail::new(classNM, "003", "duplicate name"),
            NameErrorTooLong => SqlStateDetail::new(classNM, "004", "too long name"),

            DdlError => SqlStateDetail::new(classSC, "000", "(no subclass)"),

            SystemError => SqlStateDetail::new(classSY, "000", "(no subclass)"),
        }
    }
}

impl Display for SqlState {
    // Renders the full SQLSTATE plus its category and texts for diagnostics.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let detail = self.detail();
        write!(
            f,
            r#"SQLSTATE: "{}", Category: "{}", ClassText: "{}", SubclassText: "{}""#,
            detail.sqlstate(),
            detail.category(),
            &detail.class.class_text,
            &detail.subclass_text,
        )
    }
}
// Command-line configuration for pundoc (Markdown -> LaTeX / PDF converter).
// Parsing is driven by StructOpt derive attributes on `Config`.
use std::path::{PathBuf};
use std::fs::File;
use std::str::FromStr;
use std::io::{self, Read, Write};
use std::fmt;

use void::Void;
use boolinator::Boolinator;
use structopt::StructOpt;

/// Parsed command-line options.
#[derive(StructOpt, Debug)]
#[structopt(name = "pundoc", about = "Convert Markdown to LaTeX / PDF")]
pub struct Config {
    /// Output destination; `-` (the default) means stdout.
    #[structopt(short = "o", long = "out", long = "output", default_value = "-")]
    pub output: FileOrStdio,
    /// Input source (positional argument); `-` or empty means stdin.
    #[structopt()]
    pub input: FileOrStdio,
    /// Output format; when omitted, `normalize()` infers it from the output
    /// file extension.
    #[structopt(short = "t", long = "to", long = "type", parse(try_from_str = "OutType::from_str"))]
    pub output_type: Option<OutType>,
}

impl Config {
    /// Fills in `output_type` when the user did not pass one explicitly:
    /// stdout defaults to LaTeX; a file defaults to LaTeX only for a
    /// `.tex` / `.latex` extension (case-insensitive), otherwise PDF.
    pub fn normalize(&mut self) {
        let output = &self.output;
        self.output_type.get_or_insert_with(|| match output {
            FileOrStdio::StdIo => OutType::Latex,
            FileOrStdio::File(path) => {
                path.extension()
                    .and_then(|s| s.to_str())
                    .and_then(|s| {
                        // Boolinator: `true.as_some(v)` -> Some(v), `false` -> None.
                        (s.eq_ignore_ascii_case("tex") || s.eq_ignore_ascii_case("latex"))
                            .as_some(OutType::Latex)
                    })
                    .unwrap_or(OutType::Pdf)
            }
        });
    }
}

/// Either a concrete file path or the process's standard input/output.
#[derive(Debug)]
pub enum FileOrStdio {
    StdIo,
    File(PathBuf),
}

impl FileOrStdio {
    /// Opens this source for reading.
    ///
    /// For stdin, `Box::leak` deliberately leaks a heap-allocated `Stdin` so
    /// that `.lock()` can return a `StdinLock` with a `'static` lifetime.
    /// The leak happens once per call.
    ///
    /// NOTE(review): `Box<Read>` is the pre-2018 bare-trait-object spelling;
    /// modern editions would write `Box<dyn Read>`.
    pub fn to_read(&self) -> Box<Read> {
        match self {
            FileOrStdio::StdIo => Box::new(Box::leak(Box::new(io::stdin())).lock()),
            FileOrStdio::File(path) => Box::new(File::open(path).expect("can't open input source")),
        }
    }

    /// Opens this destination for writing; same `'static`-lock leak trick as
    /// `to_read`, applied to stdout. A file destination is created/truncated.
    pub fn to_write(&self) -> Box<Write> {
        match self {
            FileOrStdio::StdIo => Box::new(Box::leak(Box::new(io::stdout())).lock()),
            FileOrStdio::File(path) => Box::new(File::create(path).expect("can't open output source")),
        }
    }
}

/// Infallible parse (`Err = Void`): `""` and `"-"` map to stdio, anything
/// else is treated as a file path.
impl FromStr for FileOrStdio {
    type Err = Void;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "" | "-" => Ok(FileOrStdio::StdIo),
            s => Ok(FileOrStdio::File(PathBuf::from(s))),
        }
    }
}

/// Supported output formats.
#[derive(Debug, Clone, Copy)]
pub enum OutType {
    Latex,
    Pdf,
}

/// Error carrying the unrecognized type string (borrowed from the input).
#[derive(Debug)]
struct OutTypeParseError<'a>(&'a str);

impl<'a> fmt::Display for OutTypeParseError<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "unknown type `{}`", self.0)
    }
}

impl OutType {
    /// Case-insensitive lookup of an output type name.
    /// Accepts "tex"/"latex" for LaTeX and "pdf" for PDF.
    /// (Inherent fn, not `FromStr`, because the error borrows the input.)
    fn from_str<'a>(s: &'a str) -> Result<OutType, OutTypeParseError<'a>> {
        let mapping = &[
            (&["tex", "latex"][..], OutType::Latex),
            (&["pdf"][..], OutType::Pdf)
        ];
        for &(list, res) in mapping {
            for variant in list {
                if s.eq_ignore_ascii_case(variant) {
                    return Ok(res);
                }
            }
        }
        Err(OutTypeParseError(s))
    }
}
// an attribute is metadata applied to some module, crate or item. This // metadata can be used to/for: // // - conditional compilation of code // - set crate name, version and type (binary or library) // - disable lints (warnings) // - enable compile features (macros, glob imports, etc.) // - link to a foreign library // - mark functions as unit tests // - mark functions that will be part of a benchmark // when attributes apply to a whole crate, their syntax is // `#![crate_attribute]`, and when they apply to a module // or item, the syntax is `#[item_attribute]` // attributes can take arguments with different syntaxes // // - #[attribute = "value"] // - #[attribute(key = "value")] // - #[attribute(value)] // // attributes can have multiple values and can be separated over // multiple lines, too: // // #[attribute(value, value2)] // // #[attribute(value, value2, value3, // value4, value5)] fn used_function() {} // `#[allow(dead_code)]` is an attribute that disables the // `dead_code` lint #[allow(dead_code)] fn unused_function() {} #[allow(dead_code)] fn noisy_unused_function() {} // configuration conditional checks are possible through two // different operators: // - the `cfg` attribute: `#[cfg(...)]` in attribute position // - the `cfg!` macro: `cfg!(...)` in boolean expressions // // while the former enables conditional compilation, the latter // conditionally evaluates to `true` or `false` literals allowing // for checks at run-time. Both utilize identical argument syntax #[cfg(target_os = "linux")] fn are_you_on_linux() { println!("You are running linux"); } #[cfg(some_condition)] fn conditional_function() { println!("condition met!"); } fn main() { used_function(); are_you_on_linux(); println!("Are you sure?"); if cfg!(target_os = "linux") { println!("Yes. It's definitely linux!"); } else { println!("Yes. It's definitely *not* linux!"); } conditional_function(); }
// Advent of Code day 1: find 2 (part 1) or 3 (part 2) entries that sum to
// 2020 and return their product.
use std::cmp::Ord;
use std::ops::Deref;

use anyhow::{anyhow, Result};
use itertools::Itertools as _;

use crate::{utils, Challenge};

pub struct Day01;

impl Challenge for Day01 {
    const DAY_NUMBER: u32 = 1;

    type InputType = SortedVec<u32>;
    type OutputType = u32;

    /// Finds the pair of entries summing to 2020 and returns their product.
    /// Errors if no such pair exists.
    fn part1(input: &Self::InputType) -> Result<Self::OutputType> {
        input
            .iter()
            .copied()
            // All unordered pairs (a, b) from the input.
            .tuple_combinations()
            .find(|(a, b)| a + b == 2020)
            .map(|(a, b)| a * b)
            .ok_or_else(|| anyhow!("Could not find combination"))
    }

    /// Same as part 1 but over triples: finds (a, b, c) with a + b + c == 2020
    /// and returns a * b * c.
    fn part2(input: &Self::InputType) -> Result<Self::OutputType> {
        input
            .iter()
            .copied()
            .tuple_combinations()
            .find(|(a, b, c)| a + b + c == 2020)
            .map(|(a, b, c)| a * b * c)
            .ok_or_else(|| anyhow!("Could not find combination"))
    }

    /// Parses one number per line into a sorted vector.
    fn parse(content: &str) -> Result<Self::InputType> {
        utils::parse_line_separated_list(content)
            .map(SortedVec::new)
            .map_err(Into::into)
    }
}

/// A `Vec` that is sorted ascending once at construction time.
/// Read-only access is exposed via `Deref<Target = Vec<T>>`, so the
/// sorted invariant cannot be broken after `new`.
#[derive(Clone, Debug)]
pub struct SortedVec<T>(Vec<T>);

impl<T> SortedVec<T>
where
    T: Ord,
{
    /// Takes ownership of `inner`, sorts it ascending, and wraps it.
    pub fn new(mut inner: Vec<T>) -> Self {
        inner.sort();
        Self(inner)
    }
}

impl<T> Deref for SortedVec<T> {
    type Target = Vec<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Example input from the puzzle statement.
    static NUMBERS: &[u32] = &[1721, 979, 366, 299, 675, 1456];

    #[test]
    fn test_part1() {
        assert_eq!(
            Day01::part1(&SortedVec::new(NUMBERS.to_owned())).unwrap(),
            514579
        );
    }

    #[test]
    fn test_solve2() {
        assert_eq!(
            Day01::part2(&SortedVec::new(NUMBERS.to_owned())).unwrap(),
            241861950
        );
    }
}

crate::benchmark_challenge!(crate::day01::Day01);
// svd2rust-generated accessors for the CLK_TRIM_ECO_CTL register (ECO
// crystal-oscillator trim). Field layout per the readers below:
// WDTRIM [2:0], ATRIM [7:4], FTRIM [9:8], RTRIM [11:10], GTRIM [13:12],
// ITRIM [21:16]. Edit the SVD and regenerate rather than hand-modifying.
#[doc = "Reader of register CLK_TRIM_ECO_CTL"]
pub type R = crate::R<u32, super::CLK_TRIM_ECO_CTL>;
#[doc = "Writer for register CLK_TRIM_ECO_CTL"]
pub type W = crate::W<u32, super::CLK_TRIM_ECO_CTL>;
#[doc = "Register CLK_TRIM_ECO_CTL `reset()`'s with value 0x001f_0003"]
impl crate::ResetValue for super::CLK_TRIM_ECO_CTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x001f_0003
    }
}
#[doc = "Reader of field `WDTRIM`"]
pub type WDTRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WDTRIM`"]
pub struct WDTRIM_W<'a> {
    w: &'a mut W,
}
impl<'a> WDTRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits [2:0], then insert the new 3-bit value.
        self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
        self.w
    }
}
#[doc = "Reader of field `ATRIM`"]
pub type ATRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ATRIM`"]
pub struct ATRIM_W<'a> {
    w: &'a mut W,
}
impl<'a> ATRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bit offset 4.
        self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);
        self.w
    }
}
#[doc = "Reader of field `FTRIM`"]
pub type FTRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `FTRIM`"]
pub struct FTRIM_W<'a> {
    w: &'a mut W,
}
impl<'a> FTRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bit offset 8.
        self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
        self.w
    }
}
#[doc = "Reader of field `RTRIM`"]
pub type RTRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RTRIM`"]
pub struct RTRIM_W<'a> {
    w: &'a mut W,
}
impl<'a> RTRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bit offset 10.
        self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);
        self.w
    }
}
#[doc = "Reader of field `GTRIM`"]
pub type GTRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `GTRIM`"]
pub struct GTRIM_W<'a> {
    w: &'a mut W,
}
impl<'a> GTRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bit offset 12.
        self.w.bits = (self.w.bits & !(0x03 << 12)) | (((value as u32) & 0x03) << 12);
        self.w
    }
}
#[doc = "Reader of field `ITRIM`"]
pub type ITRIM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ITRIM`"]
pub struct ITRIM_W<'a> {
    w: &'a mut W,
}
impl<'a> ITRIM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 6-bit field at bit offset 16.
        self.w.bits = (self.w.bits & !(0x3f << 16)) | (((value as u32) & 0x3f) << 16);
        self.w
    }
}
// Field readers: each extracts its bit range from the cached register value.
impl R {
    #[doc = "Bits 0:2 - Watch Dog Trim - Delta voltage below stead state level 0x0 - 50mV 0x1 - 75mV 0x2 - 100mV 0x3 - 125mV 0x4 - 150mV 0x5 - 175mV 0x6 - 200mV 0x7 - 225mV"]
    #[inline(always)]
    pub fn wdtrim(&self) -> WDTRIM_R {
        WDTRIM_R::new((self.bits & 0x07) as u8)
    }
    #[doc = "Bits 4:7 - Amplitude trim to set the crystal drive level when ECO_CONFIG.AGC_EN=1. WARNING: use care when setting this field because driving a crystal beyond its rated limit can permanently damage the crystal. 0x0 - 150mV 0x1 - 175mV 0x2 - 200mV 0x3 - 225mV 0x4 - 250mV 0x5 - 275mV 0x6 - 300mV 0x7 - 325mV 0x8 - 350mV 0x9 - 375mV 0xA - 400mV 0xB - 425mV 0xC - 450mV 0xD - 475mV 0xE - 500mV 0xF - 525mV"]
    #[inline(always)]
    pub fn atrim(&self) -> ATRIM_R {
        ATRIM_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:9 - Filter Trim - 3rd harmonic oscillation"]
    #[inline(always)]
    pub fn ftrim(&self) -> FTRIM_R {
        FTRIM_R::new(((self.bits >> 8) & 0x03) as u8)
    }
    #[doc = "Bits 10:11 - Feedback resistor Trim"]
    #[inline(always)]
    pub fn rtrim(&self) -> RTRIM_R {
        RTRIM_R::new(((self.bits >> 10) & 0x03) as u8)
    }
    #[doc = "Bits 12:13 - Gain Trim - Startup time"]
    #[inline(always)]
    pub fn gtrim(&self) -> GTRIM_R {
        GTRIM_R::new(((self.bits >> 12) & 0x03) as u8)
    }
    #[doc = "Bits 16:21 - Current Trim"]
    #[inline(always)]
    pub fn itrim(&self) -> ITRIM_R {
        ITRIM_R::new(((self.bits >> 16) & 0x3f) as u8)
    }
}
// Field writers: each returns a write proxy that masks and shifts its value
// into place via `bits()`.
impl W {
    #[doc = "Bits 0:2 - Watch Dog Trim - Delta voltage below stead state level 0x0 - 50mV 0x1 - 75mV 0x2 - 100mV 0x3 - 125mV 0x4 - 150mV 0x5 - 175mV 0x6 - 200mV 0x7 - 225mV"]
    #[inline(always)]
    pub fn wdtrim(&mut self) -> WDTRIM_W {
        WDTRIM_W { w: self }
    }
    #[doc = "Bits 4:7 - Amplitude trim to set the crystal drive level when ECO_CONFIG.AGC_EN=1. WARNING: use care when setting this field because driving a crystal beyond its rated limit can permanently damage the crystal. 0x0 - 150mV 0x1 - 175mV 0x2 - 200mV 0x3 - 225mV 0x4 - 250mV 0x5 - 275mV 0x6 - 300mV 0x7 - 325mV 0x8 - 350mV 0x9 - 375mV 0xA - 400mV 0xB - 425mV 0xC - 450mV 0xD - 475mV 0xE - 500mV 0xF - 525mV"]
    #[inline(always)]
    pub fn atrim(&mut self) -> ATRIM_W {
        ATRIM_W { w: self }
    }
    #[doc = "Bits 8:9 - Filter Trim - 3rd harmonic oscillation"]
    #[inline(always)]
    pub fn ftrim(&mut self) -> FTRIM_W {
        FTRIM_W { w: self }
    }
    #[doc = "Bits 10:11 - Feedback resistor Trim"]
    #[inline(always)]
    pub fn rtrim(&mut self) -> RTRIM_W {
        RTRIM_W { w: self }
    }
    #[doc = "Bits 12:13 - Gain Trim - Startup time"]
    #[inline(always)]
    pub fn gtrim(&mut self) -> GTRIM_W {
        GTRIM_W { w: self }
    }
    #[doc = "Bits 16:21 - Current Trim"]
    #[inline(always)]
    pub fn itrim(&mut self) -> ITRIM_W {
        ITRIM_W { w: self }
    }
}
use artell_domain::scheduler::{Scheduler, SchedulerRepository}; #[derive(Debug, Error)] pub enum Error { #[error("scheduler is not initialized yet")] SchedulerNotInitialized, #[error(transparent)] Others(#[from] anyhow::Error), } pub async fn execute(scheduler_repo: impl SchedulerRepository) -> Result<Scheduler, Error> { let scheduler = scheduler_repo .find() .await? .ok_or(Error::SchedulerNotInitialized)?; Ok(scheduler) }
// Regenerates a reups product database: clones product git repos, resolves a
// dependency graph from their ups table files, builds each product with an
// external build tool, and declares the results back to the database.
use crate::repo_wrapper::RepoSourceWrapper;
use crypto::digest::Digest;
use crypto::sha1::Sha1;
use fnv::FnvHashMap;
use fs_extra::dir::{copy, remove, CopyOptions};
use git2::Repository;
use log;
use log::{debug, info, warn};
use reqwest;
pub use reups::DBBuilderTrait;
pub use reups_lib as reups;
use std::collections::{HashMap, HashSet};
use std::io::{BufWriter, Write};
use std::iter::FromIterator;
pub use std::path::PathBuf;
use std::str;
use tempdir::TempDir;
use time;
use yaml_rust;

/// User-supplied knobs controlling a regeneration run.
pub struct RegenOptions {
    // Extra branches to try checking out, in priority order before "master".
    pub branches: Option<Vec<String>>,
    // Optional local YAML overriding/extending the remote package map.
    pub local_yaml: Option<PathBuf>,
    // Directory under which product repos are cloned.
    pub clone_root: String,
    // Directory under which built products are installed.
    pub install_root: String,
    // Version string used for installs and declares.
    pub version: String,
    // Executable invoked with fetch/prep/config/build/install verbs.
    pub build_tool: String,
    // Optional tag applied when declaring to the database.
    pub tag: Option<String>,
    // URL of the YAML document mapping product names to repo URLs.
    pub remote_package_url: String,
}

/// Driver state for one regeneration run.
pub struct Regenerate<'a> {
    // product name -> repo URL mapping (remote YAML + optional local overlay).
    product_urls: RepoSourceWrapper,
    // Dependency graph accumulated from the products' ups table files.
    graph: reups::graph::Graph,
    db: &'a mut reups::DB,
    // product name -> opened git repository.
    repo_map: HashMap<String, Repository>,
    // Branches to try during checkout, in order.
    branches: Vec<String>,
    options: RegenOptions,
    // Products already built this run (prevents duplicate builds).
    build_completed: HashSet<String>,
    // Timestamped log file capturing build-tool output.
    build_log: BufWriter<std::fs::File>,
}

impl<'a> Regenerate<'a> {
    /// Downloads and parses the remote package map, prepares the branch
    /// priority list (user branches first, then "master"), and opens a
    /// timestamped build log.
    ///
    /// NOTE(review): the reqwest/YAML steps use `unwrap()`, so a network or
    /// parse failure panics rather than returning `Err`.
    pub fn new(db: &'a mut reups::DB, options: RegenOptions) -> Result<Regenerate<'a>, String> {
        // get the mapping from defined url
        debug!("Fetching remote package list");
        let mut response = reqwest::get(options.remote_package_url.as_str()).unwrap();
        let mapping = if response.status().is_success() {
            let body = response.text().unwrap();
            let mut parsed = yaml_rust::YamlLoader::load_from_str(&body).unwrap();
            // This is not using multi paged yaml, so just take the first
            parsed.remove(0)
        } else {
            return Err("There was a problem fetch or parsing the remote map".to_string());
        };
        let repo_map = HashMap::new();
        let mut br = vec!["master".to_string()];
        if let Some(in_br) = options.branches.as_ref() {
            br = [&in_br[..], &br[..]].concat();
        }
        let f = std::fs::File::create(format!("build_log-{}.log", time::now().rfc3339()))
            .or_else(|e| return Err(format!("{}", e)))?;
        Ok(Regenerate {
            product_urls: RepoSourceWrapper::new(mapping, &options.local_yaml),
            db: db,
            graph: reups::graph::Graph::new(),
            repo_map,
            branches: br,
            options: options,
            build_completed: HashSet::new(),
            build_log: BufWriter::new(f),
        })
    }

    /// Ensures a clone of `product` exists under `clone_root` and is recorded
    /// in `repo_map`. An unreadable existing clone is removed and re-cloned;
    /// clone failures panic.
    fn get_or_clone_repo(&mut self, product: &str) -> Result<(), String> {
        let repo_src = match self.product_urls.get_url(product) {
            Some(x) => x,
            None => return Err("No url for associated product".to_string()),
        };
        let mut on_disk = PathBuf::from(&self.options.clone_root);
        on_disk.push(product);
        let repo = match if on_disk.exists() {
            debug!(
                "Using repo found on disk for {} at {}",
                product,
                &on_disk.to_str().unwrap()
            );
            match Repository::open(&on_disk) {
                Ok(x) => Ok(x),
                Err(_) => {
                    warn!("There was a problem opening the on disk repo for {}, removing and re-cloning", product);
                    let _ = remove(&on_disk);
                    Repository::clone(repo_src, on_disk)
                        .or_else(|e| panic!("Failed to clone: {}", e))
                }
            }
        } else {
            debug!("Cloning {} from {}", product, repo_src);
            Repository::clone(repo_src, on_disk)
        } {
            Ok(repo) => repo,
            Err(e) => panic!("Failed to clone: {}", e),
        };
        self.repo_map.insert(product.to_string(), repo);
        Ok(())
    }

    /// Checks out the first branch/tag from the priority list that resolves
    /// in the product's repo, then points HEAD at the matching ref
    /// (refs/tags/... for tags, refs/remotes/... otherwise).
    fn checkout_branch(&self, repo_name: &str) -> Result<(), String> {
        let repo = self.repo_map.get(repo_name).unwrap();
        let mut success = false;
        // if the product is not based on master, replace the branches list
        // with one that contains the base branch instead of master
        let branches = if let Some(name) = self.product_urls.has_ref(repo_name) {
            let mut b: Vec<String> = self
                .branches
                .iter()
                .filter_map(|x| {
                    if x != &"master".to_string() {
                        Some(x.clone())
                    } else {
                        None
                    }
                })
                .collect();
            b.push(name);
            b
        } else {
            self.branches.clone()
        };
        for name in branches.iter() {
            debug!(
                "Trying to checkout {} in {}",
                name,
                repo.workdir().unwrap().to_str().unwrap()
            );
            // Skip names that don't resolve or can't be checked out; the next
            // candidate in the priority list is tried instead.
            let tree = match repo.revparse_single(name) {
                Ok(x) => x,
                Err(_) => continue,
            };
            match repo.checkout_tree(&tree, None) {
                Ok(_) => (),
                Err(_) => continue,
            };
            let head = match tree.kind() {
                Some(k) => match k {
                    git2::ObjectType::Tag => format!("refs/tags/{}", name),
                    _ => format!("refs/remotes/{}", name),
                },
                None => panic!("No target for specified name"),
            };
            match repo.set_head(&head) {
                Ok(x) => x,
                Err(e) => {
                    return Err(format!(
                        "Could not set {} to branch {} error {}",
                        repo_name, name, e
                    ))
                }
            }
            success = true;
            break;
        }
        match success {
            true => Ok(()),
            false => Err(format!("Could not find branch to checkout")),
        }
    }

    /// Returns the commit SHA that HEAD of `name`'s repo points at, as a hex
    /// string. Panics if the repo was never cloned (missing `repo_map` entry).
    fn get_sha_of_head(&self, name: &str) -> Result<String, String> {
        let repo = self.repo_map.get(name).unwrap();
        let head = match repo.head() {
            Ok(v) => v,
            Err(e) => return Err(format!("{}", e)),
        };
        let target = head.target().unwrap();
        Ok(format!("{}", target))
    }

    /// Adds `name` to the dependency graph, reads its ups/<name>.table file,
    /// and recursively clones/checks out/graphs every required dependency,
    /// connecting edges labelled with the dependency's HEAD SHA.
    /// (Optional dependencies are currently commented out.)
    fn graph_repo(&mut self, name: &str, node_type: reups::graph::NodeType) {
        let location = {
            let repo = self.repo_map.get(name).unwrap();
            self.graph
                .add_or_update_product(name.to_string(), node_type);
            repo.workdir().unwrap().clone().to_path_buf()
        };
        let mut table_file = location.clone();
        table_file.push(format!("ups/{}.table", name));
        let table =
            reups::table::Table::from_file(name.to_string(), table_file, location.to_path_buf())
                .unwrap();
        use reups::graph::NodeType;
        for (dep_map, node_type) in vec![
            &table.inexact.as_ref().unwrap().required,
            //&table.inexact.as_ref().unwrap().optional,
        ]
        .iter()
        .zip(vec![
            NodeType::Required,
            // NodeType::Optional
        ]) {
            for (dep_name, _) in dep_map.iter() {
                let product_added = self.graph.has_product(dep_name);
                if !product_added {
                    let _ = self.get_or_clone_repo(dep_name);
                    let _ = self.checkout_branch(dep_name);
                    self.graph_repo(dep_name, node_type.clone())
                }
                let sha = self.get_sha_of_head(dep_name).unwrap();
                let _ = self
                    .graph
                    .connect_products(&name.to_string(), dep_name, sha);
            }
        }
    }

    /// Computes a deterministic identity for `product` by SHA1-hashing, in
    /// DFS post-order, the version hash (or HEAD SHA when unversioned) of
    /// every node in its dependency subgraph.
    fn make_product_id(&self, product: &str) -> Result<String, String> {
        let mut hasher = Sha1::new();
        for node in self.graph.dfs_post_order(product)? {
            let hashes = self.graph.product_versions(&self.graph.get_name(node));
            let hash = match hashes.len() {
                0 => {
                    let name = self.graph.get_name(node);
                    self.get_sha_of_head(&name).unwrap()
                }
                _ => hashes[0].clone(),
            };
            hasher.input(hash.as_bytes());
        }
        let id = hasher.result_str();
        Ok(id)
    }

    /// Builds the environment-variable map for building `product` by running
    /// reups table setup for each product in `products`. The product being
    /// installed uses its local (not-yet-declared) table; every other product
    /// must already be declared in the database.
    fn accumulate_env(
        &self,
        product: &str,
        product_repo: &PathBuf,
        products: &Vec<String>,
    ) -> Result<FnvHashMap<String, String>, String> {
        debug!("Building env for {}", product);
        let mut env_vars = FnvHashMap::default();
        dbg!(product_repo);
        for node_name in products.iter() {
            debug!("Looking at node {}", node_name);
            let node_id = self.make_product_id(node_name)?;
            // get the table for the node, this presupposes all products have been
            // declared except the product being installed
            let (table, db_path) = if node_name == product {
                debug!("Product not in db, local setup");
                let mut table_path = product_repo.clone();
                table_path.push("ups");
                table_path.push(format!("{}.table", product));
                match reups::table::Table::from_file(
                    product.to_string(),
                    table_path.clone(),
                    product_repo.clone(),
                ) {
                    Ok(x) => (
                        x,
                        // "LOCAL:" prefix marks an undeclared, repo-local table path.
                        PathBuf::from(format!(
                            "LOCAL:{}",
                            table_path
                                .to_str()
                                .ok_or("cant convert table path to str")?
                        )),
                    ),
                    Err(e) => return Err(format!("{}", e)),
                }
            } else {
                (
                    self.db
                        .get_table_from_identity(node_name, &node_id)
                        .ok_or(format!(
                            "Issue looking up table for {}, was it declared?",
                            node_name
                        ))?,
                    self.db
                        .get_database_path_from_version(node_name, &self.options.version),
                )
            };
            reups::setup_table(
                &self.options.version,
                &table,
                &mut env_vars,
                true,
                &reups::SYSTEM_OS.to_string(),
                db_path,
                false,
            );
        }
        Ok(env_vars)
    }

    /// Runs the external build tool through its verbs (fetch, prep, config,
    /// build, install) in `repo_path` with the accumulated environment,
    /// teeing all subprocess output into the build log. Panics on a failed
    /// verb or on a failure to spawn the tool.
    fn build_product(
        &mut self,
        product: &str,
        product_dir: &PathBuf,
        repo_path: &PathBuf,
        env_vars: &FnvHashMap<String, String>,
    ) {
        info!("Building {}", product);
        debug!("Using environment {:#?} for building", env_vars);
        let _ = self
            .build_log
            .write_all(format!("Building {}\n", product).as_bytes());
        dbg!(product_dir);
        dbg!(&repo_path);
        for verb in ["fetch", "prep", "config", "build", "install"].iter() {
            debug!("Running build tool verb {}", verb);
            let _ = self
                .build_log
                .write_all(format!("Running build tool verb {}\n", verb).as_bytes());
            let output = std::process::Command::new(&self.options.build_tool)
                .args(&[
                    format!("PRODUCT={}", product),
                    format!("VERSION={}", self.options.version),
                    format!("FLAVOR={}", reups::SYSTEM_OS),
                    format!("PREFIX={}", &product_dir.to_str().unwrap()),
                    verb.to_string(),
                ])
                .current_dir(&repo_path)
                .envs(env_vars)
                .output();
            match output {
                Ok(o) => {
                    let _ = self
                        .build_log
                        .write_all(format!("Process exited with status {}\n", o.status).as_bytes());
                    let _ = self.build_log.write_all("Process stdout:\n".as_bytes());
                    let _ = self.build_log.write_all(&o.stdout);
                    let _ = self.build_log.write_all("\n".as_bytes());
                    let _ = self.build_log.write_all("Process stderr:\n".as_bytes());
                    let _ = self.build_log.write_all(&o.stderr);
                    let _ = self.build_log.write_all("\n".as_bytes());
                    if !o.status.success() {
                        panic!("{:#?}", o);
                    } else {
                        debug!("{:#?}", o.status);
                        ()
                    }
                }
                Err(e) => {
                    panic!("Building failed with error {}", e);
                }
            }
        }
    }

    /// Public entry point: clones + checks out `product`, builds its
    /// dependency graph, then delegates to the recursive install.
    pub fn install_product(&mut self, product: &str) -> Result<(), String> {
        // clone product
        // checkout branch
        // graph repo (VERIFY BRANCH IS PRESENT IN AT LEAST ONE RPO)
        // make product id
        // verify product id is not in database, if so short circuit and declare
        // loop through graph dfs and build
        // create directory to install in
        // change to repo working dir
        // issue eupspkg build comamnds
        // declare to systemdb
        // declare to remote db?
        info!("Installing product {}", product);
        self.get_or_clone_repo(product)?;
        self.checkout_branch(product)?;
        self.graph_repo(product, reups::graph::NodeType::Required);
        self.install_product_impl(product)
    }

    /// Recursive worker: installs every dependency of `product` (depth-first),
    /// builds the product itself unless its identity already exists in the
    /// database, and finally declares it.
    fn install_product_impl(&mut self, product: &str) -> Result<(), String> {
        // short circuit if this has already been built
        if self.build_completed.contains(product) {
            return Ok(());
        }
        let product_id = self.make_product_id(product)?;
        let table = if self.db.has_identity(product, &product_id) {
            info!(
                "Database has product {} with id {}, using that for the build",
                product, &product_id
            );
            // Get the path to an existing product if that is to be used
            self.db
                .get_table_from_identity(product, &product_id)
                .ok_or(format!(
                    "Error retrieving up table for {} in database",
                    product
                ))?
        } else {
            info!("Doing a source build for {}", product);
            // record all dependencies into a vector, as it is cheaper to loop through
            // that than do a dfs iteration multiple times
            let mut names = vec![];
            let mut has_python = false;
            for node in self.graph.dfs_post_order(product)? {
                let node_name = self.graph.get_name(node);
                if node_name == "scipipe_conda" {
                    has_python = true
                }
                names.push(node_name);
            }
            // for now force the python env to be a dependency of everything except
            // the environment and base conda, this ensures the environment is setup
            // this is not a good long terms solution but is useful for just testing
            if !HashSet::<&&str>::from_iter(["miniconda_lsst", "scipipe_conda"].iter())
                .contains(&product)
                && !has_python
            {
                names.insert(0, "scipipe_conda".to_string())
            }
            debug!("Product {} has dependencies {:?}", product, &names);
            // make sure all the dependencies are already installed, making sure
            // to skip the product currently being installed (ie the last element
            // in the dfs
            for name in names.iter() {
                // this product will be in the dfs graph, so skip it and finish
                // this function
                info!("Processing dependency {}", name);
                if name != product {
                    self.install_product_impl(&name)?;
                }
            }
            // determine the product directory to install to, and make sure it is
            // created
            let mut product_dir = PathBuf::from(&self.options.install_root);
            product_dir.push(product);
            product_dir.push(&self.options.version);
            debug!(
                "Creating directory {} for {} installation",
                product_dir.to_str().unwrap(),
                product
            );
            match std::fs::create_dir_all(&product_dir) {
                Ok(_) => (),
                Err(e) => return Err(format!("{}", e)),
            }
            debug!("Done creating");
            product_dir = product_dir
                .canonicalize()
                .or_else(|e| return Err(format!("{}", e)))?;
            // get the path to the build directory
            let repo_path = self
                .repo_map
                .get(product)
                .ok_or("no product of specified name found")?
                .workdir()
                .ok_or("The speficied product has no working directory")?
                .canonicalize()
                .or_else(|_| return Err(format!("Problem expanding abs path for {}", product)))?
                .to_str()
                .ok_or("Problem turning path into str")?
                .to_string();
            // look if the product should be built in a temporary path
            let mut upstream = PathBuf::from(&repo_path);
            upstream.push("upstream");
            let tmp_dir = TempDir::new(product).unwrap();
            let mut tmp_dir_path = PathBuf::from(tmp_dir.path());
            let repo_path = if upstream.exists() {
                debug!("Product is a upstream build, copy to tmp directory");
                let _ = copy(repo_path, &tmp_dir_path, &CopyOptions::new());
                tmp_dir_path.push(product);
                tmp_dir_path
            } else {
                // NOTE(review): dropping `tmp_dir` here deletes the unused temp
                // directory immediately; in the other branch it stays alive only
                // until the end of this scope — confirm the build finishes before
                // the TempDir is dropped.
                drop(tmp_dir);
                PathBuf::from(repo_path)
            };
            // accumulate the environment varibales
            let env_vars = self.accumulate_env(product, &repo_path, &names)?;
            // remove and trace that this might have been previously prepaired
            let mut prep_path = PathBuf::from(&repo_path);
            prep_path.push("upstream");
            prep_path.push("prepared");
            if prep_path.exists() {
                let _ = std::fs::remove_file(prep_path);
            }
            // issue the build commands
            self.build_product(product, &product_dir, &repo_path, &env_vars);
            // remove the git folder form product_dir
            let mut git_path = product_dir.clone();
            git_path.push(".git");
            if git_path.exists() {
                debug!("Removing git directory from installation");
                match remove(git_path) {
                    Ok(_) => (),
                    Err(e) => return Err(format!("{}", e)),
                };
            }
            let product_pathbuf = PathBuf::from(&product_dir);
            let mut table_path = product_pathbuf.clone();
            table_path.push("ups");
            table_path.push(format!("{}.table", product));
            let table = match reups::table::Table::from_file(
                product.to_string(),
                table_path.clone(),
                product_pathbuf,
            ) {
                Ok(x) => x,
                Err(e) => return Err(format!("{}", e)),
            };
            table
        };
        // get the table for the product
        // declare the results to the database
        let tmp_tag = match self.options.tag.as_ref() {
            Some(t) => Some(t.as_str()),
            None => None,
        };
        info!("Declaring {}", product);
        let product_dir = table.product_dir.clone();
        let declare_product = reups::DeclareInputs {
            product,
            prod_dir: &product_dir,
            version: &self.options.version,
            tag: tmp_tag,
            ident: Some(product_id.as_str()),
            flavor: Some(reups::SYSTEM_OS),
            table: Some(table),
            relative: false,
        };
        let res = self.db.declare(vec![declare_product], None);
        debug!("The results of declare are{:#?}", res);
        // add this product to the build completed set, so that when
        // multiple packages depend on this package it will not be
        // built twice
        self.build_completed.insert(product.to_string());
        Ok(())
    }
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// Generated serde models for the Management Partner API; every field is
// optional and omitted from serialization when `None` / empty. Regenerate
// from the API spec rather than editing by hand.

/// Top-level partner resource envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PartnerResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PartnerProperties>,
    // `type` is a Rust keyword, hence the rename to `type_`.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}

/// Payload of a partner resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PartnerProperties {
    #[serde(rename = "partnerId", default, skip_serializing_if = "Option::is_none")]
    pub partner_id: Option<String>,
    #[serde(rename = "partnerName", default, skip_serializing_if = "Option::is_none")]
    pub partner_name: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    #[serde(rename = "objectId", default, skip_serializing_if = "Option::is_none")]
    pub object_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<i32>,
    #[serde(rename = "updatedTime", default, skip_serializing_if = "Option::is_none")]
    pub updated_time: Option<String>,
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<ManagementPartnerState>,
}

/// Lifecycle state of a partner record.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ManagementPartnerState {
    Active,
    Deleted,
}

/// Error codes returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ErrorResponseCode {
    NotFound,
    Conflict,
    BadRequest,
}

/// Error envelope wrapping the detailed error info.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ExtendedErrorInfo>,
}

/// Machine-readable code plus human-readable message for an error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExtendedErrorInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<ErrorResponseCode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

/// Paged list of operations; `next_link` points at the following page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OperationResponse>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// A single API operation descriptor.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplay>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
}

/// Display strings for an operation (provider/resource/operation/description).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
// svd2rust-generated accessor for the read-only GICD peripheral ID0 register
// (GICD_PIDR0): the whole 32-bit value is exposed as the single PIDR0 field,
// and the register resets to 0x90. Regenerate from the SVD; do not hand-edit.
#[doc = "Register `GICD_PIDR0` reader"]
pub type R = crate::R<GICD_PIDR0_SPEC>;
#[doc = "Field `PIDR0` reader - PIDR0"]
pub type PIDR0_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - PIDR0"]
    #[inline(always)]
    pub fn pidr0(&self) -> PIDR0_R {
        // Field spans the full register, so no mask/shift is needed.
        PIDR0_R::new(self.bits)
    }
}
#[doc = "GICD peripheral ID0 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicd_pidr0::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GICD_PIDR0_SPEC;
impl crate::RegisterSpec for GICD_PIDR0_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`gicd_pidr0::R`](R) reader structure"]
impl crate::Readable for GICD_PIDR0_SPEC {}
#[doc = "`reset()` method sets GICD_PIDR0 to value 0x90"]
impl crate::Resettable for GICD_PIDR0_SPEC {
    const RESET_VALUE: Self::Ux = 0x90;
}
// NOTE(review): svd2rust-generated register description for an STM32-style
// GPIO port. The semantic content (offsets, doc strings) comes from the
// vendor SVD file — regenerate with svd2rust rather than hand-editing.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - GPIO port mode register"]
    pub gpiox_moder: GPIOX_MODER,
    #[doc = "0x04 - GPIO port output type register"]
    pub gpiox_otyper: GPIOX_OTYPER,
    #[doc = "0x08 - GPIO port output speed register"]
    pub gpiox_ospeedr: GPIOX_OSPEEDR,
    #[doc = "0x0c - GPIO port pull-up/pull-down register"]
    pub gpiox_pupdr: GPIOX_PUPDR,
    #[doc = "0x10 - GPIO port input data register"]
    pub gpiox_idr: GPIOX_IDR,
    #[doc = "0x14 - GPIO port output data register"]
    pub gpiox_odr: GPIOX_ODR,
    #[doc = "0x18 - GPIO port bit set/reset register"]
    pub gpiox_bsrr: GPIOX_BSRR,
    #[doc = "0x1c - This register is used to lock the configuration of the port bits when a correct write sequence is applied to bit 16 (LCKK). The value of bits \\[15:0\\] is used to lock the configuration of the GPIO. During the write sequence, the value of LCKR\\[15:0\\] must not change. When the LOCK sequence has been applied on a port bit, the value of this port bit can no longer be modified until the next MCU reset or peripheral reset. A specific write sequence is used to write to the GPIOx_LCKR register. Only word access (32-bit long) is allowed during this locking sequence. Each lock bit freezes a specific configuration register (control and alternate function registers)."]
    pub gpiox_lckr: GPIOX_LCKR,
    #[doc = "0x20 - GPIO alternate function low register"]
    pub gpiox_afrl: GPIOX_AFRL,
    #[doc = "0x24 - GPIO alternate function high register"]
    pub gpiox_afrh: GPIOX_AFRH,
    // Padding for the unmapped gap 0x28..0x3f4 (0x3cc = 972 bytes) between
    // AFRH and the version/identification registers below.
    _reserved10: [u8; 972usize],
    #[doc = "0x3f4 - GPIO version register"]
    pub gpiox_verr: GPIOX_VERR,
    #[doc = "0x3f8 - GPIO identification register"]
    pub gpiox_ipidr: GPIOX_IPIDR,
    #[doc = "0x3fc - GPIO size identification register"]
    pub gpiox_sidr: GPIOX_SIDR,
}
// Each register below follows the same svd2rust pattern: a `Reg` type alias,
// a hidden marker struct, `Readable`/`Writable` marker impls matching the
// register's access rights, and a field-description module.
#[doc = "GPIO port mode register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_moder](gpiox_moder) module"]
pub type GPIOX_MODER = crate::Reg<u32, _GPIOX_MODER>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_MODER;
#[doc = "`read()` method returns [gpiox_moder::R](gpiox_moder::R) reader structure"]
impl crate::Readable for GPIOX_MODER {}
#[doc = "`write(|w| ..)` method takes [gpiox_moder::W](gpiox_moder::W) writer structure"]
impl crate::Writable for GPIOX_MODER {}
#[doc = "GPIO port mode register"]
pub mod gpiox_moder;
#[doc = "GPIO port output type register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_otyper](gpiox_otyper) module"]
pub type GPIOX_OTYPER = crate::Reg<u32, _GPIOX_OTYPER>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_OTYPER;
#[doc = "`read()` method returns [gpiox_otyper::R](gpiox_otyper::R) reader structure"]
impl crate::Readable for GPIOX_OTYPER {}
#[doc = "`write(|w| ..)` method takes [gpiox_otyper::W](gpiox_otyper::W) writer structure"]
impl crate::Writable for GPIOX_OTYPER {}
#[doc = "GPIO port output type register"]
pub mod gpiox_otyper;
#[doc = "GPIO port output speed register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_ospeedr](gpiox_ospeedr) module"]
pub type GPIOX_OSPEEDR = crate::Reg<u32, _GPIOX_OSPEEDR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_OSPEEDR;
#[doc = "`read()` method returns [gpiox_ospeedr::R](gpiox_ospeedr::R) reader structure"]
impl crate::Readable for GPIOX_OSPEEDR {}
#[doc = "`write(|w| ..)` method takes [gpiox_ospeedr::W](gpiox_ospeedr::W) writer structure"]
impl crate::Writable for GPIOX_OSPEEDR {}
#[doc = "GPIO port output speed register"]
pub mod gpiox_ospeedr;
#[doc = "GPIO port pull-up/pull-down register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_pupdr](gpiox_pupdr) module"]
pub type GPIOX_PUPDR = crate::Reg<u32, _GPIOX_PUPDR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_PUPDR;
#[doc = "`read()` method returns [gpiox_pupdr::R](gpiox_pupdr::R) reader structure"]
impl crate::Readable for GPIOX_PUPDR {}
#[doc = "`write(|w| ..)` method takes [gpiox_pupdr::W](gpiox_pupdr::W) writer structure"]
impl crate::Writable for GPIOX_PUPDR {}
#[doc = "GPIO port pull-up/pull-down register"]
pub mod gpiox_pupdr;
// IDR is read-only: no `Writable` impl.
#[doc = "GPIO port input data register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_idr](gpiox_idr) module"]
pub type GPIOX_IDR = crate::Reg<u32, _GPIOX_IDR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_IDR;
#[doc = "`read()` method returns [gpiox_idr::R](gpiox_idr::R) reader structure"]
impl crate::Readable for GPIOX_IDR {}
#[doc = "GPIO port input data register"]
pub mod gpiox_idr;
#[doc = "GPIO port output data register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_odr](gpiox_odr) module"]
pub type GPIOX_ODR = crate::Reg<u32, _GPIOX_ODR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_ODR;
#[doc = "`read()` method returns [gpiox_odr::R](gpiox_odr::R) reader structure"]
impl crate::Readable for GPIOX_ODR {}
#[doc = "`write(|w| ..)` method takes [gpiox_odr::W](gpiox_odr::W) writer structure"]
impl crate::Writable for GPIOX_ODR {}
#[doc = "GPIO port output data register"]
pub mod gpiox_odr;
// BSRR is write-only: no `Readable` impl.
#[doc = "GPIO port bit set/reset register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_bsrr](gpiox_bsrr) module"]
pub type GPIOX_BSRR = crate::Reg<u32, _GPIOX_BSRR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_BSRR;
#[doc = "`write(|w| ..)` method takes [gpiox_bsrr::W](gpiox_bsrr::W) writer structure"]
impl crate::Writable for GPIOX_BSRR {}
#[doc = "GPIO port bit set/reset register"]
pub mod gpiox_bsrr;
#[doc = "This register is used to lock the configuration of the port bits when a correct write sequence is applied to bit 16 (LCKK). The value of bits \\[15:0\\] is used to lock the configuration of the GPIO. During the write sequence, the value of LCKR\\[15:0\\] must not change. When the LOCK sequence has been applied on a port bit, the value of this port bit can no longer be modified until the next MCU reset or peripheral reset. A specific write sequence is used to write to the GPIOx_LCKR register. Only word access (32-bit long) is allowed during this locking sequence. Each lock bit freezes a specific configuration register (control and alternate function registers).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_lckr](gpiox_lckr) module"]
pub type GPIOX_LCKR = crate::Reg<u32, _GPIOX_LCKR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_LCKR;
#[doc = "`read()` method returns [gpiox_lckr::R](gpiox_lckr::R) reader structure"]
impl crate::Readable for GPIOX_LCKR {}
#[doc = "`write(|w| ..)` method takes [gpiox_lckr::W](gpiox_lckr::W) writer structure"]
impl crate::Writable for GPIOX_LCKR {}
#[doc = "This register is used to lock the configuration of the port bits when a correct write sequence is applied to bit 16 (LCKK). The value of bits \\[15:0\\] is used to lock the configuration of the GPIO. During the write sequence, the value of LCKR\\[15:0\\] must not change. When the LOCK sequence has been applied on a port bit, the value of this port bit can no longer be modified until the next MCU reset or peripheral reset. A specific write sequence is used to write to the GPIOx_LCKR register. Only word access (32-bit long) is allowed during this locking sequence. Each lock bit freezes a specific configuration register (control and alternate function registers)."]
pub mod gpiox_lckr;
#[doc = "GPIO alternate function low register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_afrl](gpiox_afrl) module"]
pub type GPIOX_AFRL = crate::Reg<u32, _GPIOX_AFRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_AFRL;
#[doc = "`read()` method returns [gpiox_afrl::R](gpiox_afrl::R) reader structure"]
impl crate::Readable for GPIOX_AFRL {}
#[doc = "`write(|w| ..)` method takes [gpiox_afrl::W](gpiox_afrl::W) writer structure"]
impl crate::Writable for GPIOX_AFRL {}
#[doc = "GPIO alternate function low register"]
pub mod gpiox_afrl;
#[doc = "GPIO alternate function high register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_afrh](gpiox_afrh) module"]
pub type GPIOX_AFRH = crate::Reg<u32, _GPIOX_AFRH>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_AFRH;
#[doc = "`read()` method returns [gpiox_afrh::R](gpiox_afrh::R) reader structure"]
impl crate::Readable for GPIOX_AFRH {}
#[doc = "`write(|w| ..)` method takes [gpiox_afrh::W](gpiox_afrh::W) writer structure"]
impl crate::Writable for GPIOX_AFRH {}
#[doc = "GPIO alternate function high register"]
pub mod gpiox_afrh;
// The remaining three registers are read-only identification registers.
#[doc = "GPIO version register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_verr](gpiox_verr) module"]
pub type GPIOX_VERR = crate::Reg<u32, _GPIOX_VERR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_VERR;
#[doc = "`read()` method returns [gpiox_verr::R](gpiox_verr::R) reader structure"]
impl crate::Readable for GPIOX_VERR {}
#[doc = "GPIO version register"]
pub mod gpiox_verr;
#[doc = "GPIO identification register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_ipidr](gpiox_ipidr) module"]
pub type GPIOX_IPIDR = crate::Reg<u32, _GPIOX_IPIDR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_IPIDR;
#[doc = "`read()` method returns [gpiox_ipidr::R](gpiox_ipidr::R) reader structure"]
impl crate::Readable for GPIOX_IPIDR {}
#[doc = "GPIO identification register"]
pub mod gpiox_ipidr;
#[doc = "GPIO size identification register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpiox_sidr](gpiox_sidr) module"]
pub type GPIOX_SIDR = crate::Reg<u32, _GPIOX_SIDR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GPIOX_SIDR;
#[doc = "`read()` method returns [gpiox_sidr::R](gpiox_sidr::R) reader structure"]
impl crate::Readable for GPIOX_SIDR {}
#[doc = "GPIO size identification register"]
pub mod gpiox_sidr;
#![cfg(test)]
//use problem1::{sum, dedup, filter};
//use problem2::mat_mult;
//use problem3::sieve;
use problem4::{hanoi, Peg};

// Optimal 3-disk solution A -> C (via B): exactly 2^3 - 1 = 7 moves.
#[test]
fn test_hanoi_3_disks() {
    use Peg::*;
    let moves = hanoi(3, Peg::A, Peg::B, Peg::C);
    let expected = vec![
        (A, C),
        (A, B),
        (C, B),
        (A, C),
        (B, A),
        (B, C),
        (A, C),
    ];
    assert_eq!(moves, expected);
    assert_eq!(moves.len(), 7);
}

// Optimal 4-disk solution A -> B (via C): exactly 2^4 - 1 = 15 moves.
#[test]
fn test_hanoi_4_disks() {
    use Peg::*;
    let moves = hanoi(4, Peg::A, Peg::B, Peg::C);
    let expected = vec![
        (A, B),
        (A, C),
        (B, C),
        (A, B),
        (C, A),
        (C, B),
        (A, B),
        (A, C),
        (B, C),
        (B, A),
        (C, A),
        (B, C),
        (A, B),
        (A, C),
        (B, C),
    ];
    assert_eq!(moves, expected);
}
use std::io;

/// Reads one line from stdin. On a read error the error is reported and
/// whatever was captured so far (normally an empty string) is returned.
fn read_question() -> String {
    let mut input = String::new();
    if let Err(error) = io::stdin().read_line(&mut input) {
        println!("error: {}", error);
    }
    input
}

/// Consumes the string and exposes its raw UTF-8 bytes.
fn string_to_vector(input: String) -> Vec<u8> {
    input.into_bytes()
}

/// Sums all bytes, widening to u32 so the total cannot overflow.
fn sum_vector(input: Vec<u8>) -> u32 {
    input.iter().map(|&byte| u32::from(byte)).sum()
}

/// Prints "Tak!" when the byte sum is odd, "Nie!" otherwise.
fn print_answer(value: u32) {
    let answer = if value % 2 == 1 { "Tak!" } else { "Nie!" };
    println!("{}", answer);
}

fn main() {
    print_answer(sum_vector(string_to_vector(read_question())));
}
fn main() {
    let length = 5000;
    // Fill with 4.0, overwrite one element, then append one more.
    let mut y: Vec<f64> = std::iter::repeat(4.).take(length).collect();
    y[6] = 3.14;
    y.push(4.89);
    // After the push the vector holds 5001 elements, so index 5000 is valid.
    println!("{}, {}, {}", y[6], y[4999], y[5000]);
}
use super::LocatedSpan;
use nom::{
    Slice, InputIter, Compare, CompareResult, FindToken, FindSubstring, ParseTo, Offset
};

// Convenience aliases for the two fragment types exercised below.
type StrSpan<'a> = LocatedSpan<&'a str>;
type BytesSpan<'a> = LocatedSpan<&'a [u8]>;

// `new` should wrap the whole input starting at offset 0, line 1.
#[test]
fn it_should_call_new_for_u8_successfully() {
    let input = &b"foobar"[..];
    let output = BytesSpan {
        offset: 0,
        line: 1,
        fragment: input
    };

    assert_eq!(BytesSpan::new(input), output);
}

#[test]
fn it_should_call_new_for_str_successfully() {
    let input = &"foobar"[..];
    let output = StrSpan {
        offset: 0,
        line: 1,
        fragment: input
    };

    assert_eq!(StrSpan::new(input), output);
}

// Slicing advances `offset` but leaves `line` untouched (no newlines here).
#[test]
fn it_should_slice_for_str() {
    let str_slice = StrSpan::new("foobar");
    assert_eq!(str_slice.slice(1..), StrSpan {
        offset: 1,
        line: 1,
        fragment: "oobar"
    });
    assert_eq!(str_slice.slice(1..3), StrSpan {
        offset: 1,
        line: 1,
        fragment: "oo"
    });
    assert_eq!(str_slice.slice(..3), StrSpan {
        offset: 0,
        line: 1,
        fragment: "foo"
    });
    assert_eq!(str_slice.slice(..), str_slice);
}

#[test]
fn it_should_slice_for_u8() {
    let bytes_slice = BytesSpan::new(b"foobar");
    assert_eq!(bytes_slice.slice(1..), BytesSpan {
        offset: 1,
        line: 1,
        fragment: b"oobar"
    });
    assert_eq!(bytes_slice.slice(1..3), BytesSpan {
        offset: 1,
        line: 1,
        fragment: b"oo"
    });
    assert_eq!(bytes_slice.slice(..3), BytesSpan {
        offset: 0,
        line: 1,
        fragment: b"foo"
    });
    assert_eq!(bytes_slice.slice(..), bytes_slice);
}

#[test]
fn it_should_calculate_columns() {
    // NOTE(review): the literal reads `"foo bar"` with a single space, but
    // the expected column 9 only holds if "bar" starts at byte offset 8
    // (i.e. several spaces before it). The string may have had its inner
    // whitespace collapsed during extraction — TODO confirm against the
    // original test.
    let input = StrSpan::new("foo bar");
    let bar_idx = input.find_substring("bar").unwrap();
    assert_eq!(input.slice(bar_idx..).get_column(), 9);
}

#[test]
fn it_should_calculate_columns_accurately_with_non_ascii_chars() {
    // 6 bytes into "メカジキ" is the start of the third character, so the
    // UTF-8-aware column is 3 (columns appear to be 1-based).
    let s = StrSpan::new("メカジキ");
    assert_eq!(s.slice(6..).get_column_utf8(), Ok(3));
}

#[test]
#[should_panic(expected = "offset is too big")]
fn it_should_panic_when_getting_column_if_offset_is_too_big() {
    // usize::max_value() is deprecated in favour of usize::MAX on newer
    // toolchains; left as-is here.
    let s = StrSpan {
        offset: usize::max_value(),
        fragment: "",
        line: 1
    };
    s.get_column();
}

#[test]
fn it_should_iterate_indices() {
    let str_slice = StrSpan::new("foobar");
    assert_eq!(
        str_slice.iter_indices().collect::<Vec<(usize, char)>>(),
        vec![(0, 'f'), (1, 'o'), (2, 'o'), (3, 'b'), (4, 'a'), (5, 'r')]
    );
    // An empty span yields an empty iterator rather than panicking.
    assert_eq!(
        StrSpan::new("").iter_indices().collect::<Vec<(usize, char)>>(),
        vec![]
    );
}

#[test]
fn it_should_iterate_elements() {
    let str_slice = StrSpan::new("foobar");
    assert_eq!(
        str_slice.iter_elements().collect::<Vec<char>>(),
        vec!['f', 'o', 'o', 'b', 'a', 'r']
    );
    assert_eq!(
        StrSpan::new("").iter_elements().collect::<Vec<char>>(),
        vec![]
    );
}

#[test]
fn it_should_position_char() {
    let str_slice = StrSpan::new("foobar");
    assert_eq!(str_slice.position(|x| x == 'a'), Some(4));
    assert_eq!(str_slice.position(|x| x == 'c'), None);
}

#[test]
fn it_should_compare_elements() {
    assert_eq!(StrSpan::new("foobar").compare("foo"), CompareResult::Ok);
    assert_eq!(StrSpan::new("foobar").compare("bar"), CompareResult::Error);
    assert_eq!(StrSpan::new("foobar").compare("foobar"), CompareResult::Ok);
    assert_eq!(StrSpan::new("foobar").compare_no_case("fooBar"), CompareResult::Ok);
    assert_eq!(StrSpan::new("foobar").compare("foobarbaz"), CompareResult::Incomplete);
    // FIXME: WTF! The line below doesn't compile unless we stop comparing
    // LocatedSpan<&[u8]> with &str
    //
    // assert_eq!(BytesSpan::new(b"foobar").compare(b"foo"), CompareResult::Ok);
    assert_eq!(BytesSpan::new(b"foobar").compare("foo"), CompareResult::Ok);
}

#[test]
fn it_should_find_token() {
    // Both `char`, `u8` and `&u8` implement FindToken for spans.
    assert!('a'.find_token(StrSpan::new("foobar")));
    assert!(b'a'.find_token(StrSpan::new("foobar")));
    assert!(&(b'a').find_token(StrSpan::new("foobar")));

    assert!(!'c'.find_token(StrSpan::new("foobar")));
    assert!(!b'c'.find_token(StrSpan::new("foobar")));
    assert!(!(&b'c').find_token(StrSpan::new("foobar")));

    assert!(b'a'.find_token(BytesSpan::new(b"foobar")));
    assert!(&(b'a').find_token(BytesSpan::new(b"foobar")));

    assert!(!b'c'.find_token(BytesSpan::new(b"foobar")));
    assert!(!(&b'c').find_token(BytesSpan::new(b"foobar")));
}

#[test]
fn it_should_find_substring() {
    assert_eq!(StrSpan::new("foobar").find_substring("bar"), Some(3));
    assert_eq!(StrSpan::new("foobar").find_substring("baz"), None);
    assert_eq!(BytesSpan::new(b"foobar").find_substring("bar"), Some(3));
    assert_eq!(BytesSpan::new(b"foobar").find_substring("baz"), None);
}

#[test]
fn it_should_parse_to_string() {
    assert_eq!(StrSpan::new("foobar").parse_to(), Some("foobar".to_string()));
    assert_eq!(BytesSpan::new(b"foobar").parse_to(), Some("foobar".to_string()));
}

// https://github.com/Geal/nom/blob/eee82832fafdfdd0505546d224caa466f7d39a15/src/util.rs#L710-L720
#[test]
fn it_should_calculate_offset_for_u8() {
    let s = b"abcd123";
    let a = &s[..];
    let b = &a[2..];
    let c = &a[..4];
    let d = &a[3..5];

    assert_eq!(a.offset(b), 2);
    assert_eq!(a.offset(c), 0);
    assert_eq!(a.offset(d), 3);
}

// https://github.com/Geal/nom/blob/eee82832fafdfdd0505546d224caa466f7d39a15/src/util.rs#L722-L732
#[test]
fn it_should_calculate_offset_for_str() {
    // Offsets are byte offsets, hence the multi-byte characters in the input.
    let s = StrSpan::new("abcřèÂßÇd123");
    let a = s.slice(..);
    let b = a.slice(7..);
    let c = a.slice(..5);
    let d = a.slice(5..9);

    assert_eq!(a.offset(&b), 7);
    assert_eq!(a.offset(&c), 0);
    assert_eq!(a.offset(&d), 5);
}
use crate::{ file_stream::new_file_stream, utils::{not_found_response, server_error_response}, }; use http::HeaderMap; use http_service::Body; use std::{fs::File, ops::Range, path::PathBuf}; use tide::Response; pub struct FileRequest { path: Option<PathBuf>, } impl FileRequest { pub fn new(path: Option<PathBuf>, _headers: &HeaderMap) -> Self { Self { path } } } impl FileRequest { pub async fn work(self) -> Response { let path = match self.path { None => return not_found_response(), Some(x) => x, }; let file = match File::open(path) { Ok(x) => x, Err(error) => { error!("{}", error); return server_error_response(); } }; let size = match file.metadata() { Ok(x) => x.len(), Err(error) => { error!("{}", error); return server_error_response(); } }; Response::new(Body::from_stream(new_file_stream( file, Range { start: 0, end: size, }, ))) } }
use std::collections::HashMap; use std::collections::HashSet; use std::collections::VecDeque; use std::hash::Hash; use std::cmp::Eq; use ndarray::Array; use ndarray::Array2; use rand::prelude::*; use rand::rngs::SmallRng; pub struct Stack<T> { inner: Vec<T>, } impl<T> Stack<T> { pub fn new() -> Stack<T> { Stack { inner: Vec::new() } } pub fn push(&mut self, value: T) { self.inner.push(value); } pub fn pop(&mut self) -> Option<T> { if self.inner.len() == 0 { return None; } Some(self.inner.swap_remove(self.inner.len() - 1)) } } #[derive(Copy, Clone, Eq, PartialEq, Hash)] pub struct Vec2(pub isize, pub isize); #[derive(Copy, Clone, Eq, PartialEq, Hash)] pub struct UVec2(pub usize, pub usize); pub struct WeightTable<T> { inner: HashMap<T, f64>, } pub trait Hashable: Hash + Eq {} impl<T: Hash + Eq> Hashable for T {} impl<T: Hashable + Copy> WeightTable<T> { pub fn new() -> WeightTable<T> { WeightTable { inner: HashMap::new(), } } pub fn len(&self) -> usize { self.inner.len() } pub fn normalize(&mut self) { let total: f64 = self.inner.values().sum(); for value in self.inner.values_mut() { *value /= total; } } pub fn contains(&self, kind: &T) -> bool { self.inner.contains_key(kind) } pub fn entry(&mut self, kind: T) -> std::collections::hash_map::Entry<T, f64> { self.inner.entry(kind) } pub fn get(&self, kind: &T) -> f64 { self.inner[kind] } pub fn kinds(&self) -> HashSet<T> { let mut set = HashSet::new(); for key in self.inner.keys() { set.insert(key.clone()); } set } } pub type CompatibilityMap<T> = HashMap<T, HashSet<CompatibleTile<T>>>; pub type CompatibleTile<T> = (T, Vec2); #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum WavepointState { Uncollapsed, Collapsed, } #[derive(Clone, Debug)] struct Wavepoint<T: Hashable> { variants: HashSet<T>, state: WavepointState, } impl<T: Hashable> Wavepoint<T> { pub fn new(value: HashSet<T>) -> Wavepoint<T> { Wavepoint { variants: value, state: WavepointState::Uncollapsed, } } pub fn len(&self) -> usize { self.variants.len() } 
pub fn variants(&self) -> &HashSet<T> { &self.variants } pub fn remove_variant(&mut self, value: &T) { assert!(self.variants.len() > 1); self.variants.remove(value); } pub fn collapse(&mut self, value: T) { assert_ne!(self.state, WavepointState::Collapsed); self.variants.clear(); self.variants.insert(value); self.state = WavepointState::Collapsed; } pub fn is_collapsed(&self) -> bool { match self.state { WavepointState::Uncollapsed => false, WavepointState::Collapsed => true, } } pub fn get_value(&self) -> &T { assert_eq!(self.state, WavepointState::Collapsed); self.iter().next().unwrap() } pub fn iter(&self) -> impl std::iter::Iterator<Item = &'_ T> { self.variants.iter() } } struct EntropyTable<T> { entropies: Array2<f64>, weight_log_weights: HashMap<T, f64>, sum_of_weights: f64, sum_of_weights_log_weights: f64, sums_of_weights: Array2<f64>, sums_of_weights_log_weights: Array2<f64>, } pub struct Wavefunction<T: Hashable> { size: UVec2, weights: WeightTable<T>, coefficients: Array2<Wavepoint<T>>, entropy_table: EntropyTable<T>, } macro_rules! write_entropy { ($f: ident, $color:ident, $value:ident) => { write!($f, "{}[{}{:.2}]{}", termion::style::Bold, Fg($color), $value, termion::style::Reset)?; } } macro_rules! 
write_point { ($f: ident, $color:ident, $value:ident) => { write!($f, "{}{}[{}]{}", termion::style::Bold, Fg($color), $value, termion::style::Reset)?; } } use termion::color::*; impl std::fmt::Display for Wavefunction<char> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let (width, height) = (self.size.0, self.size.1); for row in 0..height { for col in 0..width { let wavepoint = &self.coefficients[[row, col]]; // let entropy = self.entropy_table.entropies[[col, row]]; // if wavepoint.is_collapsed() { // write_entropy!(f, White, entropy); // } // else { // write_entropy!(f, LightBlue, entropy); // } if wavepoint.is_collapsed() { let value = wavepoint.get_value(); match wavepoint.get_value() { 'L' => { write_point!(f, LightGreen, value); } 'S' => { write_point!(f, LightBlue, value); } 'C' => { write_point!(f, LightYellow, value); } 'A' => { write_point!(f, LightCyan, value); } 'B' => { write_point!(f, LightMagenta, value); } _ => { } } } else { write!(f, "{}[{}]{}", Fg(White), wavepoint.len(), termion::style::Reset)?; } } write!(f, "\n")?; } std::fmt::Result::Ok(()) } } impl<T: Hashable + Copy> Wavefunction<T> { pub fn new(size: UVec2, weights: WeightTable<T>) -> Wavefunction<T> { let coefficients = Wavefunction::derive_coefficients(size, &weights.kinds()); let entropies = Wavefunction::derive_entopy_table(size, &coefficients, &weights); Wavefunction { size, weights, coefficients, entropy_table: entropies, } } fn derive_coefficients(size: UVec2, kinds: &HashSet<T>) -> Array2<Wavepoint<T>> { Array::from_elem((size.0, size.1), Wavepoint::new(kinds.clone())) } fn derive_entopy_table(size: UVec2, coefficients: &Array2<Wavepoint<T>>, weights: &WeightTable<T>) -> EntropyTable<T> { let mut weight_log_weights = HashMap::new();// Vec::with_capacity(weights.len()); let mut sum_of_weights = 0f64; let mut sum_of_weights_log_weights = 0f64; for (index, variant) in weights.kinds().iter().enumerate() { let weight = weights.get(&variant); let wlw = weight * 
weight.ln(); sum_of_weights += weight; sum_of_weights_log_weights += wlw; weight_log_weights.insert(*variant, wlw); } let initial_entropy = sum_of_weights.ln() - (sum_of_weights_log_weights / sum_of_weights); let entropies = Array::from_elem((size.0, size.1), initial_entropy); let sums_of_weights = Array::from_elem((size.0, size.1), sum_of_weights); let sums_of_weights_log_weights = Array::from_elem((size.0, size.1), sum_of_weights_log_weights); EntropyTable { entropies, weight_log_weights, sum_of_weights, sum_of_weights_log_weights, sums_of_weights, sums_of_weights_log_weights } } pub fn possible_tiles(&self, coords: UVec2) -> &HashSet<T> { self.coefficients[[coords.0, coords.1]].variants() } pub fn is_fully_collapsed(&self) -> bool { self.coefficients .iter() .filter(|x| !x.is_collapsed()) .count() == 0 } fn get_collapse_state(&self, coords: UVec2, rng: &mut SmallRng) -> Option<&T> { let options = &self.coefficients[[coords.0, coords.1]]; let valid_weights: Vec<(&T, f64)> = options .iter() .filter_map(|item| { if self.weights.contains(item) { Some((item, self.weights.get(item))) } else { None } }) .collect(); let total_weights: f64 = valid_weights.iter().map(|i| i.1).sum(); let mut rnd = total_weights * rng.gen::<f64>(); let mut chosen = None; for (tile, weight) in valid_weights { rnd -= weight; if rnd < 0f64 { chosen = Some(tile); break; } } chosen } // Collapses the wavefunction at the given coordinates pub fn collapse(&mut self, coords: UVec2, rng: &mut SmallRng) { let collapsed_state = *self.get_collapse_state(coords, rng).unwrap(); self.coefficients[[coords.0, coords.1]].collapse(collapsed_state); } // Removed 'tile' from the list of possible tiles at 'coords' pub fn ban(&mut self, coords: UVec2, tile: T) { let weight = self.weights.get(&tile); self.entropy_table.sums_of_weights[[coords.0, coords.1]] -= weight; let wlw = self.entropy_table.weight_log_weights[&tile]; self.entropy_table.sums_of_weights_log_weights[[coords.0, coords.1]] -= wlw; let sum = 
self.weights.get(&tile); let sum_of_wlw = self.entropy_table.sums_of_weights_log_weights[[coords.0, coords.1]]; self.entropy_table.entropies[[coords.0, coords.1]] = sum.ln() - sum_of_wlw / sum; self.coefficients[[coords.0, coords.1]].remove_variant(&tile); } } pub struct Model<T: Hashable> { size: UVec2, wavefunction: Wavefunction<T>, stack: Stack<UVec2>, rng: SmallRng, compatibilities: CompatibilityMap<T>, } impl Model<char> { pub fn print(&self,) { println!("{}{}", termion::clear::All, self.wavefunction); } } impl<T: Hashable + Copy> Model<T> { pub fn new(seed: u64, wavefunction: Wavefunction<T>, compatibilities: CompatibilityMap<T>) -> Model<T> { Model { size: wavefunction.size, wavefunction, stack: Stack::new(), rng: SmallRng::seed_from_u64(seed), compatibilities, } } pub fn run(&mut self) { while self.iterate() { } } pub fn run_with_callback<F: Fn(&Self, u32)>(&mut self, func: F) { let mut iteration_count = 0; loop { func(&self, iteration_count); if !self.iterate() { break; } iteration_count += 1; } } pub fn iterate(&mut self) -> bool { if self.wavefunction.is_fully_collapsed() { return false; } let coords = self.min_entropy_coords(); self.wavefunction.collapse(coords, &mut self.rng); self.propagate(coords); true } fn propagate(&mut self, coords: UVec2) { self.stack.push(coords); let mut to_ban = VecDeque::with_capacity(4); while let Some(coords) = self.stack.pop() { let current_possible_tiles = self.wavefunction.possible_tiles(coords); for direction in valid_dirs(coords, self.size) { let other_coords = UVec2( ((coords.0 as isize) + direction.0) as usize, ((coords.1 as isize) + direction.1) as usize, ); for other_tile in self.wavefunction.possible_tiles(other_coords) { let other_tile_is_possible = current_possible_tiles.iter().any(|current_tile| { self.check_compatibility(*current_tile, *other_tile, direction) }); if !other_tile_is_possible { to_ban.push_front((other_coords, *other_tile)); } } } while let Some((coord, tile)) = to_ban.pop_back() { 
self.wavefunction.ban(coord, tile); self.stack.push(coord); } } } fn min_entropy_coords(&mut self) -> UVec2 { let mut min_entropy = std::f64::MAX; let mut min_entropy_coords = None; for x in 0..self.size.0 { for y in 0..self.size.1 { let coords = UVec2(x, y); if self.wavefunction.coefficients[[coords.0, coords.1]].is_collapsed() { continue; } let entropy = self.wavefunction.entropy_table.entropies[[coords.0, coords.1]]; let entropy_plus_noise = entropy - self.rng.gen::<f64>(); if entropy_plus_noise < min_entropy { min_entropy = entropy_plus_noise; min_entropy_coords = Some(coords); } } } min_entropy_coords.unwrap() } pub fn check_compatibility(&self, tile: T, other_tile: T, direction: Vec2) -> bool { if self.compatibilities.contains_key(&tile) { let tiles = self.compatibilities.get(&tile).unwrap(); return tiles.contains(&(other_tile, direction)); } false } } const UP: Vec2 = Vec2(0, 1); const LEFT: Vec2 = Vec2(-1, 0); const DOWN: Vec2 = Vec2(0, -1); const RIGHT: Vec2 = Vec2(1, 0); pub fn valid_dirs(coords: UVec2, size: UVec2) -> Vec<Vec2> { let mut dirs = Vec::new(); let (x, y) = (coords.0, coords.1); let (width, height) = (size.0, size.1); if x > 0 { dirs.push(LEFT); } if x < width - 1 { dirs.push(RIGHT); } if y > 0 { dirs.push(DOWN); } if y < height - 1 { dirs.push(UP); } dirs }
use lazy_static::lazy_static;

/// True when the user asked for evil mode on the command line:
/// the first argument is "evil" or "for-filipe".
pub fn evil_mode() -> bool {
    ARGS.evil_mode
}

/// Parsed command-line flags, computed once on first access.
struct Args {
    evil_mode: bool,
}

lazy_static! {
    static ref ARGS: Args = {
        // Only the first argument after the binary name matters.
        let first = std::env::args().nth(1);
        let evil_mode = match first.as_deref() {
            Some("evil") | Some("for-filipe") => true,
            _ => false,
        };
        Args { evil_mode }
    };
}
use types::Position;
use rmp::value::{Value, Integer};
use supported_languages::SupportedLanguage;
use std::str::FromStr;

/// Events the Neovim RPC channel can deliver to the language-server glue.
#[derive(Debug)]
pub enum NeovimRPCEvent {
    NewCursorPosition(Position),
    TextChangedI,
    BufRead(SupportedLanguage),
}

impl NeovimRPCEvent {
    /// Decodes a raw RPC notification into an event.
    ///
    /// Returns `None` for unknown event names, missing or ill-typed
    /// arguments, and unsupported languages. The previous version indexed
    /// `values[0]` / `values[1]` directly and `unwrap()`ed the language
    /// parse, so a malformed message from the editor could panic the whole
    /// process; `values.get(..)` and `.ok()` make decoding total.
    pub fn new(event: &str, values: Vec<Value>) -> Option<Self> {
        match event {
            "language_server_new_cursor_position" => {
                match (values.get(0), values.get(1)) {
                    (
                        Some(&Value::Integer(Integer::U64(line))),
                        Some(&Value::Integer(Integer::U64(character))),
                    ) => {
                        let pos = Position {
                            line: line,
                            character: character,
                        };
                        Some(NeovimRPCEvent::NewCursorPosition(pos))
                    }
                    _ => None,
                }
            }
            "language_server_text_changed" => Some(NeovimRPCEvent::TextChangedI),
            "lsp/bufread" => match values.get(0) {
                Some(&Value::String(ref lang)) => SupportedLanguage::from_str(lang)
                    .ok()
                    .map(NeovimRPCEvent::BufRead),
                _ => None,
            },
            _ => None,
        }
    }
}
use crate::lex::*; use crate::parse::*; use noisy_float::prelude::*; use std::collections::{HashMap, VecDeque}; use std::io::{Read, Write}; use std::{fmt, process}; #[derive(Debug, Clone)] pub struct Evaluator { macros: HashMap<Sym, Sym>, stack: Vec<Sym>, work: VecDeque<Sym>, } impl fmt::Display for Evaluator { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for sym in &self.stack { write!(f, "{} ", sym)?; } write!(f, "|")?; for sym in &self.work { write!(f, " {}", sym)?; } Ok(()) } } macro_rules! pop_stack { ($self:ident, $($var:ident),*) => { $self.stack_assert(pop_stack!(@COUNT; $($var),*))?; $(let $var = $self.stack.pop().unwrap();)* }; (@COUNT; $($var:ident),*) => { <[()]>::len(&[$(pop_stack!(@SUBST; $var)),*]) }; (@SUBST; $_i:ident) => { () }; } impl Evaluator { pub fn new() -> Self { Self { macros: HashMap::new(), stack: Vec::new(), work: VecDeque::new(), } } pub fn with_std() -> Self { let mut new = Self::new(); new.load_std(); new } pub fn load_std(&mut self) { // SAFETY: The standard library does not change depending // on user input and is tested explicitly let std_lib = include_str!("std.paste"); self.extend_code(std_lib).unwrap(); } pub fn extend_code(&mut self, source: &str) -> Result<(), String> { let prog = parse(lex(source))?; self.work.extend(prog); Ok(()) } pub fn extend_program(&mut self, prog: Vec<Sym>) { self.work.extend(prog); } fn stack_assert(&self, len: usize) -> Result<(), String> { if len > self.stack.len() { return Err(format!( "expected {} items on the stack, found {}", len, self.stack.len() )); } Ok(()) } fn eval_native( &mut self, sym: Native, _input: &mut dyn Read, output: &mut dyn Write, ) -> Result<(), String> { match sym { Native::Assign => { pop_stack!(self, key, val); if key != val && key != Sym::Text("_".into()) { self.macros.insert(key, val); } } Native::Do => { pop_stack!(self, x); self.work.push_front(x); } Native::Tern => { pop_stack!(self, else_stmt, then_stmt, cond); match cond { Sym::Int(0) => 
self.work.push_front(else_stmt), Sym::Int(_) => self.work.push_front(then_stmt), _ => { self.stack.push(cond); self.stack.push(then_stmt); self.stack.push(else_stmt); return Err("invalid condition".into()); } } } Native::While => { pop_stack!(self, stmt, cond); match cond { Sym::Int(0) => (), Sym::Int(_) => { self.work.push_front(Sym::Native(Native::While)); self.work.push_front(Sym::Defer(Box::new(stmt.clone()))); self.work.push_front(stmt); } _ => { self.stack.push(cond); self.stack.push(stmt); return Err("invalid condition".into()); } } } Native::Copy => { pop_stack!(self, num); let len = self.stack.len(); match num { Sym::Int(amount) => { if amount < 0 || amount as usize > len { self.stack.push(num); return Err("invalid copy amount".into()); } for i in len.saturating_sub(amount as _)..len { self.stack.push(self.stack[i].clone()); } } _ => { self.stack.push(num); return Err("invalid copy".into()); } } } Native::Put => { pop_stack!(self, sym); let string; let buffer = match &sym { Sym::Text(s) => s.as_ref(), x => { string = match x { Sym::Int(s) => format!("{}", s), Sym::Float(s) => format!("{}", s), s => format!("{:?}", s), }; &string } }; output .write(buffer.as_bytes()) .map_err(|_| "failed to write to output")?; } Native::Add | Native::Sub | Native::Mul | Native::Div | Native::Eq | Native::Less => { pop_stack!(self, a, b); #[inline] fn concat(a: impl fmt::Display, b: impl fmt::Display) -> String { format!("{}{}", a, b) } let c = match (sym, a, b) { (Native::Add, Sym::Text(x), Sym::Text(y)) => Sym::text(concat(x, y)), (Native::Add, Sym::Text(s), Sym::Int(y)) => Sym::text(concat(s, y)), (Native::Add, Sym::Text(s), Sym::Float(y)) => Sym::text(concat(s, y)), (Native::Add, Sym::Int(x), Sym::Text(s)) => Sym::text(concat(x, s)), (Native::Add, Sym::Int(x), Sym::Int(y)) => Sym::Int(x + y), (Native::Add, Sym::Int(x), Sym::Float(y)) => Sym::Float(r64(x as _) + y), (Native::Add, Sym::Float(x), Sym::Text(s)) => Sym::text(concat(x, s)), (Native::Add, Sym::Float(x), 
Sym::Int(y)) => Sym::Float(x + r64(y as _)), (Native::Add, Sym::Float(x), Sym::Float(y)) => Sym::Float(x + y), (Native::Sub, Sym::Int(x), Sym::Int(y)) => Sym::Int(x - y), (Native::Sub, Sym::Int(x), Sym::Float(y)) => Sym::Float(r64(x as _) - y), (Native::Sub, Sym::Float(x), Sym::Int(y)) => Sym::Float(x - r64(y as _)), (Native::Sub, Sym::Float(x), Sym::Float(y)) => Sym::Float(x - y), (Native::Mul, Sym::Int(x), Sym::Int(y)) => Sym::Int(x * y), (Native::Mul, Sym::Int(x), Sym::Float(y)) => Sym::Float(r64(x as _) * y), (Native::Mul, Sym::Float(x), Sym::Int(y)) => Sym::Float(x * r64(y as _)), (Native::Mul, Sym::Float(x), Sym::Float(y)) => Sym::Float(x * y), (Native::Div, Sym::Int(x), Sym::Int(y)) => Sym::Int(x / y), (Native::Div, Sym::Int(x), Sym::Float(y)) => Sym::Float(r64(x as _) / y), (Native::Div, Sym::Float(x), Sym::Int(y)) => Sym::Float(x / r64(y as _)), (Native::Div, Sym::Float(x), Sym::Float(y)) => Sym::Float(x / y), (Native::Eq, x, y) => Sym::Int((x == y) as _), (Native::Less, Sym::Int(x), Sym::Int(y)) => Sym::Int((x < y) as _), (Native::Less, Sym::Int(x), Sym::Float(y)) => Sym::Int((r64(x as _) < y) as _), (Native::Less, Sym::Float(x), Sym::Int(y)) => Sym::Int((x < r64(y as _)) as _), (Native::Less, Sym::Float(x), Sym::Float(y)) => Sym::Int((x < y) as _), (_, a, b) => { self.stack.push(b); self.stack.push(a); return Err("operation is undefined".into()); } }; self.stack.push(c); } Native::Floor => { pop_stack!(self, num); match num { Sym::Int(_) => self.stack.push(num), Sym::Float(x) => self.stack.push(Sym::Int(x.floor().raw() as _)), _ => { self.stack.push(num); return Err("operation is undefined".into()); } } } Native::Exit => { let code = match self.stack.pop() { Some(Sym::Int(n)) => n as _, _ => -1, }; process::exit(code) } } Ok(()) } fn macro_replace(&mut self, sym: Sym) -> Sym { if !self.macros.contains_key(&sym) || matches!(sym, Sym::Defer(_)) { return sym; } let mut next = sym.clone(); let mut iter = 0; while let Some(val) = self.macros.get(&next) { next 
= val.clone(); iter += 1; debug_assert_ne!(sym, next); } if iter > 1 { self.macros.insert(sym, next.clone()); } next } pub fn done(&self) -> bool { self.work.is_empty() } pub fn step(&mut self, input: &mut dyn Read, output: &mut dyn Write) -> Result<(), String> { let sym_opt = self.work.pop_front().map(|old| self.macro_replace(old)); let sym = match sym_opt { Some(s) => s, None => return Ok(()), }; match sym { Sym::Native(n) => { if let e @ Err(_) = self.eval_native(n, input, output) { self.work.push_front(sym); return e; } } Sym::Block(s) => { s.iter() .rev() .for_each(|sym| self.work.push_front(sym.clone())); } Sym::Defer(s) => self.stack.push(*s), s => self.stack.push(s), } Ok(()) } pub fn run(&mut self, input: &mut dyn Read, output: &mut dyn Write) -> Result<(), String> { while !self.done() { self.step(input, output)?; } Ok(()) } } #[cfg(test)] mod test { use super::*; use std::io; fn eval(prog: Vec<Sym>, input: &mut dyn Read, output: &mut dyn Write) -> Result<(), String> { let mut eval = Evaluator::with_std(); eval.extend_program(prog); eval.run(input, output)?; Ok(()) } fn eval_helper(code: &str, expected: &str) { let prog = parse(lex(code)).unwrap(); let mut output = Vec::new(); eval(prog, &mut io::empty(), &mut output).unwrap(); assert_eq!(output.as_slice(), expected.as_bytes()); } #[test] fn eval_sanity_check() { eval_helper("", ""); } #[test] fn eval_hello() { let code = "1 ;{ \"hello\" ;put do } if"; eval_helper(code, "hello"); } #[test] fn eval_fibonacci() { let code = "\ (fib =' ;{;n = 0 1 (n >' 0) ;{xch over + (;n =' (n -' 1)) (n !=' 0)} while pop}) (put (fib 42))"; eval_helper(code, "267914296"); } #[test] fn eval_gcd() { let code = "\ (gcd =' ;{1 ;{(copy 2) < ;xch if over xch - (0 !=' over)} while xch pop}) (put (35 gcd' 91))"; eval_helper(code, "7"); } #[test] fn eval_power() { let code = "\ (pow =' ;{;n = ;k = (k >' 1) ;((n pow' (k -' 1)) *' n) ;n ?}) (0.9 pow' 100) put"; eval_helper(code, "0.000026561398887587544"); } #[test] fn eval_do_do_do() { 
let code = "test ;;put do ;;do ;do do do"; eval_helper(code, "test"); } #[test] fn eval_quick_maths() { let code = "put (9 -' ((3 +' 1) *' 2))'"; eval_helper(code, "1"); } #[test] fn eval_macro_simple() { let code = "(5 =' 3) (n =' 5) (put n)"; eval_helper(code, "3"); } #[test] fn eval_macro_circle() { let code = "(5 =' 3) (3 =' 5) (put 3) (put 5)"; eval_helper(code, "33"); } #[test] fn eval_macro_override() { let code = "(2 =' 1) (2 =' 3) (1 =' 2) (put 1)"; eval_helper(code, "3"); } #[test] fn eval_macro_blocks() { let code = "0 1 5 2 3 7 9 ;{ dup put ;b if } b = b"; eval_helper(code, "9732510"); } #[test] fn eval_while_loop() { let code = "0 1 1 1 1 ;(put a) while"; eval_helper(code, "aaaa"); } #[test] fn eval_ternary() { let code = "\ (test =' ;{== ;a ;b ? put}) (test 3 5) (test 4 4)"; eval_helper(code, "ba"); } }
// Test harness for the `world` physics module: angle computation between
// points and a basic two-body velocity update.
mod world;

use std::f64::consts::PI;

#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): these tests use exact f64 equality against multiples of
    // PI. This relies on `world::get_angle` returning bit-exact axis/diagonal
    // values (as `atan2` does on these inputs) — confirm that implementation
    // detail before refactoring `get_angle`.

    // Angles measured from a point at the origin to points on each axis.
    #[test]
    fn test_angles_on_axis() {
        let p1 = world::Point {x: 0.0, y: 0.0};
        // Edge cases
        let p2 = world::Point {x: 0.0, y: 1.0};
        assert_eq!(PI / 2.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: -1.0, y: 0.0};
        assert_eq!(PI, world::get_angle(p1, p2));
        let p2 = world::Point {x: 0.0, y: -1.0};
        assert_eq!(PI + PI / 2.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: 1.0, y: 0.0};
        assert_eq!(0.0, world::get_angle(p1, p2));
    }

    // Diagonal targets from the origin: one per quadrant, full 0..2*PI range.
    #[test]
    fn test_45_degree_angles() {
        let p1 = world::Point {x: 0.0, y: 0.0};
        let p2 = world::Point {x: 1.0, y: 1.0};
        assert_eq!(PI / 4.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: -1.0, y: 1.0};
        assert_eq!(PI - PI / 4.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: -1.0, y: -1.0};
        assert_eq!(PI + PI / 4.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: 1.0, y: -1.0};
        assert_eq!(2.0 * PI - PI / 4.0, world::get_angle(p1, p2));
    }

    // Same diagonals, but with the origin point translated to (2, 2):
    // the angle must depend only on the relative offset.
    #[test]
    fn test_offset_angles() {
        let p1 = world::Point {x: 2.0, y: 2.0};
        let p2 = world::Point {x: 3.0, y: 3.0};
        assert_eq!(PI / 4.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: 1.0, y: 3.0};
        assert_eq!(PI - PI / 4.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: 1.0, y: 1.0};
        assert_eq!(PI + PI / 4.0, world::get_angle(p1, p2));
        let p2 = world::Point {x: 3.0, y: 1.0};
        assert_eq!(2.0 * PI - PI / 4.0, world::get_angle(p1, p2));
    }

    // Two stationary bodies, mass 1 and 2: after one update step the lighter
    // body should gain velocity inversely proportional to its mass.
    #[test]
    fn test_world_update() {
        let body1 = world::Body {
            name: "body1",
            position: world::Point { x: 0.0, y: 0.0 },
            velocity: world::Vector { x: 0.0, y: 0.0 },
            mass: 1.0
        };
        let body2 = world::Body {
            name: "body2",
            position: world::Point { x: 100.0, y: 0.0 },
            velocity: world::Vector { x: 0.0, y: 0.0 },
            mass: 2.0
        };
        let mut bodies = vec![body1, body2];
        bodies = world::update_bodies(bodies);
        // body 1 should have twice the velocity of body 2 in the opposite direction
        assert_eq!(-2.0 * bodies[1].velocity.x, bodies[0].velocity.x);
    }
}
#![allow(dead_code)] use glium::{ glutin::{event, event_loop}, Display, }; use fnv::FnvHashMap; use crate::engine; pub enum Request<'a, 'b: 'a> { Event { event: &'a event::Event<'b, ()>, should_update_ui: &'a mut bool, should_exit: &'a mut bool, }, SetUi { needs_redraw: &'a mut bool, }, Redraw, } /// In most of the examples the `glutin` crate is used for providing the window context and /// events while the `glium` crate is used for displaying `conrod_core::render::Primitives` to the /// screen. /// /// This function simplifies some of the boilerplate involved in limiting the redraw rate in the /// glutin+glium event loop. pub fn run_loop<F>(display: Display, event_loop: event_loop::EventLoop<()>, mut callback: F) -> ! where F: 'static + FnMut(Request, &Display), { let sixteen_ms = std::time::Duration::from_millis(16); let mut next_update = None; let mut ui_update_needed = false; event_loop.run(move |event, _, control_flow| { { let mut should_update_ui = false; let mut should_exit = false; callback( Request::Event { event: &event, should_update_ui: &mut should_update_ui, should_exit: &mut should_exit, }, &display, ); ui_update_needed |= should_update_ui; if should_exit { *control_flow = event_loop::ControlFlow::Exit; return; } } // We don't want to draw any faster than 60 FPS, so set the UI only on every 16ms, unless: // - this is the very first event, or // - we didn't request update on the last event and new events have arrived since then. let should_set_ui_on_main_events_cleared = next_update.is_none() && ui_update_needed; match (&event, should_set_ui_on_main_events_cleared) { (event::Event::NewEvents(event::StartCause::Init { .. }), _) | (event::Event::NewEvents(event::StartCause::ResumeTimeReached { .. 
}), _) | (event::Event::MainEventsCleared, true) => { next_update = Some(std::time::Instant::now() + sixteen_ms); ui_update_needed = false; let mut needs_redraw = false; callback( Request::SetUi { needs_redraw: &mut needs_redraw, }, &display, ); if needs_redraw { display.gl_window().window().request_redraw(); } else { // We don't need to redraw anymore until more events arrives. next_update = None; } } _ => {} } if let Some(next_update) = next_update { *control_flow = event_loop::ControlFlow::WaitUntil(next_update); } else { *control_flow = event_loop::ControlFlow::Wait; } // Request redraw if needed. match &event { event::Event::RedrawRequested(_) => { callback(Request::Redraw, &display); } _ => {} } }) } // Conversion functions for converting between types from glium's version of `winit` and // `conrod_core`. conrod_winit::v023_conversion_fns!(); widget_ids! { pub struct Ids { canvas, title, board, current_player, undo_button, redo_button, white_score, black_score } } pub struct App { pub board_state: [[Player; 8]; 8], pub player: Player, pub engine: engine::board::Board, pub previous_states: Vec<engine::board::Board>, pub turn: usize, pub winner: Player, pub transposition_table: fnv::FnvHashMap<(u64, u64), (i16, u8)> } impl App { /// Simple constructor for the `DemoApp`. 
pub fn new() -> Self { App { board_state: [ [Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None], [Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None], [Player::None, Player::None, Player::None, Player::Valid, Player::None, Player::None, Player::None, Player::None], [Player::None, Player::None, Player::Valid, Player::White, Player::Black, Player::None, Player::None, Player::None], [Player::None, Player::None, Player::None, Player::Black, Player::White, Player::Valid, Player::None, Player::None], [Player::None, Player::None, Player::None, Player::None, Player::Valid, Player::None, Player::None, Player::None], [Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None], [Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None, Player::None] ], player: Player::Black, // default player engine: engine::board::Board::new(), previous_states: Vec::new(), turn: 0, winner: Player::None, transposition_table: fnv::FnvHashMap::default() } } pub fn place_tile(&mut self, cell: [u8; 2]) { self.previous_states.truncate(self.turn + 1); self.turn += 1; self.previous_states.push(self.engine.clone()); self.engine = self.engine.place_tile(engine::board::get_bitmask_for_index(engine::board::get_index_from_move(engine::board::Move{col: cell[1] as i8, row: cell[0] as i8}))); self.reinitialize_board(); self.engine = self.engine.place_tile(crate::engine::AI::AI::get_minimax_move(self.engine, &mut self.transposition_table)); self.reinitialize_board(); } pub fn undo(&mut self) { if self.turn != 0 { self.turn -= 1; self.engine = self.previous_states[self.turn]; self.reinitialize_board(); } } // pub fn redo(&mut self) { // if self.turn != self.previous_states.len() { // self.turn += 1; // self.engine = self.previous_states[self.turn]; // self.reinitialize_board(); // } // } pub fn 
get_white_score(&self) -> u32 { self.engine.white_bitboard.count_ones() } pub fn get_black_score(&self) -> u32 { self.engine.black_bitboard.count_ones() } fn reinitialize_board(&mut self) { let new_board = self.engine.get_board(); self.player = match self.engine.turn { engine::enums::Player::Black => Player::Black, engine::enums::Player::White => Player::White }; for row in 0..8 { for col in 0..8 { match new_board[row][col] { engine::enums::Position::White => {self.board_state[row][col] = Player::White}, engine::enums::Position::Black => {self.board_state[row][col] = Player::Black}, engine::enums::Position::Valid => {self.board_state[row][col] = Player::Valid}, engine::enums::Position::Empty => {self.board_state[row][col] = Player::None}, } } } } } #[derive(Copy, Clone, PartialEq)] pub enum Player { Black, White, Valid, None }
use vsl_ast::VSLEntity;
// NOTE(review): `Hash` is not referenced in this file's visible code —
// confirm whether it is needed before removing the import.
use std::hash::Hash;

/// A named VSL declaration: any [`VSLEntity`] that additionally carries a
/// user-visible name.
pub trait VSLDecl: VSLEntity {
    /// Returns the declared name of this entity.
    // NOTE(review): `&String` is part of the trait's public contract here;
    // changing it to the more idiomatic `&str` would break all implementors,
    // so it is left as-is.
    fn get_name(&self) -> &String;
}
//! Hubcaps provides a set of building blocks for interacting with the Github API //! //! # Examples //! //! Typical use will require instantiation of a Github client. Which requires //! a user agent string, a `hyper::Client`, and set of `hubcaps::Credentials`. //! //! The hyper client should be configured with tls. //! //! ```no_run //! extern crate hubcaps; //! extern crate hyper; //! extern crate hyper_native_tls; //! //! use hubcaps::{Credentials, Github}; //! use hyper::Client; //! use hyper::net::HttpsConnector; //! use hyper_native_tls::NativeTlsClient; //! //! fn main() { //! let github = Github::new( //! String::from("user-agent-name"), //! Client::with_connector( //! HttpsConnector::new( //! NativeTlsClient::new().unwrap() //! ) //! ), //! Credentials::Token( //! String::from("personal-access-token") //! ) //! ); //! } //! ``` //! //! Github enterprise users will want to create a client with the //! [Github#host](struct.Github.html#method.host) method //! //! Access to various services are provided via methods on instances of the `Github` type. //! //! The convention for executing operations typically looks like //! `github.repo(.., ..).service().operation(OperationOptions)` where operation may be `create`, //! `delete`, etc. //! Services and their types are packaged under their own module namespace. //! A service interface will provide access to operations and operations may access options types //! that define the various parameter options available for the operation. Most operation option //! types expose `builder()` methods for a builder oriented style of constructing options. //! //! # Errors //! //! Operations typically result in a `hubcaps::Result` Type which is an alias for Rust's //! built-in Result with the Err Type fixed to the //! [hubcaps::Error](errors/enum.Error.html) enum type. //! 
#![allow(missing_docs)] // todo: make this a deny eventually #[macro_use] extern crate error_chain; #[macro_use] extern crate log; #[macro_use] extern crate hyper; #[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_json; extern crate url; // all the modules! use serde::de::DeserializeOwned; pub mod branches; pub mod git; pub mod users; pub mod comments; pub mod review_comments; pub mod pull_commits; pub mod keys; pub mod gists; pub mod deployments; pub mod errors; pub mod hooks; pub mod issues; pub mod labels; pub mod releases; pub mod repositories; pub mod statuses; pub mod pulls; pub mod search; pub mod teams; pub mod organizations; pub use errors::{Error, ErrorKind, Result}; use gists::{Gists, UserGists}; use search::Search; use hyper::Client; use hyper::client::RequestBuilder; use hyper::method::Method; use hyper::header::{qitem, Accept, Authorization, ContentLength, UserAgent}; use hyper::mime::Mime; use hyper::status::StatusCode; use repositories::{Repository, Repositories, UserRepositories, OrganizationRepositories}; use organizations::{Organization, Organizations, UserOrganizations}; use std::fmt; use std::io::Read; use url::Url; use std::collections::HashMap; /// Link header type header! 
{ (Link, "Link") => [String] } const DEFAULT_HOST: &'static str = "https://api.github.com"; /// alias for Result that infers hubcaps::Error as Err // pub type Result<T> = std::result::Result<T, Error>; /// Github defined Media types /// See [this doc](https://developer.github.com/v3/media/) for more for more information #[derive(Clone, Copy)] pub enum MediaType { /// Return json (the default) Json, /// Return json in preview form Preview(&'static str), } impl Default for MediaType { fn default() -> MediaType { MediaType::Json } } impl From<MediaType> for Mime { fn from(media: MediaType) -> Mime { match media { MediaType::Json => "application/vnd.github.v3+json".parse().unwrap(), MediaType::Preview(codename) => { format!("application/vnd.github.{}-preview+json", codename) .parse() .unwrap() } } } } /// enum representation of Github list sorting options #[derive(Clone, Debug, PartialEq)] pub enum SortDirection { /// Sort in ascending order (the default) Asc, /// Sort in descending order Desc, } impl fmt::Display for SortDirection { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { SortDirection::Asc => "asc", SortDirection::Desc => "desc", }.fmt(f) } } impl Default for SortDirection { fn default() -> SortDirection { SortDirection::Asc } } /// Various forms of authentication credentials supported by Github #[derive(Debug, PartialEq)] pub enum Credentials { /// No authentication (the default) None, /// Oauth token string /// https://developer.github.com/v3/#oauth2-token-sent-in-a-header Token(String), /// Oauth client id and secret /// https://developer.github.com/v3/#oauth2-keysecret Client(String, String), } impl Default for Credentials { fn default() -> Credentials { Credentials::None } } /// Entry point interface for interacting with Github API pub struct Github { host: String, agent: String, client: Client, credentials: Credentials, } impl Github { /// Create a new Github instance. 
This will typically be how you interface with all /// other operations pub fn new<A>(agent: A, client: Client, credentials: Credentials) -> Github where A: Into<String>, { Github::host(DEFAULT_HOST, agent, client, credentials) } /// Create a new Github instance hosted at a custom location. /// Useful for github enterprise installations ( yourdomain.com/api/v3/ ) pub fn host<H, A>(host: H, agent: A, client: Client, credentials: Credentials) -> Github where H: Into<String>, A: Into<String>, { Github { host: host.into(), agent: agent.into(), client: client, credentials: credentials, } } /// Return a reference to a Github repository pub fn repo<O, R>(&self, owner: O, repo: R) -> Repository where O: Into<String>, R: Into<String>, { Repository::new(self, owner, repo) } /// Return a reference to the collection of repositories owned by and /// associated with an owner pub fn user_repos<S>(&self, owner: S) -> UserRepositories where S: Into<String>, { UserRepositories::new(self, owner) } /// Return a reference to the collection of repositories owned by the user /// associated with the current authentication credentials pub fn repos(&self) -> Repositories { Repositories::new(self) } pub fn org<O>(&self, org: O) -> Organization where O: Into<String>, { Organization::new(self, org) } /// Return a reference to the collection of organizations that the user /// associated with the current authentication credentials is in pub fn orgs(&self) -> Organizations { Organizations::new(self) } /// Return a reference to the collection of organizations a user /// is publicly associated with pub fn user_orgs<U>(&self, user: U) -> UserOrganizations where U: Into<String>, { UserOrganizations::new(self, user) } /// Return a reference to an interface that provides access to a user's gists pub fn user_gists<O>(&self, owner: O) -> UserGists where O: Into<String>, { UserGists::new(self, owner) } /// Return a reference to an interface that provides access to the /// gists belonging to the owner of 
the token used to configure this client pub fn gists(&self) -> Gists { Gists::new(self) } /// Return a reference to an interface that provides access to search operations pub fn search(&self) -> Search { Search::new(self) } /// Return a reference to the collection of repositories owned by and /// associated with an organization pub fn org_repos<O>(&self, org: O) -> OrganizationRepositories where O: Into<String>, { OrganizationRepositories::new(self, org) } fn authenticate(&self, method: Method, url: String) -> RequestBuilder { match self.credentials { Credentials::Token(ref token) => { self.client.request(method, &url).header(Authorization( format!("token {}", token), )) } Credentials::Client(ref id, ref secret) => { let mut parsed = Url::parse(&url).unwrap(); parsed .query_pairs_mut() .append_pair("client_id", id) .append_pair("client_secret", secret); self.client.request(method, parsed) } Credentials::None => self.client.request(method, &url), } } fn iter<'a, D, I>(&'a self, uri: String, into_items: fn(D) -> Vec<I>) -> Result<Iter<'a, D, I>> where D: DeserializeOwned, { self.iter_media(uri, into_items, MediaType::Json) } fn iter_media<'a, D, I>( &'a self, uri: String, into_items: fn(D) -> Vec<I>, media_type: MediaType, ) -> Result<Iter<'a, D, I>> where D: DeserializeOwned, { Iter::new(self, self.host.clone() + &uri, into_items, media_type) } fn request<D>( &self, method: Method, uri: String, body: Option<&[u8]>, media_type: MediaType, ) -> Result<(Option<Links>, D)> where D: DeserializeOwned, { let builder = self.authenticate(method, uri) .header(UserAgent(self.agent.to_owned())) .header(Accept(vec![qitem(From::from(media_type))])); let mut res = (match body { Some(ref bod) => builder.body(*bod).send(), _ => builder.send(), })?; let mut body = match res.headers.clone().get::<ContentLength>() { Some(&ContentLength(len)) => String::with_capacity(len as usize), _ => String::new(), }; res.read_to_string(&mut body)?; let links = 
res.headers.get::<Link>().map(|&Link(ref value)| { Links::new(value.to_owned()) }); debug!("rec response {:#?} {:#?} {}", res.status, res.headers, body); match res.status { StatusCode::Conflict | StatusCode::BadRequest | StatusCode::UnprocessableEntity | StatusCode::Unauthorized | StatusCode::NotFound | StatusCode::Forbidden => { Err( ErrorKind::Fault { code: res.status, error: serde_json::from_str::<errors::ClientError>(&body)?, }.into(), ) } _ => Ok((links, serde_json::from_str::<D>(&body)?)), } } fn request_entity<D>( &self, method: Method, uri: String, body: Option<&[u8]>, media_type: MediaType, ) -> Result<D> where D: DeserializeOwned, { self.request(method, uri, body, media_type).map( |(_, entity)| { entity }, ) } fn get<D>(&self, uri: &str) -> Result<D> where D: DeserializeOwned, { self.get_media(uri, MediaType::Json) } fn get_media<D>(&self, uri: &str, media: MediaType) -> Result<D> where D: DeserializeOwned, { self.request_entity(Method::Get, self.host.clone() + uri, None, media) } fn delete(&self, uri: &str) -> Result<()> { match self.request_entity::<()>( Method::Delete, self.host.clone() + uri, None, MediaType::Json, ) { Err(Error(ErrorKind::Codec(_), _)) => Ok(()), otherwise => otherwise, } } fn post<D>(&self, uri: &str, message: &[u8]) -> Result<D> where D: DeserializeOwned, { self.request_entity( Method::Post, self.host.clone() + uri, Some(message), MediaType::Json, ) } fn patch_media<D>(&self, uri: &str, message: &[u8], media: MediaType) -> Result<D> where D: DeserializeOwned, { self.request_entity(Method::Patch, self.host.clone() + uri, Some(message), media) } fn patch<D>(&self, uri: &str, message: &[u8]) -> Result<D> where D: DeserializeOwned, { self.patch_media(uri, message, MediaType::Json) } fn put_no_response(&self, uri: &str, message: &[u8]) -> Result<()> { match self.put(uri, message) { Err(Error(ErrorKind::Codec(_), _)) => Ok(()), otherwise => otherwise, } } fn put<D>(&self, uri: &str, message: &[u8]) -> Result<D> where D: DeserializeOwned, 
{ self.request_entity( Method::Put, self.host.clone() + uri, Some(message), MediaType::Json, ) } } /// An abstract type used for iterating over result sets pub struct Iter<'a, D, I> { github: &'a Github, next_link: Option<String>, into_items: fn(D) -> Vec<I>, items: Vec<I>, media_type: MediaType, } impl<'a, D, I> Iter<'a, D, I> where D: DeserializeOwned, { /// creates a new instance of an Iter pub fn new( github: &'a Github, uri: String, into_items: fn(D) -> Vec<I>, media_type: MediaType, ) -> Result<Iter<'a, D, I>> { let (links, payload) = github.request::<D>(Method::Get, uri, None, media_type)?; let mut items = into_items(payload); items.reverse(); // we pop from the tail Ok(Iter { github: github, next_link: links.and_then(|l| l.next()), into_items: into_items, items: items, media_type: media_type, }) } fn set_next(&mut self, next: Option<String>) { self.next_link = next; } } impl<'a, D, I> Iterator for Iter<'a, D, I> where D: DeserializeOwned, { type Item = I; fn next(&mut self) -> Option<I> { self.items.pop().or_else(|| { self.next_link.clone().and_then(|ref next_link| { self.github .request::<D>(Method::Get, next_link.to_owned(), None, self.media_type) .ok() .and_then(|(links, payload)| { let mut next_items = (self.into_items)(payload); next_items.reverse(); // we pop() from the tail self.set_next(links.and_then(|l| l.next())); self.items = next_items; self.next() }) }) }) } } /// An abstract collection of Link header urls /// Exposes interfaces to access link relations github typically /// sends as headers #[derive(Debug)] pub struct Links { values: HashMap<String, String>, } impl Links { /// Creates a new Links instance given a raw header string value pub fn new<V>(value: V) -> Links where V: Into<String>, { let values = value .into() .split(",") .map(|link| { let parts = link.split(";").collect::<Vec<_>>(); ( parts[1].to_owned().replace(" rel=\"", "").replace("\"", ""), parts[0] .to_owned() .replace("<", "") .replace(">", "") .replace(" ", ""), ) }) 
.fold(HashMap::new(), |mut acc, (rel, link)| { acc.insert(rel, link); acc }); Links { values: values } } /// Returns next link url, when available pub fn next(&self) -> Option<String> { self.values.get("next").map(|s| s.to_owned()) } /// Returns prev link url, when available pub fn prev(&self) -> Option<String> { self.values.get("prev").map(|s| s.to_owned()) } /// Returns last link url, when available pub fn last(&self) -> Option<String> { self.values.get("last").map(|s| s.to_owned()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_links() { let links = Links::new(r#"<linknext>; rel="next", <linklast>; rel="last""#); assert_eq!(links.next(), Some("linknext".to_owned())); assert_eq!(links.last(), Some("linklast".to_owned())); } #[test] fn default_sort_direction() { let default: SortDirection = Default::default(); assert_eq!(default, SortDirection::Asc) } #[test] fn default_credentials() { let default: Credentials = Default::default(); assert_eq!(default, Credentials::None) } }
// NOTE(review): this file looks machine-generated in the svd2rust style
// (register/field reader types over raw bits). Prefer regenerating from the
// SVD over hand-editing — confirm the generator before changing anything here.

#[doc = "Reader of register MT_STATUS"]
pub type R = crate::R<u32, super::MT_STATUS>;
#[doc = "Reader of field `BLESS_STATE`"]
pub type BLESS_STATE_R = crate::R<bool, bool>;
#[doc = "Reader of field `MT_CURR_STATE`"]
pub type MT_CURR_STATE_R = crate::R<u8, u8>;
#[doc = "Reader of field `HVLDO_STARTUP_CURR_STATE`"]
pub type HVLDO_STARTUP_CURR_STATE_R = crate::R<u8, u8>;
#[doc = "Reader of field `LL_CLK_STATE`"]
pub type LL_CLK_STATE_R = crate::R<bool, bool>;

impl R {
    #[doc = "Bit 0 - 1'b0 - BLESS in DPSLP state 1'b1 - BLESS in ACTIVE state"]
    #[inline(always)]
    pub fn bless_state(&self) -> BLESS_STATE_R {
        // Bit 0 of the raw register value.
        BLESS_STATE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 1:4 - This register reflects the current state of the MT FSM 4'h0 - IDLE 4'h1 - BLERD_DEEPSLEEP 4'h2 - HVLDO_STARTUP 4'h3 - WAIT_CLK 4'h4 - BLERD_IDLE 4'h5 - SWITCH_EN 4'h6 - ACTIVE 4'h7 - ISOLATE 4'h8 - WAIT_IDLE 4'h9 - XTAL_DISABLE 4'hA - HVLDO_DISABLE"]
    #[inline(always)]
    pub fn mt_curr_state(&self) -> MT_CURR_STATE_R {
        // 4-bit field at bits [4:1].
        MT_CURR_STATE_R::new(((self.bits >> 1) & 0x0f) as u8)
    }
    #[doc = "Bits 5:7 - This register reflects the current state of the HVLDO Startup FSM 3'h0 - HVLDO_OFF 3'h1 - HVLDO_WAIT 3'h2 - HVLDO_SAMPLE 3'h3 - HVLDO_ENABLED 3'h4 - HVLDO_SET_BYPASS"]
    #[inline(always)]
    pub fn hvldo_startup_curr_state(&self) -> HVLDO_STARTUP_CURR_STATE_R {
        // 3-bit field at bits [7:5].
        HVLDO_STARTUP_CURR_STATE_R::new(((self.bits >> 5) & 0x07) as u8)
    }
    #[doc = "Bit 8 - This bit indicates when the Link Layer registers are accessible upon a DSM exit. This bit should not be used after a DSM entry command has been issued. 1'b0 - Link Layer clock is not available 1'b1 - Link Layer clock is active"]
    #[inline(always)]
    pub fn ll_clk_state(&self) -> LL_CLK_STATE_R {
        // Bit 8 of the raw register value.
        LL_CLK_STATE_R::new(((self.bits >> 8) & 0x01) != 0)
    }
}
/// Connection parameters for one bitcoind-style RPC endpoint.
#[derive(Debug)]
struct RpcCaller {
    addr: String,
    user: String,
    pass: String,
}

/// Static description of one chain: the digit used to derive its RPC port,
/// plus the users expected to run a node on it.
#[derive(Debug)]
struct ChainConfig {
    port_decimal: u32,
    users: Vec<String>,
}

/// Demo driver: builds the per-chain/per-user RPC endpoint table, then waits
/// for the external daemons before exiting.
fn main() {
    println!("This is a demo demonstrating a lightning payments across 2 different chains");

    let mut chains = std::collections::HashMap::new();
    chains.insert("regtest", ChainConfig {
        port_decimal: 4,
        users: vec![String::from("alice"), String::from("bob")],
    });
    chains.insert("liquid-regtest", ChainConfig {
        port_decimal: 5,
        users: vec![String::from("bob"), String::from("carol")],
    });
    println!("Chains considered (with their users): {:?}", chains);

    let mut bitcoinds = std::collections::HashMap::new();
    for (chain_name, chain_config) in chains.iter() {
        // The RPC port (e.g. "18545") depends only on the chain, not on the
        // user, so compute it once per chain instead of once per user.
        let rpc_port = format!("185{}5", chain_config.port_decimal);
        let mut bitcoinds_for_chain = std::collections::HashMap::new();
        for user in chain_config.users.iter() {
            // NOTE: every user on a chain currently shares the same endpoint
            // and credentials — they are derived from the port alone.
            bitcoinds_for_chain.insert(user, RpcCaller {
                addr: format!("0.0.0.0:{}", rpc_port),
                user: format!("user{}", rpc_port),
                pass: format!("password{}", rpc_port),
            });
        }
        bitcoinds.insert(chain_name, bitcoinds_for_chain);
    }
    println!("Bitcoind instances to connect to: {:?}", bitcoinds);

    // Wait for daemons to start
    std::thread::sleep(std::time::Duration::from_secs(5));

    println!("All done. Exiting in 5 seconds...");
    std::thread::sleep(std::time::Duration::from_secs(5));
}
// Lochnes front-end binary: parses CLI options, loads a ROM, and drives the
// emulator core inside an SDL2 window. Requires nightly (generators).
#![feature(
    cell_update,
    never_type,
    exhaustive_patterns,
    generators,
    generator_trait
)]

use log::{debug, info, trace};
use nes::ppu::PpuStep;
use nes::NesStep;
use sdl2::controller::Button as SdlButton;
use sdl2::event::Event as SdlEvent;
use sdl2::keyboard::Keycode as SdlKeycode;
use std::fs;
use std::io;
use std::ops::{Generator, GeneratorState};
use std::path::PathBuf;
use std::pin::Pin;
use std::process;
use std::thread;
use std::time::{Duration, Instant};
use structopt::StructOpt;

use lochnes::{input, nes, rom, video};

/// Entry point: set up logging, then run and report errors with exit code 1.
fn main() {
    let opts = Options::from_args();

    stderrlog::new()
        .module(module_path!())
        // `-vvvvvv` (verbose == 6) also silences logging; it ties into the
        // easter-egg branch in `run` below.
        .quiet(opts.quiet || opts.verbose == 6)
        .verbosity(opts.verbose as usize)
        .init()
        .expect("Failed to set up logging");

    let run_result = run(opts);
    match run_result {
        Ok(_) => {}
        Err(err) => {
            // Debug-print the error and exit non-zero.
            eprintln!("{:?}", err);
            process::exit(1);
        }
    }
}

/// Command-line options (structopt-derived).
#[derive(StructOpt, Debug)]
#[structopt(name = "lochnes")]
struct Options {
    /// Path to the NES ROM to load.
    #[structopt(name = "ROM", parse(from_os_str))]
    rom: PathBuf,

    /// Integer window-scale factor (default 1 — see `run_rom`).
    #[structopt(long = "scale")]
    scale: Option<u32>,

    #[structopt(short = "q", long = "quiet")]
    quiet: bool,

    #[structopt(short = "v", parse(from_occurrences))]
    verbose: u8,
}

/// Loads the ROM from disk (or the embedded easter-egg ROM) and runs it.
fn run(opts: Options) -> Result<(), LochnesError> {
    debug!("Options: {:#?}", opts);

    #[cfg(feature = "easter-egg")]
    {
        if opts.verbose == 6 {
            // Embedded ROM, lightly patched at byte 0x400C before loading.
            let bytes = include_bytes!("../tests/fixtures/egg.nes");
            let mut bytes: Vec<u8> = bytes.to_vec();
            let nmi = &mut bytes[0x400C];
            *nmi = nmi.wrapping_add(opts.verbose / 2);
            let rom = rom::Rom::from_bytes(bytes.into_iter())?;
            run_rom(opts, rom)?;
            return Ok(());
        }
    }

    let bytes = fs::read(&opts.rom)?;
    let rom = rom::Rom::from_bytes(bytes.into_iter())?;
    debug!("ROM header: {:#04X?}", rom.header);

    run_rom(opts, rom)?;
    Ok(())
}

/// Main emulation loop: creates the SDL window/canvas/controller, then per
/// frame (1) drains SDL input events into `input_state`, (2) steps the NES
/// generator until vblank, (3) presents the frame and sleeps to ~60 Hz.
fn run_rom(opts: Options, rom: rom::Rom) -> Result<(), LochnesError> {
    // 60 Hz frame budget, expressed in nanoseconds.
    const NES_REFRESH_RATE: Duration = Duration::from_nanos(1_000_000_000 / 60);
    const NES_WIDTH: u32 = 256;
    const NES_HEIGHT: u32 = 240;

    let scale = opts.scale.unwrap_or(1);
    let window_width = NES_WIDTH * scale;
    let window_height = NES_HEIGHT * scale;

    // SDL setup: string-typed SDL errors are wrapped into LochnesError.
    let sdl = sdl2::init().map_err(LochnesError::Sdl2Error)?;
    let sdl_video = sdl.video().map_err(LochnesError::Sdl2Error)?;
    let sdl_window = sdl_video
        .window("Lochnes", window_width, window_height)
        .opengl()
        .build()?;
    let mut sdl_canvas = sdl_window.into_canvas().build()?;
    let sdl_texture_creator = sdl_canvas.texture_creator();

    // Pick the first joystick that SDL recognizes as a game controller, if any.
    let sdl_controllers = sdl.game_controller().map_err(LochnesError::Sdl2Error)?;
    let num_sdl_controllers = sdl_controllers
        .num_joysticks()
        .map_err(LochnesError::Sdl2Error)?;
    let sdl_controller_index = (0..num_sdl_controllers).find_map(|n| {
        if sdl_controllers.is_game_controller(n) {
            Some(n)
        } else {
            None
        }
    });
    // Held (not read) so the controller stays open for event delivery.
    let _sdl_controller = sdl_controller_index
        .map(|index| sdl_controllers.open(index))
        .transpose()?;

    let mut sdl_event_pump = sdl.event_pump().map_err(LochnesError::Sdl2Error)?;

    let video = &video::TextureBufferedVideo::new(&sdl_texture_creator, NES_WIDTH, NES_HEIGHT)?;
    let mut input_state = input::InputState::default();
    let input = &input::SampledInput::new(input_state);
    let io = nes::NesIoWith { video, input };
    let nes = nes::Nes::new(&io, rom);
    // The core is a generator; it is resumed step-by-step below.
    let mut run_nes = nes.run();

    'running: loop {
        let frame_start = Instant::now();

        // Phase 1: translate SDL keyboard/controller events into joypad state.
        // Keyboard map: Z=A, X=B, Return=Start, Backslash=Select, arrows=D-pad.
        for event in sdl_event_pump.poll_iter() {
            match event {
                SdlEvent::Quit { .. }
                | SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Escape),
                    ..
                } => {
                    break 'running;
                }
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Z),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::A,
                    ..
                } => {
                    input_state.joypad_1.a = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::Z),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::A,
                    ..
                } => {
                    input_state.joypad_1.a = false;
                }
                // Both controller B and X map to NES B.
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::X),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::B,
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::X,
                    ..
                } => {
                    input_state.joypad_1.b = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::X),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::B,
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::X,
                    ..
                } => {
                    input_state.joypad_1.b = false;
                }
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Return),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::Start,
                    ..
                } => {
                    input_state.joypad_1.start = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::Return),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::Start,
                    ..
                } => {
                    input_state.joypad_1.start = false;
                }
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Backslash),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::Back,
                    ..
                } => {
                    input_state.joypad_1.select = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::Backslash),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::Back,
                    ..
                } => {
                    input_state.joypad_1.select = false;
                }
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Up),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::DPadUp,
                    ..
                } => {
                    input_state.joypad_1.up = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::Up),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::DPadUp,
                    ..
                } => {
                    input_state.joypad_1.up = false;
                }
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Down),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::DPadDown,
                    ..
                } => {
                    input_state.joypad_1.down = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::Down),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::DPadDown,
                    ..
                } => {
                    input_state.joypad_1.down = false;
                }
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Left),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::DPadLeft,
                    ..
                } => {
                    input_state.joypad_1.left = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::Left),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::DPadLeft,
                    ..
                } => {
                    input_state.joypad_1.left = false;
                }
                SdlEvent::KeyDown {
                    keycode: Some(SdlKeycode::Right),
                    ..
                }
                | SdlEvent::ControllerButtonDown {
                    button: SdlButton::DPadRight,
                    ..
                } => {
                    input_state.joypad_1.right = true;
                }
                SdlEvent::KeyUp {
                    keycode: Some(SdlKeycode::Right),
                    ..
                }
                | SdlEvent::ControllerButtonUp {
                    button: SdlButton::DPadRight,
                    ..
                } => {
                    input_state.joypad_1.right = false;
                }
                _ => {}
            }
        }

        // Input is sampled once per frame.
        input.set_state(input_state);
        debug!("Input: {:?}", input_state);

        // Phase 2: resume the core generator until the PPU reports vblank
        // (end of frame). No `Complete` arm is needed: the generator's return
        // type is `!`, so with `exhaustive_patterns` the match is exhaustive.
        loop {
            match Pin::new(&mut run_nes).resume(()) {
                GeneratorState::Yielded(NesStep::Ppu(PpuStep::Vblank)) => {
                    break;
                }
                GeneratorState::Yielded(NesStep::Cpu(nes::cpu::CpuStep::Op(op))) => {
                    // Per-opcode trace output (only visible at trace level).
                    trace!("{:X?}", nes.cpu);
                    trace!("${:04X}: {}", op.pc, op.op);
                    trace!("----------");
                }
                GeneratorState::Yielded(_) => {}
            }
        }

        // Phase 3: blit the frame and pace to the 60 Hz refresh rate.
        video
            .copy_to(&mut sdl_canvas)
            .map_err(LochnesError::Sdl2Error)?;
        sdl_canvas.present();

        let elapsed = frame_start.elapsed();
        info!("frame time: {:5.2}ms", elapsed.as_micros() as f64 / 1_000.0);

        // checked_sub yields None when the frame overran its budget.
        let duration_until_refresh = NES_REFRESH_RATE.checked_sub(elapsed);
        let sleep_duration = duration_until_refresh.unwrap_or_else(|| Duration::from_secs(0));
        thread::sleep(sleep_duration);
    }

    Ok(())
}

/// Unified error type for the binary; SDL's string errors are carried as-is.
#[derive(Debug)]
enum LochnesError {
    IoError(io::Error),
    RomError(rom::RomError),
    Sdl2Error(String),
}

impl From<io::Error> for LochnesError {
    fn from(err: io::Error) -> Self {
        LochnesError::IoError(err)
    }
}

impl From<rom::RomError> for LochnesError {
    fn from(err: rom::RomError) -> Self {
        LochnesError::RomError(err)
    }
}

impl From<sdl2::video::WindowBuildError> for LochnesError {
    fn from(err: sdl2::video::WindowBuildError) -> Self {
        LochnesError::Sdl2Error(err.to_string())
    }
}

impl From<sdl2::IntegerOrSdlError> for LochnesError {
    fn from(err: sdl2::IntegerOrSdlError) -> Self {
        LochnesError::Sdl2Error(err.to_string())
    }
}

impl From<sdl2::render::TextureValueError> for LochnesError {
    fn from(err: sdl2::render::TextureValueError) -> Self {
        LochnesError::Sdl2Error(err.to_string())
    }
}
use std::fmt;

use crate::common::*;

/// A single lint finding: the offending line, the check that produced it,
/// and a human-readable message.
#[derive(Clone, Debug, PartialEq)]
pub struct Warning {
    pub check_name: String,
    line: LineEntry,
    message: String,
}

impl Warning {
    /// Builds a warning for `line`, taking ownership of the message and
    /// copying the check name into an owned `String`.
    pub fn new(line: LineEntry, check_name: &str, message: String) -> Self {
        Self {
            line,
            check_name: check_name.to_string(),
            message,
        }
    }

    /// 1-based line number of the offending entry.
    pub fn line_number(&self) -> usize {
        self.line.number
    }
}

impl fmt::Display for Warning {
    /// Renders as `<file>:<line> <CheckName>: <message>` with the location
    /// italicized and the check name in bold red.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let location = format!("{}:{}", self.line.file, self.line.number);
        write!(
            f,
            "{} {}: {}",
            location.italic(),
            self.check_name.red().bold(),
            self.message
        )
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::common::tests::*;

    #[test]
    fn warning_fmt_test() {
        let entry = line_entry(1, 1, "FOO=BAR");
        let warn = Warning::new(
            entry,
            "DuplicatedKey",
            String::from("The FOO key is duplicated"),
        );

        let expected = format!(
            "{} {}: {}",
            format!("{}:{}", ".env", "1").italic(),
            "DuplicatedKey".red().bold(),
            "The FOO key is duplicated"
        );
        assert_eq!(expected, format!("{}", warn));
    }
}
use serde::{Deserialize, Serialize};
use sqlx::FromRow;

/// A dataset record, mapped by name from a database row (`sqlx::FromRow`)
/// and (de)serialized with serde.
#[derive(FromRow, Deserialize, Serialize)]
pub struct Dataset {
    // Primary key. NOTE(review): field names are presumed to match the DB
    // column names exactly — confirm against the schema/migrations.
    pub id: i32,
    // Short machine-friendly identifier.
    pub short_name: String,
    // Human-readable display name.
    pub name: String,
    pub description: String,
    // Measurement units for the dataset's values.
    pub units: String,
}
use super::Redis; use crate::{config::user_tags_redis_key, errors::Error}; use octo_budget_lib::auth_token::UserId; use redis::Pipeline; pub async fn increment_tags( user_id: UserId, tags: Vec<String>, redis: &Redis, ) -> Result<(), Error> { let key = user_tags_redis_key(user_id); let mut pipeline = Pipeline::with_capacity(tags.len()); for tag in &tags { pipeline.cmd("zincrby").arg(&key).arg("1").arg(tag); } redis.execute(pipeline).await } pub async fn decrement_tags( user_id: UserId, tags: Vec<String>, redis: &Redis, ) -> Result<(), Error> { let key = user_tags_redis_key(user_id); let mut pipeline = Pipeline::with_capacity(tags.len()); for tag in &tags { pipeline.cmd("zincrby").arg(&key).arg("-1").arg(tag); } pipeline.cmd("zremrangebyscore").arg(&key).arg("0").arg("0"); redis.execute(pipeline).await } pub async fn read_redis_tags(user_id: UserId, redis: &Redis) -> Result<Vec<String>, Error> { let redis_key = user_tags_redis_key(user_id); redis::cmd("zrevrange") .arg(redis_key) .arg("0") .arg("-1") .query_async(&mut redis.connection()) .await .map_err(Into::into) } #[cfg(test)] mod tests { use super::*; use crate::tags_vec; use redis; mod test_redis { use crate::redis::{Redis, RedisConnection}; use serde::export::fmt::Display; pub struct Session(Redis); impl Session { pub async fn new() -> Self { let redis = crate::redis::Redis::new().await; let _: () = redis::cmd("flushall") .query_async(&mut redis.connection()) .await .expect("failed to cleanup redis"); Self(redis) } pub async fn zadd<T: redis::ToRedisArgs + Display>( &mut self, user_id: T, score: T, tag: T, ) { let key = format!("user_tags_{}", user_id); let mut conn = self.conn(); let _: () = redis::cmd("zadd") .arg(key) .arg(score) .arg(tag) .query_async(&mut conn) .await .expect("failed to execute zadd"); } pub fn conn(&mut self) -> RedisConnection { self.0.connection() } pub fn redis(&self) -> &Redis { &self.0 } } } #[actix_rt::test] async fn sorted_tags_if_no_data_stored() { let session = 
test_redis::Session::new().await; let result = read_redis_tags(user_id_1(), session.redis()).await; assert_eq!(tags_vec!(), result.unwrap()); } #[actix_rt::test] async fn sorted_tags_if_data_exist() { let mut session = test_redis::Session::new().await; let user_id = "1"; session.zadd(user_id, "2", "xxx").await; session.zadd(user_id, "3", "zzz").await; let tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); assert_eq!(tags_vec!["zzz", "xxx"], tags); } #[actix_rt::test] async fn get_ordered_tags_with_redis_error() { let mut session = test_redis::Session::new().await; let mut conn = session.conn(); let _: () = redis::cmd("set") .arg("user_tags_1") .arg("foo") .query_async(&mut conn) .await .unwrap(); let result = read_redis_tags(user_id_1(), session.redis()).await; let error = result.unwrap_err().to_string(); assert!( error.contains("WRONGTYPE: Operation against a key holding the wrong kind of value") ); } #[actix_rt::test] async fn sort_tags_with_redis_data() { use crate::apps::helpers::sort_tags; let mut session = test_redis::Session::new().await; let user_id = "1"; session.zadd(user_id, "2", "xxx").await; session.zadd(user_id, "1", "foo").await; session.zadd(user_id, "3", "zzz").await; let redis_tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); let user_tags = tags_vec!["foo", "xxx", "zzz"]; let sorted = sort_tags(&redis_tags, &user_tags); assert_eq!(vec!["zzz", "xxx", "foo"], sorted); } #[actix_rt::test] async fn increment_tags_happy_path() { let mut session = test_redis::Session::new().await; let user_id = "1"; // prepare sort order for tags: session.zadd(user_id, "2", "xxx").await; session.zadd(user_id, "1", "foo").await; session.zadd(user_id, "3", "zzz").await; // check result BEFORE incrementing let redis_tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); assert_eq!(vec!["zzz", "xxx", "foo"], redis_tags); for _ in 0..3 { 
increment_tags(user_id_1(), tags_vec!["foo"], session.redis()) .await .expect("failed to increment"); } // check result AFTER incrementing let redis_tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); assert_eq!(vec!["foo", "zzz", "xxx"], redis_tags); } #[actix_rt::test] async fn decrement_tags_happy_path() { let mut session = test_redis::Session::new().await; let user_id = "1"; session.zadd(user_id, "5", "xxx").await; session.zadd(user_id, "4", "foo").await; session.zadd(user_id, "6", "zzz").await; // first, let's check initial state let tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); assert_eq!(vec!["zzz", "xxx", "foo"], tags); // now let's decrement zzz decrement_tags(user_id_1(), tags_vec!["zzz"], session.redis()) .await .expect("failed to decrement"); // and decrement zzz again decrement_tags(user_id_1(), tags_vec!["zzz"], session.redis()) .await .expect("failed to decrement"); // let's check tags order again let tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); // zzz is no longer the first one assert_eq!(vec!["xxx", "zzz", "foo"], tags); } #[actix_rt::test] async fn decrement_tags_and_delete_zeros_happy_path() { let mut session = test_redis::Session::new().await; let user_id = "1"; // prepare sort order for tags: session.zadd(user_id, "2", "xxx").await; session.zadd(user_id, "1", "foo").await; // first, let's check initial state let tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); assert_eq!(vec!["xxx", "foo"], tags); decrement_tags(user_id_1(), tags_vec!["xxx", "foo"], session.redis()) .await .expect("failed to decrement"); // let's check tags order again let tags = read_redis_tags(user_id_1(), session.redis()) .await .expect("failed to get tags"); assert_eq!(vec!["xxx"], tags); } fn user_id_1() -> UserId { 1.into() } }
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.

// Test suite for `BaseElement` (a 128-bit prime-field element): field algebra,
// roots of unity, (de)serialization, and vector initialization. Arithmetic is
// cross-checked against `BigUint` computations modulo `M`.
use super::*;
use num_bigint::BigUint;
use utils::SliceReader;

// BASIC ALGEBRA
// ================================================================================================

#[test]
fn add() {
    // identity
    let r = BaseElement::rand();
    assert_eq!(r, r + BaseElement::ZERO);

    // test addition within bounds
    assert_eq!(
        BaseElement::from(5u8),
        BaseElement::from(2u8) + BaseElement::from(3u8)
    );

    // test overflow
    let t = BaseElement::from(BaseElement::MODULUS - 1);
    assert_eq!(BaseElement::ZERO, t + BaseElement::ONE);
    assert_eq!(BaseElement::ONE, t + BaseElement::from(2u8));

    // test random values (cross-check against BigUint arithmetic mod M)
    let r1 = BaseElement::rand();
    let r2 = BaseElement::rand();

    let expected = (r1.to_big_uint() + r2.to_big_uint()) % BigUint::from(M);
    let expected = BaseElement::from_big_uint(expected);
    assert_eq!(expected, r1 + r2);
}

#[test]
fn sub() {
    // identity
    let r = BaseElement::rand();
    assert_eq!(r, r - BaseElement::ZERO);

    // test subtraction within bounds
    assert_eq!(
        BaseElement::from(2u8),
        BaseElement::from(5u8) - BaseElement::from(3u8)
    );

    // test underflow (wraps around the modulus)
    let expected = BaseElement::from(BaseElement::MODULUS - 2);
    assert_eq!(expected, BaseElement::from(3u8) - BaseElement::from(5u8));
}

#[test]
fn mul() {
    // identity
    let r = BaseElement::rand();
    assert_eq!(BaseElement::ZERO, r * BaseElement::ZERO);
    assert_eq!(r, r * BaseElement::ONE);

    // test multiplication within bounds
    assert_eq!(
        BaseElement::from(15u8),
        BaseElement::from(5u8) * BaseElement::from(3u8)
    );

    // test overflow: (m-1)^2 = 1 mod m, and (m-1)*k = m-k mod m
    let m = BaseElement::MODULUS;
    let t = BaseElement::from(m - 1);
    assert_eq!(BaseElement::ONE, t * t);
    assert_eq!(BaseElement::from(m - 2), t * BaseElement::from(2u8));
    assert_eq!(BaseElement::from(m - 4), t * BaseElement::from(4u8));

    // (m+1)/2 is the multiplicative inverse of 2
    let t = (m + 1) / 2;
    assert_eq!(
        BaseElement::ONE,
        BaseElement::from(t) * BaseElement::from(2u8)
    );

    // test random values
    let v1 = BaseElement::prng_vector(build_seed(), 1000);
    let v2 = BaseElement::prng_vector(build_seed(), 1000);
    for i in 0..v1.len() {
        let r1 = v1[i];
        let r2 = v2[i];

        let expected = (r1.to_big_uint() * r2.to_big_uint()) % BigUint::from(M);
        let expected = BaseElement::from_big_uint(expected);

        if expected != r1 * r2 {
            // print the failing pair before asserting, to ease debugging
            println!("failed for: {} * {}", r1, r2);
            assert_eq!(expected, r1 * r2);
        }
    }
}

#[test]
fn inv() {
    // identity; by convention inv(0) == 0 here
    assert_eq!(BaseElement::ONE, BaseElement::inv(BaseElement::ONE));
    assert_eq!(BaseElement::ZERO, BaseElement::inv(BaseElement::ZERO));

    // test random values: x * inv(x) == 1
    let x = BaseElement::prng_vector(build_seed(), 1000);
    for i in 0..x.len() {
        let y = BaseElement::inv(x[i]);
        assert_eq!(BaseElement::ONE, x[i] * y);
    }
}

#[test]
fn conjugate() {
    // conjugation is the identity in the base field
    let a = BaseElement::rand();
    let b = a.conjugate();
    assert_eq!(a, b);
}

// ROOTS OF UNITY
// ================================================================================================

#[test]
fn get_root_of_unity() {
    // root of order 2^40 and its relation to the order-2^39 root
    let root_40 = BaseElement::get_root_of_unity(40);
    assert_eq!(
        BaseElement::from(23953097886125630542083529559205016746u128),
        root_40
    );
    assert_eq!(BaseElement::ONE, root_40.exp(u128::pow(2, 40)));

    let root_39 = BaseElement::get_root_of_unity(39);
    let expected = root_40.exp(2);
    assert_eq!(expected, root_39);
    assert_eq!(BaseElement::ONE, root_39.exp(u128::pow(2, 39)));
}

#[test]
fn test_g_is_2_exp_40_root() {
    // the declared two-adic generator has order exactly dividing 2^40
    let g = BaseElement::TWO_ADIC_ROOT_OF_UNITY;
    assert_eq!(g.exp(1u128 << 40), BaseElement::ONE);
}

// SERIALIZATION / DESERIALIZATION
// ================================================================================================

#[test]
fn elements_into_bytes() {
    // elements serialize as 16 little-endian bytes each
    let source = vec![
        BaseElement::new(1),
        BaseElement::new(2),
        BaseElement::new(3),
        BaseElement::new(4),
    ];

    let expected: Vec<u8> = vec![
        1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    ];

    assert_eq!(expected, BaseElement::elements_into_bytes(source));
}

#[test]
fn elements_as_bytes() {
    let source = vec![
        BaseElement::new(1),
        BaseElement::new(2),
        BaseElement::new(3),
        BaseElement::new(4),
    ];

    let expected: Vec<u8> = vec![
        1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    ];

    assert_eq!(expected, BaseElement::elements_as_bytes(&source));
}

#[test]
fn bytes_as_elements() {
    // 65 bytes: 4 full elements plus one trailing byte
    let bytes: Vec<u8> = vec![
        1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        5,
    ];

    let expected = vec![
        BaseElement::new(1),
        BaseElement::new(2),
        BaseElement::new(3),
        BaseElement::new(4),
    ];

    // exact multiple of the element size: ok
    let result = unsafe { BaseElement::bytes_as_elements(&bytes[..64]) };
    assert!(result.is_ok());
    assert_eq!(expected, result.unwrap());

    // trailing partial element: rejected
    let result = unsafe { BaseElement::bytes_as_elements(&bytes) };
    assert!(matches!(result, Err(DeserializationError::InvalidValue(_))));

    // misaligned start: rejected
    let result = unsafe { BaseElement::bytes_as_elements(&bytes[1..]) };
    assert!(matches!(result, Err(DeserializationError::InvalidValue(_))));
}

#[test]
fn read_elements_from() {
    // last 16 bytes encode a value >= MODULUS, i.e. an invalid element
    let bytes: Vec<u8> = vec![
        1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    ];
    let expected = vec![
        BaseElement::new(1),
        BaseElement::new(2),
        BaseElement::new(3),
        BaseElement::new(4),
    ];

    // fill whole target
    let mut reader = SliceReader::new(&bytes[..64]);
    let result = BaseElement::read_batch_from(&mut reader, 4);
    assert!(result.is_ok());
    assert_eq!(expected, result.unwrap());
    assert_eq!(false, reader.has_more_bytes());

    // partial number of elements (one unread byte remains)
    let mut reader = SliceReader::new(&bytes[..65]);
    let result = BaseElement::read_batch_from(&mut reader, 4);
    assert!(result.is_ok());
    assert_eq!(expected, result.unwrap());
    assert_eq!(true, reader.has_more_bytes());

    // invalid element (the all-255 value at the tail)
    let mut reader = SliceReader::new(&bytes[16..]);
    let result = BaseElement::read_batch_from(&mut reader, 4);
    assert!(result.is_err());
    match result {
        Err(err) => {
            assert!(matches!(err, DeserializationError::InvalidValue(_)));
        }
        _ => (),
    }
}

// INITIALIZATION
// ================================================================================================

#[test]
fn zeroed_vector() {
    let result = BaseElement::zeroed_vector(4);
    assert_eq!(4, result.len());
    for element in result.into_iter() {
        assert_eq!(BaseElement::ZERO, element);
    }
}

#[test]
fn prng_vector() {
    // same seed -> same prefix, regardless of requested length
    let a = BaseElement::prng_vector([0; 32], 4);
    assert_eq!(4, a.len());

    let b = BaseElement::prng_vector([0; 32], 8);
    assert_eq!(8, b.len());

    for (&a, &b) in a.iter().zip(b.iter()) {
        assert_eq!(a, b);
    }

    // different seed -> different values
    let c = BaseElement::prng_vector([1; 32], 4);
    for (&a, &c) in a.iter().zip(c.iter()) {
        assert_ne!(a, c);
    }
}

// HELPER FUNCTIONS
// ================================================================================================

// Builds a 32-byte seed from one random 16-byte element (upper half zeroed).
fn build_seed() -> [u8; 32] {
    let mut result = [0; 32];
    let seed = BaseElement::rand().as_bytes().to_vec();
    result[..16].copy_from_slice(&seed);
    result
}

// Test-only conversions between BaseElement and BigUint (little-endian bytes).
impl BaseElement {
    pub fn to_big_uint(&self) -> BigUint {
        BigUint::from_bytes_le(self.as_bytes())
    }

    pub fn from_big_uint(value: BigUint) -> Self {
        // value must fit in 16 bytes and be a canonical field element;
        // try_from enforces the range check.
        let bytes = value.to_bytes_le();
        let mut buffer = [0u8; 16];
        buffer[0..bytes.len()].copy_from_slice(&bytes);
        BaseElement::try_from(buffer).unwrap()
    }
}
/// grep-lite demo: finds lines containing `query` and prints each match's
/// surrounding context (`ctx_lines` lines before and after, 1-based numbering).
fn main() {
    let ctx_lines = 2;
    let query = "mm";
    let source = "\
Lorem ipsum dolor sit amet, consectetur adipiscing elit,\n\
sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.\n\
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.\n\
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.\n\
Excepteur sint occaecat cupidatat non proident,\n\
sunt in culpa qui officia deserunt mollit anim id est laborum.";

    // Pass 1: collect the 0-based line numbers of every matching line.
    let tags: Vec<usize> = source
        .lines()
        .enumerate()
        .filter(|(_, line)| line.contains(query))
        .map(|(i, _)| i)
        .collect();

    // One context bucket per match, pre-sized for the worst case
    // (ctx_lines before + the match itself + ctx_lines after).
    let mut ctx: Vec<Vec<(usize, String)>> = tags
        .iter()
        .map(|_| Vec::with_capacity(2 * ctx_lines + 1))
        .collect();

    // Nothing matched: nothing to print.
    if tags.is_empty() {
        return;
    }

    // Pass 2: assign each line to every match window it falls into.
    for (i, line) in source.lines().enumerate() {
        for (j, tag) in tags.iter().enumerate() {
            // saturating_sub keeps the lower bound at 0 near the file start.
            let lower_bound = tag.saturating_sub(ctx_lines);
            let upper_bound = tag + ctx_lines;

            if (lower_bound..=upper_bound).contains(&i) {
                ctx[j].push((i, line.to_string()));
            }
        }
    }

    // Print each match's context with 1-based line numbers.
    for local_ctx in ctx.iter() {
        // `ref line` borrows the String instead of moving it out of the tuple.
        for &(i, ref line) in local_ctx.iter() {
            println!("{}: {}", i + 1, line);
        }
    }
}
// Style-selector model for the guion UI toolkit: `Selector` is a partial
// (all-Option) selector that can be merged and appended to; `SelectorFilled`
// is the fully-resolved form with defaults applied.
use std::fmt::Debug;
use std::marker::PhantomData;
use std::ops::AddAssign;
use guion::aliases::ESColor;
use guion::env::Env;
use guion::style::selector::{StyleSelector, StyleSelectorAppend};
use guion::style::selectag::standard::StdSelectag;
use guion::util::border::Border;
use guion::style::selectag::StyleSelectag;

// Partial selector: `None` means "unspecified, fall back to default/previous".
#[non_exhaustive]
#[derive(Clone,Default)]
pub struct Selector<E> where E: Env {
    pub obj: Option<Obj>,
    pub design: Option<Design>,
    pub accent: Option<u32>,
    pub variance: Option<Variance>,
    pub hovered: Option<bool>,
    pub focused: Option<bool>,
    pub pressed: Option<bool>,
    pub locked: Option<bool>,
    //pub cursor: Option<StdCursor>, TODO
    pub border: Option<BorderPtr>,
    pub _p: PhantomData<E>, //TODO fix lazyness
}

// Fully-resolved selector: every field has a concrete value (see `filled`).
pub struct SelectorFilled<E> where E: Env {
    pub obj: Obj,
    pub design: Design,
    pub accent: u32,
    pub variance: Variance,
    pub hovered: bool,
    pub focused: bool,
    pub pressed: bool,
    pub locked: bool,
    //pub cursor: StdCursor, TODO
    pub border: BorderPtr,
    pub _p: PhantomData<E>, //TODO fix lazyness
}

// Which widget aspect the style targets.
#[non_exhaustive]
#[derive(Clone,Copy,PartialEq)]
pub enum Obj {
    Default,
    Background,
    Foreground,
    Text,
    Box,
    Active,
    Border,
    Button,
    List,
    TextBox,
    Label,
    Scroll,
}

#[non_exhaustive]
#[derive(Clone,Copy,PartialEq)]
pub enum Design {
    Default,
    Normal,
    Flat,
}

// Semantic color/emphasis variant of a widget.
#[non_exhaustive]
#[derive(Clone,Copy,PartialEq)]
pub enum Variance {
    Default,
    Normal,
    OK,
    Caution,
    Secondary,
}

// Mouse-cursor shapes; currently unused (see the TODOs above).
#[non_exhaustive]
#[derive(Clone,Copy,PartialEq)]
pub enum StdCursor {
    Default,
    Arrow,
    IBeam,
    Wait,
    Crosshair,
    WaitArrow,
    SizeNWSE,
    SizeNESW,
    SizeWE,
    SizeNS,
    SizeAll,
    No,
    Hand,
}

// Which border definition a style refers to.
#[non_exhaustive]
#[derive(Clone,Copy,PartialEq)]
pub enum BorderPtr {
    Default,
    Outer,
    Visual,
}

/*impl<E> Default for Selector<E> where E: Env {
    #[inline]
    fn default() -> Self {
        Self{
            obj: Obj::Default,
            design: Design::Default,
            accent: 0,
            variance: Variance::Default,
            hovered: false,
            focused: false,
            pressed: false,
            locked: false,
            //cursor: StdCursor::Default,
            border: BorderPtr::Default,
            _p: PhantomData
        }
    }
}*/

impl<E> Selector<E> where E: Env {
    // Resolves every unspecified field to its default value.
    pub fn filled(&self) -> SelectorFilled<E> {
        let s = self.clone();
        SelectorFilled {
            obj: s.obj.unwrap_or(Obj::Default),
            design: s.design.unwrap_or(Design::Default),
            accent: s.accent.unwrap_or(0),
            variance: s.variance.unwrap_or(Variance::Default),
            hovered: s.hovered.unwrap_or(false),
            focused: s.focused.unwrap_or(false),
            pressed: s.pressed.unwrap_or(false),
            locked: s.locked.unwrap_or(false),
            border: s.border.unwrap_or(BorderPtr::Default),
            _p: PhantomData,
        }
    }
}

impl<E> StyleSelector<E> for Selector<E> where E: Env {
    // Merge: fields set in `s` override `self`; unset fields fall through.
    fn and(&self, s: &Self) -> Self {
        Self{
            obj: s.obj.or(self.obj).clone(),
            design: s.design.or(self.design).clone(),
            accent: s.accent.or(self.accent).clone(),
            variance: s.variance.or(self.variance).clone(),
            hovered: s.hovered.or(self.hovered).clone(),
            focused: s.focused.or(self.focused).clone(),
            pressed: s.pressed.or(self.pressed).clone(),
            locked: s.locked.or(self.locked).clone(),
            border: s.border.or(self.border).clone(),
            _p: PhantomData,
        }
    }
}

impl<E> StyleSelectorAppend<StdSelectag<E>,E> for Selector<E> where E: Env {
    // Applies one standard selector tag by setting the corresponding field.
    // Unknown tags are ignored (`StdSelectag` is presumably non-exhaustive).
    #[inline]
    fn append(&mut self, v: StdSelectag<E>) {
        match v {
            StdSelectag::ObjDefault => self.obj = Some(Obj::Default),
            StdSelectag::ObjBackground => self.obj = Some(Obj::Background),
            StdSelectag::ObjForeground => self.obj = Some(Obj::Foreground),
            StdSelectag::ObjText => self.obj = Some(Obj::Text),
            StdSelectag::ObjBox => self.obj = Some(Obj::Box),
            StdSelectag::ObjBorder => self.obj = Some(Obj::Border),
            StdSelectag::ObjActive => self.obj = Some(Obj::Active),
            StdSelectag::ObjButton => self.obj = Some(Obj::Button),
            StdSelectag::ObjList => self.obj = Some(Obj::List),
            StdSelectag::ObjTextBox => self.obj = Some(Obj::TextBox),
            StdSelectag::ObjLabel => self.obj = Some(Obj::Label),
            StdSelectag::ObjScroll => self.obj = Some(Obj::Scroll),
            StdSelectag::DesignDefault => self.design = Some(Design::Default),
            StdSelectag::DesignNormal => self.design = Some(Design::Normal),
            StdSelectag::DesignFlat => self.design = Some(Design::Flat),
            StdSelectag::Accent(v) => self.accent = Some(v),
            StdSelectag::VariantDefault => self.variance = Some(Variance::Default),
            StdSelectag::VariantNormal => self.variance = Some(Variance::Normal),
            StdSelectag::VariantOK => self.variance = Some(Variance::OK),
            StdSelectag::VariantCaution => self.variance = Some(Variance::Caution),
            StdSelectag::VariantSecondary => self.variance = Some(Variance::Secondary),
            StdSelectag::Hovered(v) => self.hovered = Some(v),
            StdSelectag::Focused(v) => self.focused = Some(v),
            StdSelectag::Pressed(v) => self.pressed = Some(v),
            StdSelectag::Locked(v) => self.locked = Some(v),
            StdSelectag::BorderDefault => self.border = Some(BorderPtr::Default),
            StdSelectag::BorderOuter => self.border = Some(BorderPtr::Outer),
            StdSelectag::BorderVisual => self.border = Some(BorderPtr::Visual),
            _ => {},
        }
    }
}

// `selector += tag` sugar for `append`.
impl<E,T> AddAssign<T> for Selector<E> where Self: StyleSelectorAppend<T,E>, T: StyleSelectag<E>, E: Env {
    #[inline]
    fn add_assign(&mut self, v: T) {
        self.append(v)
    }
}

/*impl<E,T> StyleSelectorAppend<&[T]> for Selector<E> where Self: StyleSelectorAppend<T>, T: Clone, E: Env {
    #[inline]
    fn attach(&mut self, selectors: &[T]) {
        for t in selectors {
            self.attach(t.clone());
        }
    }
}
impl<E> StyleSelectorAppend<()> for Selector<E> where E: Env {
    #[inline]
    fn attach(&mut self, _: ()) {}
}
impl<E> StyleSelectorAppend<&()> for Selector<E> where E: Env {
    #[inline]
    fn attach(&mut self, _: &()) {}
}
impl<E> StyleSelectorGetStdCursor for Selector<E> where E: Env {
    #[inline]
    fn cursor(&self) -> StdCursor {
        self.cursor
    }
}*/
/*impl<E> Clone for Selector<E> where E: Env {
    fn clone(&self) -> Self {
        Self{
            obj: self.obj.clone(),
            design: self.design.clone(),
            accent: self.accent.clone(),
            variance: self.variance.clone(),
            hovered: self.hovered.clone(),
            focused: self.focused.clone(),
            pressed: self.pressed.clone(),
            locked: self.locked.clone(),
            cursor: self.cursor.clone(),
            border_ptr: self.border_ptr.clone(),
            border_mul: self.border_mul.clone(),
            color_specific: self.color_specific.clone(),
        }
    }
}*/
use lapin::{BasicProperties, options::BasicPublishOptions};

use super::PublisherOptions;

/// AMQP publisher configured with a broker address, exchange and routing key.
#[derive(Clone, Debug)]
pub struct Publisher {
    options: PublisherOptions
}

impl Publisher {
    /// Wraps the given options; no connection is made until `publish`.
    pub fn new(publisher_options: PublisherOptions) -> Self {
        Publisher {
            options: publisher_options
        }
    }

    /// Publishes one message, opening and tearing down a fresh
    /// connection + channel per call (connect -> publish -> close).
    ///
    /// NOTE(review): per-message connections are expensive; panics via
    /// `expect` if the broker rejects the publish.
    pub fn publish(&self, message_bytes: Vec<u8>) {
        let connection = super::connect(&self.options.broker_address);
        let channel = super::create_channel(&connection);

        channel
            .basic_publish(
                &self.options.exchange_name,
                &self.options.routing_key,
                BasicPublishOptions::default(),
                message_bytes,
                BasicProperties::default(),
            )
            // `.wait()` blocks the current thread until the broker confirms.
            .wait()
            .expect("Error publishing message");

        super::close_channel(&channel);
        super::close_connection(&connection);
    }
}
use clap::{App, Arg}; use log::LogLevelFilter; use std::str::FromStr; use Route; use Logger; use http::{Request, Response}; use server::Server; struct Settings { log_level_filter: LogLevelFilter } pub struct Router { routes: Vec<Route> } impl Router { pub fn new() -> Router { let settings = Router::settings(); Logger::init(settings.log_level_filter); Router { routes: Vec::new() } } fn settings() -> Settings { let matches = App::new("constantine") .arg(Arg::with_name("log_level") .long("log_level") .takes_value(true)) .get_matches(); let log_level = matches.value_of("log_level").unwrap_or("info"); let log_level_filter = LogLevelFilter::from_str(log_level).ok().expect("Wrong log_level."); Settings { log_level_filter: log_level_filter } } pub fn register<H: Fn(&mut Request, &mut Response) -> (String) + 'static>(&mut self, method: &'static str, path: &'static str, handler: H) { let route = Route { method: method, path: path, handler: Box::new(handler) }; self.routes.push(route); } pub fn get<H: Fn(&mut Request, &mut Response) -> (String) + 'static>(&mut self, path: &'static str, handler: H) { self.register("GET", path, handler); } pub fn post<H: Fn(&mut Request, &mut Response) -> (String) + 'static>(&mut self, path: &'static str, handler: H) { self.register("POST", path, handler); } pub fn start(self) { Server::start(self.routes); } }