hexsha
stringlengths
40
40
size
int64
32
998k
ext
stringclasses
1 value
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
6
206
max_stars_repo_name
stringlengths
6
110
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
listlengths
1
6
max_stars_count
float64
1
368k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
6
206
max_issues_repo_name
stringlengths
6
110
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
listlengths
1
6
max_issues_count
float64
1
77k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
6
206
max_forks_repo_name
stringlengths
6
110
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
listlengths
1
6
max_forks_count
float64
1
28.6k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
32
998k
avg_line_length
float64
5.99
517
max_line_length
int64
15
222k
alphanum_fraction
float64
0.05
0.98
test_functions
listlengths
1
633
f700019f6cc8e4d84dab4aa28137258fd5f3bf7d
11,234
rs
Rust
src/impls/memory.rs
Zyian/rust-vfs
40fe15a947479054d98b8303f11db123ffd723cc
[ "Apache-2.0" ]
null
null
null
src/impls/memory.rs
Zyian/rust-vfs
40fe15a947479054d98b8303f11db123ffd723cc
[ "Apache-2.0" ]
null
null
null
src/impls/memory.rs
Zyian/rust-vfs
40fe15a947479054d98b8303f11db123ffd723cc
[ "Apache-2.0" ]
null
null
null
//! An ephemeral in-memory file system, intended mainly for unit tests use crate::{FileSystem, VfsFileType}; use crate::{SeekAndRead, VfsMetadata}; use crate::{VfsError, VfsResult}; use core::cmp; use std::collections::HashMap; use std::fmt; use std::fmt::{Debug, Formatter}; use std::io::{Cursor, Read, Seek, SeekFrom, Write}; use std::mem::swap; use std::sync::{Arc, RwLock}; type MemoryFsHandle = Arc<RwLock<MemoryFsImpl>>; /// An ephemeral in-memory file system, intended mainly for unit tests pub struct MemoryFS { handle: MemoryFsHandle, } impl Debug for MemoryFS { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str("In Memory File System") } } impl MemoryFS { /// Create a new in-memory filesystem pub fn new() -> Self { MemoryFS { handle: Arc::new(RwLock::new(MemoryFsImpl::new())), } } fn ensure_has_parent(&self, path: &str) -> VfsResult<()> { let separator = path.rfind('/'); if let Some(index) = separator { if self.exists(&path[..index])? { return Ok(()); } } Err(VfsError::Other { message: format!("Parent path of {} does not exist", path), }) } } impl Default for MemoryFS { fn default() -> Self { Self::new() } } struct WritableFile { content: Cursor<Vec<u8>>, destination: String, fs: MemoryFsHandle, } impl Write for WritableFile { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.content.write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.content.flush() } } impl Drop for WritableFile { fn drop(&mut self) { let mut content = vec![]; swap(&mut content, self.content.get_mut()); self.fs.write().unwrap().files.insert( self.destination.clone(), MemoryFile { file_type: VfsFileType::File, content: Arc::new(content), }, ); } } struct ReadableFile { #[allow(clippy::rc_buffer)] // to allow accessing the same object as writable content: Arc<Vec<u8>>, position: u64, } impl ReadableFile { fn len(&self) -> u64 { self.content.len() as u64 - self.position } } impl Read for ReadableFile { fn read(&mut self, buf: &mut [u8]) -> 
std::io::Result<usize> { let amt = cmp::min(buf.len(), self.len() as usize); if amt == 1 { buf[0] = self.content[self.position as usize]; } else { buf[..amt].copy_from_slice( &self.content.as_slice()[self.position as usize..self.position as usize + amt], ); } self.position += amt as u64; Ok(amt) } } impl Seek for ReadableFile { fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> { match pos { SeekFrom::Start(offset) => self.position = offset, SeekFrom::Current(offset) => self.position = (self.position as i64 + offset) as u64, SeekFrom::End(offset) => self.position = (self.content.len() as i64 + offset) as u64, } Ok(self.position) } } impl FileSystem for MemoryFS { fn read_dir(&self, path: &str) -> VfsResult<Box<dyn Iterator<Item = String>>> { let prefix = format!("{}/", path); let handle = self.handle.read().unwrap(); let mut found_directory = false; #[allow(clippy::needless_collect)] // need collect to satisfy lifetime requirements let entries: Vec<_> = handle .files .iter() .filter_map(|(candidate_path, _)| { if candidate_path == path { found_directory = true; } if candidate_path.starts_with(&prefix) { let rest = &candidate_path[prefix.len()..]; if !rest.contains('/') { return Some(rest.to_string()); } } None }) .collect(); if !found_directory { return Err(VfsError::FileNotFound { path: path.to_string(), }); } Ok(Box::new(entries.into_iter())) } fn create_dir(&self, path: &str) -> VfsResult<()> { self.ensure_has_parent(path)?; self.handle.write().unwrap().files.insert( path.to_string(), MemoryFile { file_type: VfsFileType::Directory, content: Default::default(), }, ); Ok(()) } fn open_file(&self, path: &str) -> VfsResult<Box<dyn SeekAndRead>> { let handle = self.handle.read().unwrap(); let file = handle .files .get(path) .ok_or_else(|| VfsError::FileNotFound { path: path.to_string(), })?; ensure_file(file)?; Ok(Box::new(ReadableFile { content: file.content.clone(), position: 0, })) } fn create_file(&self, path: &str) -> VfsResult<Box<dyn Write>> { 
self.ensure_has_parent(path)?; let content = Arc::new(Vec::<u8>::new()); self.handle.write().unwrap().files.insert( path.to_string(), MemoryFile { file_type: VfsFileType::File, content, }, ); let writer = WritableFile { content: Cursor::new(vec![]), destination: path.to_string(), fs: self.handle.clone(), }; Ok(Box::new(writer)) } fn append_file(&self, path: &str) -> VfsResult<Box<dyn Write>> { let handle = self.handle.write().unwrap(); let file = &handle.files[path]; let mut content = Cursor::new(file.content.as_ref().clone()); content.seek(SeekFrom::End(0))?; let writer = WritableFile { content, destination: path.to_string(), fs: self.handle.clone(), }; Ok(Box::new(writer)) } fn metadata(&self, path: &str) -> VfsResult<VfsMetadata> { let guard = self.handle.read().unwrap(); let files = &guard.files; let file = files.get(path).ok_or_else(|| VfsError::FileNotFound { path: path.to_string(), })?; Ok(VfsMetadata { file_type: file.file_type, len: file.content.len() as u64, }) } fn exists(&self, path: &str) -> VfsResult<bool> { Ok(self.handle.read().unwrap().files.contains_key(path)) } fn remove_file(&self, path: &str) -> VfsResult<()> { let mut handle = self.handle.write().unwrap(); handle .files .remove(path) .ok_or_else(|| VfsError::FileNotFound { path: path.to_string(), })?; Ok(()) } fn remove_dir(&self, path: &str) -> VfsResult<()> { if self.read_dir(path)?.next().is_some() { return Err(VfsError::Other { message: "Directory to remove is not empty".to_string(), }); } let mut handle = self.handle.write().unwrap(); handle .files .remove(path) .ok_or_else(|| VfsError::FileNotFound { path: path.to_string(), })?; Ok(()) } } struct MemoryFsImpl { files: HashMap<String, MemoryFile>, } impl MemoryFsImpl { pub fn new() -> Self { let mut files = HashMap::new(); // Add root directory files.insert( "".to_string(), MemoryFile { file_type: VfsFileType::Directory, content: Arc::new(vec![]), }, ); Self { files } } } struct MemoryFile { file_type: VfsFileType, 
#[allow(clippy::rc_buffer)] // to allow accessing the same object as writable content: Arc<Vec<u8>>, } #[cfg(test)] mod tests { use super::*; use crate::VfsPath; test_vfs!(MemoryFS::new()); #[test] fn write_and_read_file() -> VfsResult<()> { let root = VfsPath::new(MemoryFS::new()); let path = root.join("foobar.txt").unwrap(); let _send = &path as &dyn Send; { let mut file = path.create_file().unwrap(); write!(file, "Hello world").unwrap(); write!(file, "!").unwrap(); } { let mut file = path.open_file().unwrap(); let mut string: String = String::new(); file.read_to_string(&mut string).unwrap(); assert_eq!(string, "Hello world!"); } assert!(path.exists()?); assert!(!root.join("foo").unwrap().exists()?); let metadata = path.metadata().unwrap(); assert_eq!(metadata.len, 12); assert_eq!(metadata.file_type, VfsFileType::File); Ok(()) } #[test] fn append_file() { let root = VfsPath::new(MemoryFS::new()); let _string = String::new(); let path = root.join("test_append.txt").unwrap(); path.create_file().unwrap().write_all(b"Testing 1").unwrap(); path.append_file().unwrap().write_all(b"Testing 2").unwrap(); { let mut file = path.open_file().unwrap(); let mut string: String = String::new(); file.read_to_string(&mut string).unwrap(); assert_eq!(string, "Testing 1Testing 2"); } } #[test] fn create_dir() { let root = VfsPath::new(MemoryFS::new()); let _string = String::new(); let path = root.join("foo").unwrap(); path.create_dir().unwrap(); let metadata = path.metadata().unwrap(); assert_eq!(metadata.file_type, VfsFileType::Directory); } #[test] fn remove_dir_error_message() { let root = VfsPath::new(MemoryFS::new()); let path = root.join("foo").unwrap(); let result = path.remove_dir(); assert_eq!(format!("{}", result.unwrap_err()), "Could not remove directory '/foo', cause: The file or directory `/foo` could not be found"); } #[test] fn read_dir_error_message() { let root = VfsPath::new(MemoryFS::new()); let path = root.join("foo").unwrap(); let result = path.read_dir(); match 
result { Ok(_) => panic!("Error expected"), Err(err) => { assert_eq!(format!("{}", err), "Could not read directory '/foo', cause: The file or directory `/foo` could not be found"); } } } #[test] fn copy_file_across_filesystems() -> VfsResult<()> { let root_a = VfsPath::new(MemoryFS::new()); let root_b = VfsPath::new(MemoryFS::new()); let src = root_a.join("a.txt")?; let dest = root_b.join("b.txt")?; src.create_file()?.write_all(b"Hello World")?; src.copy_file(&dest)?; assert_eq!(&dest.read_to_string()?, "Hello World"); Ok(()) } } fn ensure_file(file: &MemoryFile) -> VfsResult<()> { if file.file_type != VfsFileType::File { return Err(VfsError::Other { message: "Not a file".to_string(), }); } Ok(()) }
29.798408
148
0.527951
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn write_and_read_file() -> VfsResult<()> {\n let root = VfsPath::new(MemoryFS::new());\n let path = root.join(\"foobar.txt\").unwrap();\n let _send = &path as &dyn Send;\n {\n let mut file = path.create_file().unwrap();\n write!(file, \"Hello world\").unwrap();\n write!(file, \"!\").unwrap();\n }\n {\n let mut file = path.open_file().unwrap();\n let mut string: String = String::new();\n file.read_to_string(&mut string).unwrap();\n assert_eq!(string, \"Hello world!\");\n }\n assert!(path.exists()?);\n assert!(!root.join(\"foo\").unwrap().exists()?);\n let metadata = path.metadata().unwrap();\n assert_eq!(metadata.len, 12);\n assert_eq!(metadata.file_type, VfsFileType::File);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn append_file() {\n let root = VfsPath::new(MemoryFS::new());\n let _string = String::new();\n let path = root.join(\"test_append.txt\").unwrap();\n path.create_file().unwrap().write_all(b\"Testing 1\").unwrap();\n path.append_file().unwrap().write_all(b\"Testing 2\").unwrap();\n {\n let mut file = path.open_file().unwrap();\n let mut string: String = String::new();\n file.read_to_string(&mut string).unwrap();\n assert_eq!(string, \"Testing 1Testing 2\");\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn create_dir() {\n let root = VfsPath::new(MemoryFS::new());\n let _string = String::new();\n let path = root.join(\"foo\").unwrap();\n path.create_dir().unwrap();\n let metadata = path.metadata().unwrap();\n assert_eq!(metadata.file_type, VfsFileType::Directory);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn remove_dir_error_message() {\n let root = VfsPath::new(MemoryFS::new());\n let path = root.join(\"foo\").unwrap();\n let result = path.remove_dir();\n assert_eq!(format!(\"{}\", result.unwrap_err()), \"Could not remove directory '/foo', cause: The file or directory `/foo` could not be found\");\n }\n}", "#[cfg(test)]\nmod 
tests {\n use super::*;\n #[test]\n fn read_dir_error_message() {\n let root = VfsPath::new(MemoryFS::new());\n let path = root.join(\"foo\").unwrap();\n let result = path.read_dir();\n match result {\n Ok(_) => panic!(\"Error expected\"),\n Err(err) => {\n assert_eq!(format!(\"{}\", err), \"Could not read directory '/foo', cause: The file or directory `/foo` could not be found\");\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn copy_file_across_filesystems() -> VfsResult<()> {\n let root_a = VfsPath::new(MemoryFS::new());\n let root_b = VfsPath::new(MemoryFS::new());\n let src = root_a.join(\"a.txt\")?;\n let dest = root_b.join(\"b.txt\")?;\n src.create_file()?.write_all(b\"Hello World\")?;\n src.copy_file(&dest)?;\n assert_eq!(&dest.read_to_string()?, \"Hello World\");\n Ok(())\n }\n}" ]
f700b4631291779bac31279265cd69455dcacb17
899
rs
Rust
src/ppu/control_register/spec_tests.rs
planet-s/rs-nes
d6e15726b30b17736df990762165d541b43394b7
[ "MIT" ]
103
2016-12-06T17:14:33.000Z
2021-09-09T16:42:24.000Z
src/ppu/control_register/spec_tests.rs
planet-s/rs-nes
d6e15726b30b17736df990762165d541b43394b7
[ "MIT" ]
15
2015-07-27T01:20:30.000Z
2019-01-20T20:42:56.000Z
src/ppu/control_register/spec_tests.rs
planet-s/rs-nes
d6e15726b30b17736df990762165d541b43394b7
[ "MIT" ]
3
2017-10-11T01:45:05.000Z
2020-07-24T07:58:57.000Z
use super::*; #[test] fn vram_addr_increment() { let ppu_ctrl = new_control_register(0b00000000); assert_eq!(IncrementAmount::One, ppu_ctrl.vram_addr_increment()); let ppu_ctrl = new_control_register(0b00000100); assert_eq!(IncrementAmount::ThirtyTwo, ppu_ctrl.vram_addr_increment()); } #[test] fn sprite_size() { let ppu_ctrl = new_control_register(0b00000000); assert_eq!(SpriteSize::X8, ppu_ctrl.sprite_size()); let ppu_ctrl = new_control_register(0b00100000); assert_eq!(SpriteSize::X16, ppu_ctrl.sprite_size()); } #[test] fn nmi_on_vblank_start() { let ppu_ctrl = new_control_register(0b00000000); assert_eq!(false, ppu_ctrl.nmi_on_vblank_start()); let ppu_ctrl = new_control_register(0b10000000); assert_eq!(true, ppu_ctrl.nmi_on_vblank_start()); } fn new_control_register(val: u8) -> ControlRegister { ControlRegister { reg: val } }
27.242424
75
0.734149
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn vram_addr_increment() {\n let ppu_ctrl = new_control_register(0b00000000);\n assert_eq!(IncrementAmount::One, ppu_ctrl.vram_addr_increment());\n\n let ppu_ctrl = new_control_register(0b00000100);\n assert_eq!(IncrementAmount::ThirtyTwo, ppu_ctrl.vram_addr_increment());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sprite_size() {\n let ppu_ctrl = new_control_register(0b00000000);\n assert_eq!(SpriteSize::X8, ppu_ctrl.sprite_size());\n\n let ppu_ctrl = new_control_register(0b00100000);\n assert_eq!(SpriteSize::X16, ppu_ctrl.sprite_size());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn nmi_on_vblank_start() {\n let ppu_ctrl = new_control_register(0b00000000);\n assert_eq!(false, ppu_ctrl.nmi_on_vblank_start());\n\n let ppu_ctrl = new_control_register(0b10000000);\n assert_eq!(true, ppu_ctrl.nmi_on_vblank_start());\n}\n}" ]
f700b7fd9eddfed7dd46360828c9f9a5d963a15f
1,422
rs
Rust
crates/category/tests/find.rs
Nertsal/categories
3fd0a8b4f5c9a3df78c35126bb4af3a9ed10bae3
[ "MIT" ]
1
2021-11-14T14:33:37.000Z
2021-11-14T14:33:37.000Z
crates/category/tests/find.rs
Nertsal/categories
3fd0a8b4f5c9a3df78c35126bb4af3a9ed10bae3
[ "MIT" ]
11
2021-11-14T19:09:44.000Z
2022-03-23T17:08:52.000Z
crates/category/tests/find.rs
Nertsal/categories
3fd0a8b4f5c9a3df78c35126bb4af3a9ed10bae3
[ "MIT" ]
null
null
null
use category::constraint::ConstraintsBuilder; use category::prelude::*; use category::{Bindings, CategoryBuilder}; #[test] fn test_find() { let category = CategoryBuilder::<(), (), (), &str>::new() .object("A", vec![], ()) .object("B", vec![], ()) .object("AxB", vec![ObjectTag::Product("A", "B")], ()) .morphism("p1", "AxB", "A", vec![], ()) .morphism("p2", "AxB", "B", vec![], ()) .morphism("id", "AxB", "AxB", vec![MorphismTag::Identity("AxB")], ()) .build(); let constraints = ConstraintsBuilder::<&str>::new() .object("A", vec![]) .object("B", vec![]) .object("AxB", vec![ObjectTag::Product("A", "B")]) .morphism("p1", "AxB", "A", vec![]) .morphism("p2", "AxB", "B", vec![]) .object("C", vec![]) .morphism("f", "C", "A", vec![]) .morphism("g", "C", "B", vec![]) .morphism("m", "C", "AxB", vec![]) .equality(vec!["m", "p1"], vec!["f"]) .equality(vec!["m", "p2"], vec!["g"]) .build(); let candidates = category .find_candidates(&constraints, &Bindings::new()) .unwrap() .collect::<Vec<_>>(); println!("Candidates for:"); println!(" {constraints:?}"); println!("are:"); for (i, candidate) in candidates.iter().enumerate() { println!("{i:4}) {candidate:?}"); } assert_eq!(candidates.len(), 1); }
33.069767
77
0.483122
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_find() {\n let category = CategoryBuilder::<(), (), (), &str>::new()\n .object(\"A\", vec![], ())\n .object(\"B\", vec![], ())\n .object(\"AxB\", vec![ObjectTag::Product(\"A\", \"B\")], ())\n .morphism(\"p1\", \"AxB\", \"A\", vec![], ())\n .morphism(\"p2\", \"AxB\", \"B\", vec![], ())\n .morphism(\"id\", \"AxB\", \"AxB\", vec![MorphismTag::Identity(\"AxB\")], ())\n .build();\n\n let constraints = ConstraintsBuilder::<&str>::new()\n .object(\"A\", vec![])\n .object(\"B\", vec![])\n .object(\"AxB\", vec![ObjectTag::Product(\"A\", \"B\")])\n .morphism(\"p1\", \"AxB\", \"A\", vec![])\n .morphism(\"p2\", \"AxB\", \"B\", vec![])\n .object(\"C\", vec![])\n .morphism(\"f\", \"C\", \"A\", vec![])\n .morphism(\"g\", \"C\", \"B\", vec![])\n .morphism(\"m\", \"C\", \"AxB\", vec![])\n .equality(vec![\"m\", \"p1\"], vec![\"f\"])\n .equality(vec![\"m\", \"p2\"], vec![\"g\"])\n .build();\n let candidates = category\n .find_candidates(&constraints, &Bindings::new())\n .unwrap()\n .collect::<Vec<_>>();\n\n println!(\"Candidates for:\");\n println!(\" {constraints:?}\");\n println!(\"are:\");\n for (i, candidate) in candidates.iter().enumerate() {\n println!(\"{i:4}) {candidate:?}\");\n }\n\n assert_eq!(candidates.len(), 1);\n}\n}" ]
f700c24903be957b81cc4024bba608c4c4f3039a
4,655
rs
Rust
crypto-ws-client/tests/bybit.rs
xermicus/crypto-crawler-rs
d594bcdcd7aef1b3085dc3270ec398f089b4d66d
[ "Apache-2.0" ]
68
2020-12-31T07:13:11.000Z
2022-03-23T03:36:51.000Z
crypto-ws-client/tests/bybit.rs
xermicus/crypto-crawler-rs
d594bcdcd7aef1b3085dc3270ec398f089b4d66d
[ "Apache-2.0" ]
13
2021-11-11T19:53:06.000Z
2022-03-12T11:55:42.000Z
crypto-ws-client/tests/bybit.rs
xermicus/crypto-crawler-rs
d594bcdcd7aef1b3085dc3270ec398f089b4d66d
[ "Apache-2.0" ]
22
2021-01-02T14:14:14.000Z
2022-03-19T19:27:27.000Z
#[macro_use] mod utils; #[cfg(test)] mod bybit_inverse_future { use crypto_ws_client::{BybitInverseFutureWSClient, WSClient}; use std::sync::mpsc::{Receiver, Sender}; #[test] fn subscribe() { gen_test_code!( BybitInverseFutureWSClient, subscribe, &vec!["trade.BTCUSDZ21".to_string()] ); } #[test] fn subscribe_raw_json() { gen_test_code!( BybitInverseFutureWSClient, subscribe, &vec![r#"{"op":"subscribe","args":["trade.BTCUSDZ21"]}"#.to_string()] ); } #[test] fn subscribe_trade() { gen_test_code!( BybitInverseFutureWSClient, subscribe_trade, &vec!["BTCUSDZ21".to_string()] ); } #[test] fn subscribe_orderbook_topk() { gen_test_code!( BybitInverseFutureWSClient, subscribe_orderbook_topk, &vec!["BTCUSDZ21".to_string()] ); } #[test] fn subscribe_orderbook() { gen_test_code!( BybitInverseFutureWSClient, subscribe_orderbook, &vec!["BTCUSDZ21".to_string()] ); } #[test] fn subscribe_ticker() { gen_test_code!( BybitInverseFutureWSClient, subscribe_ticker, &vec!["BTCUSDZ21".to_string()] ); } #[test] fn subscribe_candlestick() { gen_test_subscribe_candlestick!( BybitInverseFutureWSClient, &vec![("BTCUSDZ21".to_string(), 60)] ); gen_test_subscribe_candlestick!( BybitInverseFutureWSClient, &vec![("BTCUSDZ21".to_string(), 2592000)] ); } } #[cfg(test)] mod bybit_inverse_swap { use crypto_ws_client::{BybitInverseSwapWSClient, WSClient}; use std::sync::mpsc::{Receiver, Sender}; #[test] fn subscribe() { gen_test_code!( BybitInverseSwapWSClient, subscribe, &vec!["trade.BTCUSD".to_string()] ); } #[test] fn subscribe_raw_json() { gen_test_code!( BybitInverseSwapWSClient, subscribe, &vec![r#"{"op":"subscribe","args":["trade.BTCUSD"]}"#.to_string()] ); } #[test] fn subscribe_trade() { gen_test_code!( BybitInverseSwapWSClient, subscribe_trade, &vec!["BTCUSD".to_string()] ); } #[test] fn subscribe_orderbook_topk() { gen_test_code!( BybitInverseSwapWSClient, subscribe_orderbook_topk, &vec!["BTCUSD".to_string()] ); } #[test] fn subscribe_orderbook() { gen_test_code!( BybitInverseSwapWSClient, 
subscribe_orderbook, &vec!["BTCUSD".to_string()] ); } #[test] fn subscribe_ticker() { gen_test_code!( BybitInverseSwapWSClient, subscribe_ticker, &vec!["BTCUSD".to_string()] ); } #[test] fn subscribe_candlestick() { gen_test_subscribe_candlestick!( BybitInverseSwapWSClient, &vec![("BTCUSD".to_string(), 60)] ); gen_test_subscribe_candlestick!( BybitInverseSwapWSClient, &vec![("BTCUSD".to_string(), 2592000)] ); } } #[cfg(test)] mod bybit_linear_swap { use crypto_ws_client::{BybitLinearSwapWSClient, WSClient}; use std::sync::mpsc::{Receiver, Sender}; #[test] fn subscribe_trade() { gen_test_code!( BybitLinearSwapWSClient, subscribe_trade, &vec!["BTCUSDT".to_string()] ); } #[test] fn subscribe_orderbook_topk() { gen_test_code!( BybitLinearSwapWSClient, subscribe_orderbook_topk, &vec!["BTCUSDT".to_string()] ); } #[test] fn subscribe_orderbook() { gen_test_code!( BybitLinearSwapWSClient, subscribe_orderbook, &vec!["BTCUSDT".to_string()] ); } #[test] fn subscribe_ticker() { gen_test_code!( BybitLinearSwapWSClient, subscribe_ticker, &vec!["BTCUSDT".to_string()] ); } #[test] fn subscribe_candlestick() { gen_test_subscribe_candlestick!( BybitLinearSwapWSClient, &vec![("BTCUSDT".to_string(), 60)] ); gen_test_subscribe_candlestick!( BybitLinearSwapWSClient, &vec![("BTCUSDT".to_string(), 2592000)] ); } }
23.159204
81
0.53319
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe() {\n gen_test_code!(\n BybitInverseFutureWSClient,\n subscribe,\n &vec![\"trade.BTCUSDZ21\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_raw_json() {\n gen_test_code!(\n BybitInverseFutureWSClient,\n subscribe,\n &vec![r#\"{\"op\":\"subscribe\",\"args\":[\"trade.BTCUSDZ21\"]}\"#.to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_trade() {\n gen_test_code!(\n BybitInverseFutureWSClient,\n subscribe_trade,\n &vec![\"BTCUSDZ21\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_orderbook_topk() {\n gen_test_code!(\n BybitInverseFutureWSClient,\n subscribe_orderbook_topk,\n &vec![\"BTCUSDZ21\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_orderbook() {\n gen_test_code!(\n BybitInverseFutureWSClient,\n subscribe_orderbook,\n &vec![\"BTCUSDZ21\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_ticker() {\n gen_test_code!(\n BybitInverseFutureWSClient,\n subscribe_ticker,\n &vec![\"BTCUSDZ21\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_candlestick() {\n gen_test_subscribe_candlestick!(\n BybitInverseFutureWSClient,\n &vec![(\"BTCUSDZ21\".to_string(), 60)]\n );\n gen_test_subscribe_candlestick!(\n BybitInverseFutureWSClient,\n &vec![(\"BTCUSDZ21\".to_string(), 2592000)]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe() {\n gen_test_code!(\n BybitInverseSwapWSClient,\n subscribe,\n &vec![\"trade.BTCUSD\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_raw_json() {\n gen_test_code!(\n BybitInverseSwapWSClient,\n subscribe,\n &vec![r#\"{\"op\":\"subscribe\",\"args\":[\"trade.BTCUSD\"]}\"#.to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n 
use super::*;\n #[test]\n fn subscribe_trade() {\n gen_test_code!(\n BybitInverseSwapWSClient,\n subscribe_trade,\n &vec![\"BTCUSD\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_orderbook_topk() {\n gen_test_code!(\n BybitInverseSwapWSClient,\n subscribe_orderbook_topk,\n &vec![\"BTCUSD\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_orderbook() {\n gen_test_code!(\n BybitInverseSwapWSClient,\n subscribe_orderbook,\n &vec![\"BTCUSD\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_ticker() {\n gen_test_code!(\n BybitInverseSwapWSClient,\n subscribe_ticker,\n &vec![\"BTCUSD\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_candlestick() {\n gen_test_subscribe_candlestick!(\n BybitInverseSwapWSClient,\n &vec![(\"BTCUSD\".to_string(), 60)]\n );\n gen_test_subscribe_candlestick!(\n BybitInverseSwapWSClient,\n &vec![(\"BTCUSD\".to_string(), 2592000)]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_trade() {\n gen_test_code!(\n BybitLinearSwapWSClient,\n subscribe_trade,\n &vec![\"BTCUSDT\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_orderbook_topk() {\n gen_test_code!(\n BybitLinearSwapWSClient,\n subscribe_orderbook_topk,\n &vec![\"BTCUSDT\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_orderbook() {\n gen_test_code!(\n BybitLinearSwapWSClient,\n subscribe_orderbook,\n &vec![\"BTCUSDT\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_ticker() {\n gen_test_code!(\n BybitLinearSwapWSClient,\n subscribe_ticker,\n &vec![\"BTCUSDT\".to_string()]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn subscribe_candlestick() {\n gen_test_subscribe_candlestick!(\n BybitLinearSwapWSClient,\n 
&vec![(\"BTCUSDT\".to_string(), 60)]\n );\n gen_test_subscribe_candlestick!(\n BybitLinearSwapWSClient,\n &vec![(\"BTCUSDT\".to_string(), 2592000)]\n );\n }\n}" ]
f700c3da5c347ccb3bff98f4b254376ca3b73b71
2,711
rs
Rust
tests/query.rs
SimonSapin/warp
6d21e73ac2de6205ee233e4287ff7b52f77b3664
[ "MIT" ]
null
null
null
tests/query.rs
SimonSapin/warp
6d21e73ac2de6205ee233e4287ff7b52f77b3664
[ "MIT" ]
null
null
null
tests/query.rs
SimonSapin/warp
6d21e73ac2de6205ee233e4287ff7b52f77b3664
[ "MIT" ]
null
null
null
#![deny(warnings)] extern crate warp; #[macro_use] extern crate serde_derive; use std::collections::HashMap; use warp::Filter; #[test] fn query() { let as_map = warp::query::<HashMap<String, String>>(); let req = warp::test::request().path("/?foo=bar&baz=quux"); let extracted = req.filter(&as_map).unwrap(); assert_eq!(extracted["foo"], "bar"); assert_eq!(extracted["baz"], "quux"); } #[test] fn query_struct() { let as_struct = warp::query::<MyArgs>(); let req = warp::test::request().path("/?foo=bar&baz=quux"); let extracted = req.filter(&as_struct).unwrap(); assert_eq!( extracted, MyArgs { foo: Some("bar".into()), baz: Some("quux".into()) } ); } #[test] fn empty_query_struct() { let as_struct = warp::query::<MyArgs>(); let req = warp::test::request().path("/?"); let extracted = req.filter(&as_struct).unwrap(); assert_eq!( extracted, MyArgs { foo: None, baz: None } ); } #[test] fn missing_query_struct() { let as_struct = warp::query::<MyArgs>(); let req = warp::test::request().path("/"); let extracted = req.filter(&as_struct).unwrap(); assert_eq!( extracted, MyArgs { foo: None, baz: None } ); } #[derive(Deserialize, Debug, Eq, PartialEq)] struct MyArgs { foo: Option<String>, baz: Option<String>, } #[test] fn required_query_struct() { let as_struct = warp::query::<MyRequiredArgs>(); let req = warp::test::request().path("/?foo=bar&baz=quux"); let extracted = req.filter(&as_struct).unwrap(); assert_eq!( extracted, MyRequiredArgs { foo: "bar".into(), baz: "quux".into() } ); } #[test] fn missing_required_query_struct_partial() { let as_struct = warp::query::<MyRequiredArgs>(); let req = warp::test::request().path("/?foo=something"); let extracted = req.filter(&as_struct); assert!(extracted.is_err()) } #[test] fn missing_required_query_struct_no_query() { let as_struct = warp::query::<MyRequiredArgs>().map(|_| warp::reply()); let req = warp::test::request().path("/"); let res = req.reply(&as_struct); assert_eq!(res.status(), 400); assert_eq!(res.body(), "Invalid query 
string"); } #[derive(Deserialize, Debug, Eq, PartialEq)] struct MyRequiredArgs { foo: String, baz: String, } #[test] fn raw_query() { let as_raw = warp::query::raw(); let req = warp::test::request().path("/?foo=bar&baz=quux"); let extracted = req.filter(&as_raw).unwrap(); assert_eq!(extracted, "foo=bar&baz=quux".to_owned()); }
21.515873
75
0.582811
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn query() {\n let as_map = warp::query::<HashMap<String, String>>();\n\n let req = warp::test::request().path(\"/?foo=bar&baz=quux\");\n\n let extracted = req.filter(&as_map).unwrap();\n assert_eq!(extracted[\"foo\"], \"bar\");\n assert_eq!(extracted[\"baz\"], \"quux\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn query_struct() {\n let as_struct = warp::query::<MyArgs>();\n\n let req = warp::test::request().path(\"/?foo=bar&baz=quux\");\n\n let extracted = req.filter(&as_struct).unwrap();\n assert_eq!(\n extracted,\n MyArgs {\n foo: Some(\"bar\".into()),\n baz: Some(\"quux\".into())\n }\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn empty_query_struct() {\n let as_struct = warp::query::<MyArgs>();\n\n let req = warp::test::request().path(\"/?\");\n\n let extracted = req.filter(&as_struct).unwrap();\n assert_eq!(\n extracted,\n MyArgs {\n foo: None,\n baz: None\n }\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn missing_query_struct() {\n let as_struct = warp::query::<MyArgs>();\n\n let req = warp::test::request().path(\"/\");\n\n let extracted = req.filter(&as_struct).unwrap();\n assert_eq!(\n extracted,\n MyArgs {\n foo: None,\n baz: None\n }\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn required_query_struct() {\n let as_struct = warp::query::<MyRequiredArgs>();\n\n let req = warp::test::request().path(\"/?foo=bar&baz=quux\");\n\n let extracted = req.filter(&as_struct).unwrap();\n assert_eq!(\n extracted,\n MyRequiredArgs {\n foo: \"bar\".into(),\n baz: \"quux\".into()\n }\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn missing_required_query_struct_partial() {\n let as_struct = warp::query::<MyRequiredArgs>();\n\n let req = warp::test::request().path(\"/?foo=something\");\n\n let extracted = req.filter(&as_struct);\n assert!(extracted.is_err())\n}\n}", "#[cfg(test)]\nmod tests {\n use 
super::*;\n #[test]\n fn missing_required_query_struct_no_query() {\n let as_struct = warp::query::<MyRequiredArgs>().map(|_| warp::reply());\n\n let req = warp::test::request().path(\"/\");\n\n let res = req.reply(&as_struct);\n assert_eq!(res.status(), 400);\n assert_eq!(res.body(), \"Invalid query string\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn raw_query() {\n let as_raw = warp::query::raw();\n\n let req = warp::test::request().path(\"/?foo=bar&baz=quux\");\n\n let extracted = req.filter(&as_raw).unwrap();\n assert_eq!(extracted, \"foo=bar&baz=quux\".to_owned());\n}\n}" ]
f7010908986b983516c0c14e2e8db609547e7532
1,376
rs
Rust
rust-playground/unionfind/src/quickfind.rs
sravyapulavarthi/algorithm-playground
331fd3eeb4459afe871c36d80f8d01e5002e747a
[ "MIT" ]
85
2017-12-19T19:51:51.000Z
2021-05-26T20:00:39.000Z
rust-playground/unionfind/src/quickfind.rs
sangeetha77/algorithm-playground
331fd3eeb4459afe871c36d80f8d01e5002e747a
[ "MIT" ]
1
2019-01-02T07:00:40.000Z
2019-01-02T07:00:40.000Z
rust-playground/unionfind/src/quickfind.rs
sangeetha77/algorithm-playground
331fd3eeb4459afe871c36d80f8d01e5002e747a
[ "MIT" ]
34
2018-03-29T11:51:53.000Z
2020-11-17T08:24:51.000Z
use UnionFind; #[derive(Debug)] pub struct QuickFind { pub n: usize, pub id: Vec<usize>, } impl UnionFind for QuickFind { fn new(n: usize) -> Self { let id: Vec<usize> = (0..n).collect(); QuickFind { n, id } } fn connected(&mut self, p: usize, q: usize) -> bool { self.id[p] == self.id[q] } fn union(&mut self, p: usize, q:usize) { if self.connected(p, q) { return; } let id_p = self.id[p]; for k in 0..self.n { if self.id[k] == id_p { self.id[k] = self.id[q]; } } } } #[cfg(test)] mod test { use super::*; #[test] fn test_simple_connected() { let mut q = QuickFind::new(10); let i = q.connected(4, 7); let j = q.connected(3, 6); let k = q.connected(1, 2); assert!(!i); assert!(!j); assert!(!k); } #[test] fn test_simple_union() { let mut q = QuickFind::new(10); q.union(4, 3); assert_eq!(q.id, vec![0, 1, 2, 3, 3, 5, 6, 7, 8, 9]); q.union(3, 8); assert_eq!(q.id, vec![0, 1, 2, 8, 8, 5, 6, 7, 8, 9]); let i = q.connected(4, 3); let j = q.connected(8, 3); let k = q.connected(4, 8); assert!(i); assert!(j); assert!(k); } }
19.380282
61
0.43968
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_simple_connected() {\n\n let mut q = QuickFind::new(10);\n \n let i = q.connected(4, 7);\n let j = q.connected(3, 6);\n let k = q.connected(1, 2);\n \n assert!(!i);\n assert!(!j);\n assert!(!k);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_simple_union() {\n\n let mut q = QuickFind::new(10);\n\n q.union(4, 3);\n assert_eq!(q.id, vec![0, 1, 2, 3, 3, 5, 6, 7, 8, 9]);\n\n q.union(3, 8);\n assert_eq!(q.id, vec![0, 1, 2, 8, 8, 5, 6, 7, 8, 9]);\n\n let i = q.connected(4, 3);\n let j = q.connected(8, 3);\n let k = q.connected(4, 8);\n\n assert!(i);\n assert!(j);\n assert!(k);\n }\n}" ]
f7011be990eed39c7e23a73ecec42804e16e4392
4,748
rs
Rust
src/tests.rs
xemwebe/argmin
77697603314afac948a6603f870f1ae32f05e2e1
[ "Apache-2.0", "MIT" ]
null
null
null
src/tests.rs
xemwebe/argmin
77697603314afac948a6603f870f1ae32f05e2e1
[ "Apache-2.0", "MIT" ]
null
null
null
src/tests.rs
xemwebe/argmin
77697603314afac948a6603f870f1ae32f05e2e1
[ "Apache-2.0", "MIT" ]
null
null
null
// Copyright 2019-2020 argmin developers // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. #![allow(non_snake_case)] use approx::assert_relative_eq; use ndarray::prelude::*; use ndarray::{Array1, Array2}; use crate::prelude::*; use crate::solver::gradientdescent::SteepestDescent; use crate::solver::linesearch::{HagerZhangLineSearch, MoreThuenteLineSearch}; use crate::solver::newton::NewtonCG; use crate::solver::quasinewton::{BFGS, DFP, LBFGS}; #[cfg(feature = "serde1")] use serde::{Deserialize, Serialize}; #[cfg_attr(feature = "serde1", derive(Serialize, Deserialize))] #[derive(Clone, Default, Debug)] struct MaxEntropy { F: Array2<f64>, K: Array1<f64>, param_opt: Array1<f64>, param_init: Array1<f64>, } /// Base test case for a simple constrained entropy maximization problem /// (the machine translation example of Berger et al in /// Computational Linguistics, vol 22, num 1, pp 39--72, 1996.) 
/// /// Adapted from scipy.optimize.test.test_optimize impl MaxEntropy { fn new() -> MaxEntropy { let F: Array2<f64> = arr2(&[ [1., 1., 1.], [1., 1., 0.], [1., 0., 1.], [1., 0., 0.], [1., 0., 0.], ]); let K: Array1<f64> = arr1(&[1., 0.3, 0.5]); let param_opt: Array1<f64> = arr1(&[0., -0.524869316, 0.487525860]); let param_init: Array1<f64> = arr1(&[0.0, 0.0, 0.0]); MaxEntropy { F, K, param_opt, param_init, } } } impl ArgminOp for MaxEntropy { type Param = Array1<f64>; type Output = f64; type Hessian = Array2<f64>; type Jacobian = (); fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> { let log_pdot = self.F.dot(&p.t()); let log_z = log_pdot.mapv(|x| x.exp()).sum().ln(); let loss = log_z - self.K.dot(&p.t()); Ok(loss) } fn gradient(&self, p: &Self::Param) -> Result<Self::Param, Error> { let log_pdot = self.F.dot(&p.t()); let log_z = log_pdot.mapv(|x| x.exp()).sum().ln(); let y = (log_pdot - log_z).mapv(|x| x.exp()); let grad = self.F.clone().t().dot(&y) - self.K.clone(); Ok(grad) } fn hessian(&self, p: &Self::Param) -> Result<Self::Hessian, Error> { let log_pdot = self.F.dot(&p.t()); let log_z = log_pdot.mapv(|x| x.exp()).sum().ln(); let y = (log_pdot - log_z).mapv(|x| x.exp()); // TODO: replace with Array2::from_diag when it is available // ndarray#673 let mut y2_diag = Array2::zeros((y.len(), y.len())); y2_diag.diag_mut().assign(&y); let tmp = self.F.clone() - self.F.clone().t().dot(&y); let hess = self.F.clone().t().dot(&y2_diag.dot(&tmp)); Ok(hess) } } macro_rules! entropy_max_tests { ($($name:ident: $solver:expr,)*) => { $( #[test] fn $name() { let cost = MaxEntropy::new(); let res = Executor::new(cost.clone(), $solver, cost.param_init.clone()) .max_iters(100) .run() .unwrap(); assert_relative_eq!( cost.apply(&res.state.param).unwrap(), cost.apply(&cost.param_opt).unwrap(), epsilon = 1e-6 ); } )* } } entropy_max_tests! 
{ test_max_entropy_lbfgs_morethuente: LBFGS::new(MoreThuenteLineSearch::new(), 10), test_max_entropy_lbfgs_hagerzhang: LBFGS::new(HagerZhangLineSearch::new(), 10), test_max_entropy_bfgs: BFGS::new(Array2::eye(3), MoreThuenteLineSearch::new()), test_max_entropy_dfp: DFP::new(Array2::eye(3), MoreThuenteLineSearch::new()), test_max_entropy_newton_cg: NewtonCG::new(MoreThuenteLineSearch::new()), test_max_entropy_steepest_descent: SteepestDescent::new(MoreThuenteLineSearch::new()), } #[test] fn test_lbfgs_func_count() { let cost = MaxEntropy::new(); let linesearch = MoreThuenteLineSearch::new(); let solver = LBFGS::new(linesearch, 10); let res = Executor::new(cost.clone(), solver, cost.param_init.clone()) .max_iters(100) .run() .unwrap(); assert_relative_eq!( cost.apply(&res.state.param).unwrap(), cost.apply(&cost.param_opt).unwrap(), epsilon = 1e-6 ); // Check the number of cost function evaluation and gradient // evaluation with that in scipy assert!(res.state.cost_func_count <= 7); // The following value is 5 in scipy.optimize, but the convergence // criteria is different assert!(res.state.grad_func_count <= 6); }
32.29932
91
0.606361
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lbfgs_func_count() {\n let cost = MaxEntropy::new();\n\n let linesearch = MoreThuenteLineSearch::new();\n let solver = LBFGS::new(linesearch, 10);\n let res = Executor::new(cost.clone(), solver, cost.param_init.clone())\n .max_iters(100)\n .run()\n .unwrap();\n\n assert_relative_eq!(\n cost.apply(&res.state.param).unwrap(),\n cost.apply(&cost.param_opt).unwrap(),\n epsilon = 1e-6\n );\n\n // Check the number of cost function evaluation and gradient\n // evaluation with that in scipy\n assert!(res.state.cost_func_count <= 7);\n // The following value is 5 in scipy.optimize, but the convergence\n // criteria is different\n assert!(res.state.grad_func_count <= 6);\n}\n}" ]
f7016ec1c8cf24eecaa5a0e4f950b5a2a20954df
1,377
rs
Rust
tests/expectations/tests/ctypes-prefix-path.rs
JRF63/rust-bindgen
cb8266620596222b1cd9dbe6551cc1e3e8bb7f72
[ "BSD-3-Clause" ]
1
2021-01-07T18:48:18.000Z
2021-01-07T18:48:18.000Z
tests/expectations/tests/ctypes-prefix-path.rs
JRF63/rust-bindgen
cb8266620596222b1cd9dbe6551cc1e3e8bb7f72
[ "BSD-3-Clause" ]
3
2016-05-31T14:38:04.000Z
2016-07-18T21:18:09.000Z
tests/expectations/tests/ctypes-prefix-path.rs
JRF63/rust-bindgen
cb8266620596222b1cd9dbe6551cc1e3e8bb7f72
[ "BSD-3-Clause" ]
2
2016-05-30T18:46:14.000Z
2016-06-01T08:14:25.000Z
/* automatically generated by rust-bindgen */ #![allow( dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals )] #![no_std] mod libc { pub mod foo { pub type c_int = i32; pub enum c_void {} } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct foo { pub a: libc::foo::c_int, pub b: libc::foo::c_int, pub bar: *mut libc::foo::c_void, } #[test] fn bindgen_test_layout_foo() { assert_eq!( ::core::mem::size_of::<foo>(), 16usize, concat!("Size of: ", stringify!(foo)) ); assert_eq!( ::core::mem::align_of::<foo>(), 8usize, concat!("Alignment of ", stringify!(foo)) ); assert_eq!( unsafe { &(*(::core::ptr::null::<foo>())).a as *const _ as usize }, 0usize, concat!("Offset of field: ", stringify!(foo), "::", stringify!(a)) ); assert_eq!( unsafe { &(*(::core::ptr::null::<foo>())).b as *const _ as usize }, 4usize, concat!("Offset of field: ", stringify!(foo), "::", stringify!(b)) ); assert_eq!( unsafe { &(*(::core::ptr::null::<foo>())).bar as *const _ as usize }, 8usize, concat!("Offset of field: ", stringify!(foo), "::", stringify!(bar)) ); } impl Default for foo { fn default() -> Self { unsafe { ::core::mem::zeroed() } } }
24.157895
77
0.525054
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn bindgen_test_layout_foo() {\n assert_eq!(\n ::core::mem::size_of::<foo>(),\n 16usize,\n concat!(\"Size of: \", stringify!(foo))\n );\n assert_eq!(\n ::core::mem::align_of::<foo>(),\n 8usize,\n concat!(\"Alignment of \", stringify!(foo))\n );\n assert_eq!(\n unsafe { &(*(::core::ptr::null::<foo>())).a as *const _ as usize },\n 0usize,\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(a))\n );\n assert_eq!(\n unsafe { &(*(::core::ptr::null::<foo>())).b as *const _ as usize },\n 4usize,\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(b))\n );\n assert_eq!(\n unsafe { &(*(::core::ptr::null::<foo>())).bar as *const _ as usize },\n 8usize,\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(bar))\n );\n}\n}" ]
f701853727620eb267dc35f65b9a824f16eacb7c
1,016
rs
Rust
tests/match.rs
tcr/hoodlum
e0e1416ecea7ec58a71bcdd7571afe3e426af4b1
[ "Apache-2.0", "MIT" ]
102
2016-08-19T13:02:42.000Z
2022-03-04T22:09:57.000Z
tests/match.rs
tcr/hoodlum
e0e1416ecea7ec58a71bcdd7571afe3e426af4b1
[ "Apache-2.0", "MIT" ]
30
2016-11-04T21:49:29.000Z
2018-11-16T14:29:33.000Z
tests/match.rs
tcr/hoodlum
e0e1416ecea7ec58a71bcdd7571afe3e426af4b1
[ "Apache-2.0", "MIT" ]
5
2016-10-17T07:06:51.000Z
2020-01-23T00:48:57.000Z
extern crate hoodlum; use hoodlum::*; #[test] fn match_or() { let code = r#" match a { 0 | 1 => { } _ => { } } "#; let valid = r#" case (a) 0, 1: begin end default: begin end endcase "#; let res = parse_results(code, hoodlum::hdl_parser::parse_SeqStatement(code)); let out = res.to_verilog(&Default::default()); assert_eq!(out.trim(), valid.trim()); } //TODO #[ignore] #[test] #[should_panic] fn match_without_default() { let code = r#" match a { 0 => { } 1 => { } } "#; let _ = parse_results(code, hoodlum::hdl_parser::parse_SeqStatement(code)); } //TODO #[ignore] #[test] fn match_expr() { let code = r#" match a { 0 => a <= 1, 1 => a <= 2, 2 => a <= 0, } "#; let valid = r#" case (a) 0, 1: begin end default: begin end endcase "#; let res = parse_results(code, hoodlum::hdl_parser::parse_SeqStatement(code)); let out = res.to_verilog(&Default::default()); assert_eq!(out.trim(), valid.trim()); }
14.941176
81
0.556102
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_or() {\n let code = r#\"\nmatch a {\n 0 | 1 => { }\n _ => { }\n}\n\"#;\n\n let valid = r#\"\ncase (a)\n 0, 1: begin\n end\n default: begin\n end\nendcase\n\"#;\n\n let res = parse_results(code, hoodlum::hdl_parser::parse_SeqStatement(code));\n let out = res.to_verilog(&Default::default());\n assert_eq!(out.trim(), valid.trim());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_without_default() {\n let code = r#\"\nmatch a {\n 0 => { }\n 1 => { }\n}\n\"#;\n\n let _ = parse_results(code, hoodlum::hdl_parser::parse_SeqStatement(code));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_expr() {\n let code = r#\"\nmatch a {\n 0 => a <= 1,\n 1 => a <= 2,\n 2 => a <= 0,\n}\n\"#;\n\n let valid = r#\"\ncase (a)\n 0, 1: begin\n end\n default: begin\n end\nendcase\n\"#;\n\n let res = parse_results(code, hoodlum::hdl_parser::parse_SeqStatement(code));\n let out = res.to_verilog(&Default::default());\n assert_eq!(out.trim(), valid.trim());\n}\n}" ]
f70194552609ff6af8171b4e07459adf6d772449
13,731
rs
Rust
amethyst_rendy/src/batch.rs
lambdaxymox/amethyst
698ca6736f5a35cfed9ee73fccce1780e783ea4c
[ "MIT" ]
null
null
null
amethyst_rendy/src/batch.rs
lambdaxymox/amethyst
698ca6736f5a35cfed9ee73fccce1780e783ea4c
[ "MIT" ]
null
null
null
amethyst_rendy/src/batch.rs
lambdaxymox/amethyst
698ca6736f5a35cfed9ee73fccce1780e783ea4c
[ "MIT" ]
null
null
null
//! Module containing structures useful for batching draw calls //! in scenarios with various known assumptions, e.g. order independence. use std::{ collections::hash_map::Entry, iter::{Extend, FromIterator}, ops::Range, }; use derivative::Derivative; use smallvec::{smallvec, SmallVec}; #[cfg(feature = "profiler")] use thread_profiler::profile_scope; use crate::util::TapCountIter; /// Iterator trait for grouping iterated 2-tuples `(K, V)` by contiguous ranges with equal `K`, /// providing access in a group-by-group manner. pub trait GroupIterator<K, V> where Self: Iterator<Item = (K, V)> + Sized, K: PartialEq, { /// Perform grouping. Evaluates passed closure on every next /// contiguous list of data with same group identifier. fn for_each_group<F>(self, on_group: F) where F: FnMut(K, &mut Vec<V>); } // This would be an iterator adaptor if `Item` type would allow a borrow on iterator itself. // FIXME: Implement once `StreamingIterator` is a thing. impl<K, V, I> GroupIterator<K, V> for I where K: PartialEq, I: Iterator<Item = (K, V)>, { fn for_each_group<F>(self, mut on_group: F) where F: FnMut(K, &mut Vec<V>), { #[cfg(feature = "profiler")] profile_scope!("for_each_group"); let mut block: Option<(K, Vec<V>)> = None; for (next_group_id, value) in self { match &mut block { slot @ None => { let mut group_buffer = Vec::with_capacity(64); group_buffer.push(value); slot.replace((next_group_id, group_buffer)); } Some((group_id, group_buffer)) if group_id == &next_group_id => { group_buffer.push(value); } Some((group_id, ref mut group_buffer)) => { let submitted_group_id = std::mem::replace(group_id, next_group_id); on_group(submitted_group_id, group_buffer); group_buffer.clear(); group_buffer.push(value); } } } if let Some((group_id, mut group_buffer)) = block.take() { on_group(group_id, &mut group_buffer); } } } /// Batching implementation which provides two levels of indirection and grouping for a given batch. 
/// This batch method is used, for example, batching meshes and textures; for any given draw call, /// a user would want to batch all draws using a specific texture together, and then also group all /// draw calls for a specific mesh together. /// /// `PK` - First level of batch grouping /// `SK` - Secondary level of batch grouping /// `C` - the actual final type being batched. /// /// Internally, this batch type is implemented using a `FnvHashMap` for its outer primary batching /// layer. The inner layer is then implemented as a tuple indexed `SmallVec`. #[derive(Derivative, Debug)] #[derivative(Default(bound = ""))] pub struct TwoLevelBatch<PK, SK, C> where PK: Eq + std::hash::Hash, { map: fnv::FnvHashMap<PK, SmallVec<[(SK, C); 1]>>, data_count: usize, } impl<PK, SK, C> TwoLevelBatch<PK, SK, C> where PK: Eq + std::hash::Hash, SK: PartialEq, C: IntoIterator, C: FromIterator<<C as IntoIterator>::Item>, C: Extend<<C as IntoIterator>::Item>, { /// Clears all batch data. pub fn clear_inner(&mut self) { self.data_count = 0; for (_, data) in self.map.iter_mut() { data.clear(); } } /// Removes empty batch indices from internal storage. pub fn prune(&mut self) { self.map.retain(|_, b| !b.is_empty()); } /// Inserts a set of batch items. pub fn insert(&mut self, pk: PK, sk: SK, data: impl IntoIterator<Item = C::Item>) { #[cfg(feature = "profiler")] profile_scope!("twolevel_insert"); let instance_data = data.into_iter().tap_count(&mut self.data_count); match self.map.entry(pk) { Entry::Occupied(mut e) => { let e = e.get_mut(); // scan for the same key to try to combine batches. // Scanning limited slots to limit complexity. if let Some(batch) = e.iter_mut().take(8).find(|(k, _)| k == &sk) { batch.1.extend(instance_data); } else { e.push((sk, instance_data.collect())); } } Entry::Vacant(e) => { e.insert(smallvec![(sk, instance_data.collect())]); } } } /// Returns an iterator over the internally batched raw data. 
pub fn data(&self) -> impl Iterator<Item = &C> { self.map .iter() .flat_map(|(_, batch)| batch.iter().map(|data| &data.1)) } /// Returns an iterator over the internally batched data, which includes the group keys. pub fn iter(&self) -> impl Iterator<Item = (&PK, impl Iterator<Item = &(SK, C)>)> { self.map.iter().map(|(pk, batch)| (pk, batch.iter())) } /// Returns the number of items currently in this batch. pub fn count(&self) -> usize { self.data_count } } /// Batching implementation which provides two levels of indirection and grouping for a given batch. /// This batch method is used, for example, batching meshes and textures; for any given draw call, /// a user would want to batch all draws using a specific texture together, and then also group all /// draw calls for a specific mesh together. /// /// `PK` - First level of batch grouping /// `SK` - Secondary level of batch grouping /// `D` - the actual final type being batched. /// /// Internally, this batch type is implemented with sorted tuple `Vec` structures. /// /// `OrderedTwoLevelBatch` differs from [TwoLevelBatch] in that it sorts and orders on both levels /// of batching. #[derive(Derivative, Debug)] #[derivative(Default(bound = ""))] pub struct OrderedTwoLevelBatch<PK, SK, D> where PK: PartialEq, SK: PartialEq, { old_pk_list: Vec<(PK, u32)>, old_sk_list: Vec<(SK, Range<u32>)>, pk_list: Vec<(PK, u32)>, sk_list: Vec<(SK, Range<u32>)>, data_list: Vec<D>, } impl<PK, SK, D> OrderedTwoLevelBatch<PK, SK, D> where PK: PartialEq, SK: PartialEq, { /// Clears all data and indices from this batch set. pub fn swap_clear(&mut self) { std::mem::swap(&mut self.old_pk_list, &mut self.pk_list); std::mem::swap(&mut self.old_sk_list, &mut self.sk_list); self.pk_list.clear(); self.sk_list.clear(); self.data_list.clear(); } /// Inserts a set of batch data to the specified grouping. 
pub fn insert(&mut self, pk: PK, sk: SK, data: impl IntoIterator<Item = D>) { #[cfg(feature = "profiler")] profile_scope!("ordered_twolevel_insert"); let start = self.data_list.len() as u32; self.data_list.extend(data); let end = self.data_list.len() as u32; match (self.pk_list.last_mut(), self.sk_list.last_mut()) { (Some((last_pk, _)), Some((last_sk, last_sk_range))) if last_pk == &pk && last_sk == &sk => { last_sk_range.end = end; } (Some((last_pk, last_pk_len)), _) if last_pk == &pk => { *last_pk_len += 1; self.sk_list.push((sk, start..end)); } _ => { self.pk_list.push((pk, 1)); self.sk_list.push((sk, start..end)); } } } /// Returns the raw storage data of this batch container. pub fn data(&self) -> &Vec<D> { &self.data_list } /// Iterator that returns primary keys and all inner submitted batches pub fn iter<'a>(&'a self) -> impl Iterator<Item = (&'a PK, &[(SK, Range<u32>)])> { let mut pk_offset = 0; self.pk_list.iter().map(move |(pk, pk_len)| { let range = pk_offset..pk_offset + *pk_len as usize; pk_offset += *pk_len as usize; (pk, &self.sk_list[range]) }) } /// Returns true if sorting this batch resulted in a change in order. pub fn changed(&self) -> bool { self.pk_list != self.old_pk_list || self.sk_list != self.old_sk_list } /// Returns the number of items currently in this batch. pub fn count(&self) -> usize { self.data_list.len() } } /// A batching implementation with one level of indexing. Data type `D` batched by primary key `PK`. /// Items with the same `PK` are always grouped. #[derive(Derivative, Debug)] #[derivative(Default(bound = ""))] pub struct OneLevelBatch<PK, D> where PK: Eq + std::hash::Hash, { map: fnv::FnvHashMap<PK, Vec<D>>, data_count: usize, } impl<PK, D> OneLevelBatch<PK, D> where PK: Eq + std::hash::Hash, { /// Clears all data and indices from this batch set. pub fn clear_inner(&mut self) { self.data_count = 0; for (_, data) in self.map.iter_mut() { data.clear(); } } /// Removes any empty grouping indices. 
pub fn prune(&mut self) { self.map.retain(|_, b| !b.is_empty()); } /// Inserts the provided set of batch data for `PK` pub fn insert(&mut self, pk: PK, data: impl IntoIterator<Item = D>) { #[cfg(feature = "profiler")] profile_scope!("onelevel_insert"); let instance_data = data.into_iter(); match self.map.entry(pk) { Entry::Occupied(mut e) => { let vec = e.get_mut(); let old_len = vec.len(); vec.extend(instance_data); self.data_count += vec.len() - old_len; } Entry::Vacant(e) => { let collected = instance_data.collect::<Vec<_>>(); self.data_count += collected.len(); e.insert(collected); } } } /// Returns an iterator over batched data lists. pub fn data(&self) -> impl Iterator<Item = &Vec<D>> { self.map.values() } /// Returns an iterator over batched values, providing batch `PK` and data list. pub fn iter(&self) -> impl Iterator<Item = (&PK, Range<u32>)> { let mut offset = 0; self.map.iter().map(move |(pk, data)| { let range = offset..offset + data.len() as u32; offset = range.end; (pk, range) }) } /// Returns the number of items currently in this batch. pub fn count(&self) -> usize { self.data_count } } /// A batching implementation with one level of indexing. Data type `D` batched by primary key `PK`. /// /// Items are always kept in insertion order, grouped only by contiguous ranges of equal `PK`. #[derive(Derivative, Debug)] #[derivative(Default(bound = ""))] pub struct OrderedOneLevelBatch<PK, D> where PK: PartialEq, { old_keys: Vec<(PK, u32)>, keys_list: Vec<(PK, u32)>, data_list: Vec<D>, } impl<PK, D> OrderedOneLevelBatch<PK, D> where PK: PartialEq, { /// Clears all data and indices from this batch set. 
pub fn swap_clear(&mut self) { std::mem::swap(&mut self.old_keys, &mut self.keys_list); self.keys_list.clear(); self.data_list.clear(); } /// Inserts the provided set of batch data for `PK` pub fn insert(&mut self, pk: PK, data: impl IntoIterator<Item = D>) { #[cfg(feature = "profiler")] profile_scope!("ordered_onelevel_insert"); let start = self.data_list.len() as u32; self.data_list.extend(data); let added_len = self.data_list.len() as u32 - start; if added_len == 0 { return; } match self.keys_list.last_mut() { Some((last_pk, last_len)) if last_pk == &pk => { *last_len += added_len; } _ => { self.keys_list.push((pk, added_len)); } } } /// Returns an iterator to raw data for this batch. pub fn data(&self) -> &Vec<D> { &self.data_list } /// Iterator that returns primary keys and lengths of submitted batch pub fn iter(&self) -> impl Iterator<Item = (&PK, Range<u32>)> { let mut offset = 0; self.keys_list.iter().map(move |(pk, size)| { let range = offset..offset + *size; offset = range.end; (pk, range) }) } /// Returns an iterator to raw data for this batch. pub fn changed(&self) -> bool { self.keys_list != self.old_keys } /// Returns the number of items currently in this batch. 
pub fn count(&self) -> usize { self.data_list.len() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_ordered_onelevel_batch_single_insert() { let mut batch = OrderedOneLevelBatch::<u32, u32>::default(); batch.insert(0, Some(0)); assert_eq!(batch.count(), 1); assert_eq!(batch.iter().collect::<Vec<_>>(), vec![(&0, 0..1)]); } #[test] fn test_ordered_onelevel_batch_insert_existing() { let mut batch = OrderedOneLevelBatch::<u32, u32>::default(); batch.insert(0, Some(0)); batch.insert(0, Some(1)); batch.insert(1, Some(0)); assert_eq!(batch.count(), 3); assert_eq!( batch.iter().collect::<Vec<_>>(), vec![(&0, 0..2), (&1, 2..3)] ); } #[test] fn test_ordered_onelevel_batch_empty_insert() { let mut batch = OrderedOneLevelBatch::<u32, u32>::default(); batch.insert(0, None); assert_eq!(batch.count(), 0); assert_eq!(batch.iter().collect::<Vec<_>>(), vec![]); } }
32.156909
100
0.57738
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ordered_onelevel_batch_single_insert() {\n let mut batch = OrderedOneLevelBatch::<u32, u32>::default();\n batch.insert(0, Some(0));\n assert_eq!(batch.count(), 1);\n assert_eq!(batch.iter().collect::<Vec<_>>(), vec![(&0, 0..1)]);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ordered_onelevel_batch_insert_existing() {\n let mut batch = OrderedOneLevelBatch::<u32, u32>::default();\n batch.insert(0, Some(0));\n batch.insert(0, Some(1));\n batch.insert(1, Some(0));\n assert_eq!(batch.count(), 3);\n assert_eq!(\n batch.iter().collect::<Vec<_>>(),\n vec![(&0, 0..2), (&1, 2..3)]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ordered_onelevel_batch_empty_insert() {\n let mut batch = OrderedOneLevelBatch::<u32, u32>::default();\n batch.insert(0, None);\n assert_eq!(batch.count(), 0);\n assert_eq!(batch.iter().collect::<Vec<_>>(), vec![]);\n }\n}" ]
f701a7312a3bcea3ecd9bfd2bf7fa634a8eba26c
69,192
rs
Rust
src/sys/socket/mod.rs
sporksmith/nix
4d8504bee10778d37a804f812e865d7440f2c3b9
[ "MIT" ]
null
null
null
src/sys/socket/mod.rs
sporksmith/nix
4d8504bee10778d37a804f812e865d7440f2c3b9
[ "MIT" ]
null
null
null
src/sys/socket/mod.rs
sporksmith/nix
4d8504bee10778d37a804f812e865d7440f2c3b9
[ "MIT" ]
null
null
null
//! Socket interface functions //! //! [Further reading](https://man7.org/linux/man-pages/man7/socket.7.html) use cfg_if::cfg_if; use crate::{Error, Result, errno::Errno}; use libc::{self, c_void, c_int, iovec, socklen_t, size_t, CMSG_FIRSTHDR, CMSG_NXTHDR, CMSG_DATA, CMSG_LEN}; use memoffset::offset_of; use std::{mem, ptr, slice}; use std::os::unix::io::RawFd; #[cfg(all(target_os = "linux"))] use crate::sys::time::TimeSpec; use crate::sys::time::TimeVal; use crate::sys::uio::IoVec; mod addr; pub mod sockopt; /* * * ===== Re-exports ===== * */ #[cfg(not(any(target_os = "illumos", target_os = "solaris")))] pub use self::addr::{ AddressFamily, SockAddr, InetAddr, UnixAddr, IpAddr, Ipv4Addr, Ipv6Addr, LinkAddr, }; #[cfg(any(target_os = "illumos", target_os = "solaris"))] pub use self::addr::{ AddressFamily, SockAddr, InetAddr, UnixAddr, IpAddr, Ipv4Addr, Ipv6Addr, }; #[cfg(any(target_os = "android", target_os = "linux"))] pub use crate::sys::socket::addr::netlink::NetlinkAddr; #[cfg(any(target_os = "android", target_os = "linux"))] pub use crate::sys::socket::addr::alg::AlgAddr; #[cfg(any(target_os = "android", target_os = "linux"))] pub use crate::sys::socket::addr::vsock::VsockAddr; pub use libc::{ cmsghdr, msghdr, sa_family_t, sockaddr, sockaddr_in, sockaddr_in6, sockaddr_storage, sockaddr_un, }; // Needed by the cmsg_space macro #[doc(hidden)] pub use libc::{c_uint, CMSG_SPACE}; /// These constants are used to specify the communication semantics /// when creating a socket with [`socket()`](fn.socket.html) #[derive(Clone, Copy, PartialEq, Eq, Debug)] #[repr(i32)] #[non_exhaustive] pub enum SockType { /// Provides sequenced, reliable, two-way, connection- /// based byte streams. An out-of-band data transmission /// mechanism may be supported. Stream = libc::SOCK_STREAM, /// Supports datagrams (connectionless, unreliable /// messages of a fixed maximum length). 
Datagram = libc::SOCK_DGRAM, /// Provides a sequenced, reliable, two-way connection- /// based data transmission path for datagrams of fixed /// maximum length; a consumer is required to read an /// entire packet with each input system call. SeqPacket = libc::SOCK_SEQPACKET, /// Provides raw network protocol access. Raw = libc::SOCK_RAW, /// Provides a reliable datagram layer that does not /// guarantee ordering. Rdm = libc::SOCK_RDM, } /// Constants used in [`socket`](fn.socket.html) and [`socketpair`](fn.socketpair.html) /// to specify the protocol to use. #[repr(i32)] #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] #[non_exhaustive] pub enum SockProtocol { /// TCP protocol ([ip(7)](https://man7.org/linux/man-pages/man7/ip.7.html)) Tcp = libc::IPPROTO_TCP, /// UDP protocol ([ip(7)](https://man7.org/linux/man-pages/man7/ip.7.html)) Udp = libc::IPPROTO_UDP, /// Allows applications and other KEXTs to be notified when certain kernel events occur /// ([ref](https://developer.apple.com/library/content/documentation/Darwin/Conceptual/NKEConceptual/control/control.html)) #[cfg(any(target_os = "ios", target_os = "macos"))] KextEvent = libc::SYSPROTO_EVENT, /// Allows applications to configure and control a KEXT /// ([ref](https://developer.apple.com/library/content/documentation/Darwin/Conceptual/NKEConceptual/control/control.html)) #[cfg(any(target_os = "ios", target_os = "macos"))] KextControl = libc::SYSPROTO_CONTROL, /// Receives routing and link updates and may be used to modify the routing tables (both IPv4 and IPv6), IP addresses, link // parameters, neighbor setups, queueing disciplines, traffic classes and packet classifiers /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkRoute = libc::NETLINK_ROUTE, /// Reserved for user-mode socket protocols /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] 
NetlinkUserSock = libc::NETLINK_USERSOCK, /// Query information about sockets of various protocol families from the kernel /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkSockDiag = libc::NETLINK_SOCK_DIAG, /// SELinux event notifications. /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkSELinux = libc::NETLINK_SELINUX, /// Open-iSCSI /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkISCSI = libc::NETLINK_ISCSI, /// Auditing /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkAudit = libc::NETLINK_AUDIT, /// Access to FIB lookup from user space /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkFIBLookup = libc::NETLINK_FIB_LOOKUP, /// Netfilter subsystem /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkNetFilter = libc::NETLINK_NETFILTER, /// SCSI Transports /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkSCSITransport = libc::NETLINK_SCSITRANSPORT, /// Infiniband RDMA /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkRDMA = libc::NETLINK_RDMA, /// Transport IPv6 packets from netfilter to user space. Used by ip6_queue kernel module. 
/// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkIPv6Firewall = libc::NETLINK_IP6_FW, /// DECnet routing messages /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkDECNetRoutingMessage = libc::NETLINK_DNRTMSG, /// Kernel messages to user space /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkKObjectUEvent = libc::NETLINK_KOBJECT_UEVENT, /// Netlink interface to request information about ciphers registered with the kernel crypto API as well as allow /// configuration of the kernel crypto API. /// ([ref](https://www.man7.org/linux/man-pages/man7/netlink.7.html)) #[cfg(any(target_os = "android", target_os = "linux"))] NetlinkCrypto = libc::NETLINK_CRYPTO, } libc_bitflags!{ /// Additional socket options pub struct SockFlag: c_int { /// Set non-blocking mode on the new socket #[cfg(any(target_os = "android", target_os = "dragonfly", target_os = "freebsd", target_os = "illumos", target_os = "linux", target_os = "netbsd", target_os = "openbsd"))] SOCK_NONBLOCK; /// Set close-on-exec on the new descriptor #[cfg(any(target_os = "android", target_os = "dragonfly", target_os = "freebsd", target_os = "illumos", target_os = "linux", target_os = "netbsd", target_os = "openbsd"))] SOCK_CLOEXEC; /// Return `EPIPE` instead of raising `SIGPIPE` #[cfg(target_os = "netbsd")] SOCK_NOSIGPIPE; /// For domains `AF_INET(6)`, only allow `connect(2)`, `sendto(2)`, or `sendmsg(2)` /// to the DNS port (typically 53) #[cfg(target_os = "openbsd")] SOCK_DNS; } } libc_bitflags!{ /// Flags for send/recv and their relatives pub struct MsgFlags: c_int { /// Sends or requests out-of-band data on sockets that support this notion /// (e.g., of type [`Stream`](enum.SockType.html)); the underlying protocol must also /// support out-of-band data. 
MSG_OOB; /// Peeks at an incoming message. The data is treated as unread and the next /// [`recv()`](fn.recv.html) /// or similar function shall still return this data. MSG_PEEK; /// Receive operation blocks until the full amount of data can be /// returned. The function may return smaller amount of data if a signal /// is caught, an error or disconnect occurs. MSG_WAITALL; /// Enables nonblocking operation; if the operation would block, /// `EAGAIN` or `EWOULDBLOCK` is returned. This provides similar /// behavior to setting the `O_NONBLOCK` flag /// (via the [`fcntl`](../../fcntl/fn.fcntl.html) /// `F_SETFL` operation), but differs in that `MSG_DONTWAIT` is a per- /// call option, whereas `O_NONBLOCK` is a setting on the open file /// description (see [open(2)](https://man7.org/linux/man-pages/man2/open.2.html)), /// which will affect all threads in /// the calling process and as well as other processes that hold /// file descriptors referring to the same open file description. MSG_DONTWAIT; /// Receive flags: Control Data was discarded (buffer too small) MSG_CTRUNC; /// For raw ([`Packet`](addr/enum.AddressFamily.html)), Internet datagram /// (since Linux 2.4.27/2.6.8), /// netlink (since Linux 2.6.22) and UNIX datagram (since Linux 3.4) /// sockets: return the real length of the packet or datagram, even /// when it was longer than the passed buffer. Not implemented for UNIX /// domain ([unix(7)](https://linux.die.net/man/7/unix)) sockets. /// /// For use with Internet stream sockets, see [tcp(7)](https://linux.die.net/man/7/tcp). MSG_TRUNC; /// Terminates a record (when this notion is supported, as for /// sockets of type [`SeqPacket`](enum.SockType.html)). MSG_EOR; /// This flag specifies that queued errors should be received from /// the socket error queue. 
(For more details, see /// [recvfrom(2)](https://linux.die.net/man/2/recvfrom)) #[cfg(any(target_os = "android", target_os = "linux"))] MSG_ERRQUEUE; /// Set the `close-on-exec` flag for the file descriptor received via a UNIX domain /// file descriptor using the `SCM_RIGHTS` operation (described in /// [unix(7)](https://linux.die.net/man/7/unix)). /// This flag is useful for the same reasons as the `O_CLOEXEC` flag of /// [open(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/open.html). /// /// Only used in [`recvmsg`](fn.recvmsg.html) function. #[cfg(any(target_os = "android", target_os = "dragonfly", target_os = "freebsd", target_os = "linux", target_os = "netbsd", target_os = "openbsd"))] MSG_CMSG_CLOEXEC; } } cfg_if! { if #[cfg(any(target_os = "android", target_os = "linux"))] { /// Unix credentials of the sending process. /// /// This struct is used with the `SO_PEERCRED` ancillary message /// and the `SCM_CREDENTIALS` control message for UNIX sockets. #[repr(transparent)] #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct UnixCredentials(libc::ucred); impl UnixCredentials { /// Creates a new instance with the credentials of the current process pub fn new() -> Self { UnixCredentials(libc::ucred { pid: crate::unistd::getpid().as_raw(), uid: crate::unistd::getuid().as_raw(), gid: crate::unistd::getgid().as_raw(), }) } /// Returns the process identifier pub fn pid(&self) -> libc::pid_t { self.0.pid } /// Returns the user identifier pub fn uid(&self) -> libc::uid_t { self.0.uid } /// Returns the group identifier pub fn gid(&self) -> libc::gid_t { self.0.gid } } impl Default for UnixCredentials { fn default() -> Self { Self::new() } } impl From<libc::ucred> for UnixCredentials { fn from(cred: libc::ucred) -> Self { UnixCredentials(cred) } } impl Into<libc::ucred> for UnixCredentials { fn into(self) -> libc::ucred { self.0 } } } else if #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] { /// Unix credentials of the sending process. 
/// /// This struct is used with the `SCM_CREDS` ancillary message for UNIX sockets. #[repr(transparent)] #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct UnixCredentials(libc::cmsgcred); impl UnixCredentials { /// Returns the process identifier pub fn pid(&self) -> libc::pid_t { self.0.cmcred_pid } /// Returns the real user identifier pub fn uid(&self) -> libc::uid_t { self.0.cmcred_uid } /// Returns the effective user identifier pub fn euid(&self) -> libc::uid_t { self.0.cmcred_euid } /// Returns the real group identifier pub fn gid(&self) -> libc::gid_t { self.0.cmcred_gid } /// Returns a list group identifiers (the first one being the effective GID) pub fn groups(&self) -> &[libc::gid_t] { unsafe { slice::from_raw_parts(self.0.cmcred_groups.as_ptr() as *const libc::gid_t, self.0.cmcred_ngroups as _) } } } impl From<libc::cmsgcred> for UnixCredentials { fn from(cred: libc::cmsgcred) -> Self { UnixCredentials(cred) } } } } cfg_if!{ if #[cfg(any( target_os = "dragonfly", target_os = "freebsd", target_os = "macos", target_os = "ios" ))] { /// Return type of [`LocalPeerCred`](crate::sys::socket::sockopt::LocalPeerCred) #[repr(transparent)] #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct XuCred(libc::xucred); impl XuCred { /// Structure layout version pub fn version(&self) -> u32 { self.0.cr_version } /// Effective user ID pub fn uid(&self) -> libc::uid_t { self.0.cr_uid } /// Returns a list of group identifiers (the first one being the /// effective GID) pub fn groups(&self) -> &[libc::gid_t] { &self.0.cr_groups } } } } /// Request for multicast socket operations /// /// This is a wrapper type around `ip_mreq`. #[repr(transparent)] #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct IpMembershipRequest(libc::ip_mreq); impl IpMembershipRequest { /// Instantiate a new `IpMembershipRequest` /// /// If `interface` is `None`, then `Ipv4Addr::any()` will be used for the interface. 
pub fn new(group: Ipv4Addr, interface: Option<Ipv4Addr>) -> Self { IpMembershipRequest(libc::ip_mreq { imr_multiaddr: group.0, imr_interface: interface.unwrap_or_else(Ipv4Addr::any).0, }) } } /// Request for ipv6 multicast socket operations /// /// This is a wrapper type around `ipv6_mreq`. #[repr(transparent)] #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct Ipv6MembershipRequest(libc::ipv6_mreq); impl Ipv6MembershipRequest { /// Instantiate a new `Ipv6MembershipRequest` pub const fn new(group: Ipv6Addr) -> Self { Ipv6MembershipRequest(libc::ipv6_mreq { ipv6mr_multiaddr: group.0, ipv6mr_interface: 0, }) } } /// Create a buffer large enough for storing some control messages as returned /// by [`recvmsg`](fn.recvmsg.html). /// /// # Examples /// /// ``` /// # #[macro_use] extern crate nix; /// # use nix::sys::time::TimeVal; /// # use std::os::unix::io::RawFd; /// # fn main() { /// // Create a buffer for a `ControlMessageOwned::ScmTimestamp` message /// let _ = cmsg_space!(TimeVal); /// // Create a buffer big enough for a `ControlMessageOwned::ScmRights` message /// // with two file descriptors /// let _ = cmsg_space!([RawFd; 2]); /// // Create a buffer big enough for a `ControlMessageOwned::ScmRights` message /// // and a `ControlMessageOwned::ScmTimestamp` message /// let _ = cmsg_space!(RawFd, TimeVal); /// # } /// ``` // Unfortunately, CMSG_SPACE isn't a const_fn, or else we could return a // stack-allocated array. #[macro_export] macro_rules! 
cmsg_space { ( $( $x:ty ),* ) => { { let mut space = 0; $( // CMSG_SPACE is always safe space += unsafe { $crate::sys::socket::CMSG_SPACE(::std::mem::size_of::<$x>() as $crate::sys::socket::c_uint) } as usize; )* Vec::<u8>::with_capacity(space) } } } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct RecvMsg<'a> { pub bytes: usize, cmsghdr: Option<&'a cmsghdr>, pub address: Option<SockAddr>, pub flags: MsgFlags, mhdr: msghdr, } impl<'a> RecvMsg<'a> { /// Iterate over the valid control messages pointed to by this /// msghdr. pub fn cmsgs(&self) -> CmsgIterator { CmsgIterator { cmsghdr: self.cmsghdr, mhdr: &self.mhdr } } } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct CmsgIterator<'a> { /// Control message buffer to decode from. Must adhere to cmsg alignment. cmsghdr: Option<&'a cmsghdr>, mhdr: &'a msghdr } impl<'a> Iterator for CmsgIterator<'a> { type Item = ControlMessageOwned; fn next(&mut self) -> Option<ControlMessageOwned> { match self.cmsghdr { None => None, // No more messages Some(hdr) => { // Get the data. // Safe if cmsghdr points to valid data returned by recvmsg(2) let cm = unsafe { Some(ControlMessageOwned::decode_from(hdr))}; // Advance the internal pointer. Safe if mhdr and cmsghdr point // to valid data returned by recvmsg(2) self.cmsghdr = unsafe { let p = CMSG_NXTHDR(self.mhdr as *const _, hdr as *const _); p.as_ref() }; cm } } } } /// A type-safe wrapper around a single control message, as used with /// [`recvmsg`](#fn.recvmsg). /// /// [Further reading](https://man7.org/linux/man-pages/man3/cmsg.3.html) // Nix version 0.13.0 and earlier used ControlMessage for both recvmsg and // sendmsg. However, on some platforms the messages returned by recvmsg may be // unaligned. ControlMessageOwned takes those messages by copy, obviating any // alignment issues. 
// // See https://github.com/nix-rust/nix/issues/999 #[derive(Clone, Debug, Eq, PartialEq)] #[non_exhaustive] pub enum ControlMessageOwned { /// Received version of /// [`ControlMessage::ScmRights`][#enum.ControlMessage.html#variant.ScmRights] ScmRights(Vec<RawFd>), /// Received version of /// [`ControlMessage::ScmCredentials`][#enum.ControlMessage.html#variant.ScmCredentials] #[cfg(any(target_os = "android", target_os = "linux"))] ScmCredentials(UnixCredentials), /// Received version of /// [`ControlMessage::ScmCreds`][#enum.ControlMessage.html#variant.ScmCreds] #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] ScmCreds(UnixCredentials), /// A message of type `SCM_TIMESTAMP`, containing the time the /// packet was received by the kernel. /// /// See the kernel's explanation in "SO_TIMESTAMP" of /// [networking/timestamping](https://www.kernel.org/doc/Documentation/networking/timestamping.txt). /// /// # Examples /// /// ``` /// # #[macro_use] extern crate nix; /// # use nix::sys::socket::*; /// # use nix::sys::uio::IoVec; /// # use nix::sys::time::*; /// # use std::time::*; /// # fn main() { /// // Set up /// let message = "Ohayō!".as_bytes(); /// let in_socket = socket( /// AddressFamily::Inet, /// SockType::Datagram, /// SockFlag::empty(), /// None).unwrap(); /// setsockopt(in_socket, sockopt::ReceiveTimestamp, &true).unwrap(); /// let localhost = InetAddr::new(IpAddr::new_v4(127, 0, 0, 1), 0); /// bind(in_socket, &SockAddr::new_inet(localhost)).unwrap(); /// let address = getsockname(in_socket).unwrap(); /// // Get initial time /// let time0 = SystemTime::now(); /// // Send the message /// let iov = [IoVec::from_slice(message)]; /// let flags = MsgFlags::empty(); /// let l = sendmsg(in_socket, &iov, &[], flags, Some(&address)).unwrap(); /// assert_eq!(message.len(), l); /// // Receive the message /// let mut buffer = vec![0u8; message.len()]; /// let mut cmsgspace = cmsg_space!(TimeVal); /// let iov = [IoVec::from_mut_slice(&mut buffer)]; /// let r = 
recvmsg(in_socket, &iov, Some(&mut cmsgspace), flags).unwrap(); /// let rtime = match r.cmsgs().next() { /// Some(ControlMessageOwned::ScmTimestamp(rtime)) => rtime, /// Some(_) => panic!("Unexpected control message"), /// None => panic!("No control message") /// }; /// // Check the final time /// let time1 = SystemTime::now(); /// // the packet's received timestamp should lie in-between the two system /// // times, unless the system clock was adjusted in the meantime. /// let rduration = Duration::new(rtime.tv_sec() as u64, /// rtime.tv_usec() as u32 * 1000); /// assert!(time0.duration_since(UNIX_EPOCH).unwrap() <= rduration); /// assert!(rduration <= time1.duration_since(UNIX_EPOCH).unwrap()); /// // Close socket /// nix::unistd::close(in_socket).unwrap(); /// # } /// ``` ScmTimestamp(TimeVal), /// Nanoseconds resolution timestamp /// /// [Further reading](https://www.kernel.org/doc/html/latest/networking/timestamping.html) #[cfg(all(target_os = "linux"))] ScmTimestampns(TimeSpec), #[cfg(any( target_os = "android", target_os = "ios", target_os = "linux", target_os = "macos", target_os = "netbsd", ))] Ipv4PacketInfo(libc::in_pktinfo), #[cfg(any( target_os = "android", target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "linux", target_os = "macos", target_os = "openbsd", target_os = "netbsd", ))] Ipv6PacketInfo(libc::in6_pktinfo), #[cfg(any( target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd", target_os = "openbsd", ))] Ipv4RecvIf(libc::sockaddr_dl), #[cfg(any( target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd", target_os = "openbsd", ))] Ipv4RecvDstAddr(libc::in_addr), /// UDP Generic Receive Offload (GRO) allows receiving multiple UDP /// packets from a single sender. /// Fixed-size payloads are following one by one in a receive buffer. /// This Control Message indicates the size of all smaller packets, /// except, maybe, the last one. 
/// /// `UdpGroSegment` socket option should be enabled on a socket /// to allow receiving GRO packets. #[cfg(target_os = "linux")] UdpGroSegments(u16), /// SO_RXQ_OVFL indicates that an unsigned 32 bit value /// ancilliary msg (cmsg) should be attached to recieved /// skbs indicating the number of packets dropped by the /// socket between the last recieved packet and this /// received packet. /// /// `RxqOvfl` socket option should be enabled on a socket /// to allow receiving the drop counter. #[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))] RxqOvfl(u32), /// Catch-all variant for unimplemented cmsg types. #[doc(hidden)] Unknown(UnknownCmsg), } impl ControlMessageOwned { /// Decodes a `ControlMessageOwned` from raw bytes. /// /// This is only safe to call if the data is correct for the message type /// specified in the header. Normally, the kernel ensures that this is the /// case. "Correct" in this case includes correct length, alignment and /// actual content. // Clippy complains about the pointer alignment of `p`, not understanding // that it's being fed to a function that can handle that. 
#[allow(clippy::cast_ptr_alignment)] unsafe fn decode_from(header: &cmsghdr) -> ControlMessageOwned { let p = CMSG_DATA(header); let len = header as *const _ as usize + header.cmsg_len as usize - p as usize; match (header.cmsg_level, header.cmsg_type) { (libc::SOL_SOCKET, libc::SCM_RIGHTS) => { let n = len / mem::size_of::<RawFd>(); let mut fds = Vec::with_capacity(n); for i in 0..n { let fdp = (p as *const RawFd).add(i); fds.push(ptr::read_unaligned(fdp)); } ControlMessageOwned::ScmRights(fds) }, #[cfg(any(target_os = "android", target_os = "linux"))] (libc::SOL_SOCKET, libc::SCM_CREDENTIALS) => { let cred: libc::ucred = ptr::read_unaligned(p as *const _); ControlMessageOwned::ScmCredentials(cred.into()) } #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] (libc::SOL_SOCKET, libc::SCM_CREDS) => { let cred: libc::cmsgcred = ptr::read_unaligned(p as *const _); ControlMessageOwned::ScmCreds(cred.into()) } (libc::SOL_SOCKET, libc::SCM_TIMESTAMP) => { let tv: libc::timeval = ptr::read_unaligned(p as *const _); ControlMessageOwned::ScmTimestamp(TimeVal::from(tv)) }, #[cfg(all(target_os = "linux"))] (libc::SOL_SOCKET, libc::SCM_TIMESTAMPNS) => { let ts: libc::timespec = ptr::read_unaligned(p as *const _); ControlMessageOwned::ScmTimestampns(TimeSpec::from(ts)) } #[cfg(any( target_os = "android", target_os = "freebsd", target_os = "ios", target_os = "linux", target_os = "macos" ))] (libc::IPPROTO_IPV6, libc::IPV6_PKTINFO) => { let info = ptr::read_unaligned(p as *const libc::in6_pktinfo); ControlMessageOwned::Ipv6PacketInfo(info) } #[cfg(any( target_os = "android", target_os = "ios", target_os = "linux", target_os = "macos", target_os = "netbsd", ))] (libc::IPPROTO_IP, libc::IP_PKTINFO) => { let info = ptr::read_unaligned(p as *const libc::in_pktinfo); ControlMessageOwned::Ipv4PacketInfo(info) } #[cfg(any( target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd", target_os = "openbsd", ))] (libc::IPPROTO_IP, libc::IP_RECVIF) => { let dl 
= ptr::read_unaligned(p as *const libc::sockaddr_dl); ControlMessageOwned::Ipv4RecvIf(dl) }, #[cfg(any( target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd", target_os = "openbsd", ))] (libc::IPPROTO_IP, libc::IP_RECVDSTADDR) => { let dl = ptr::read_unaligned(p as *const libc::in_addr); ControlMessageOwned::Ipv4RecvDstAddr(dl) }, #[cfg(target_os = "linux")] (libc::SOL_UDP, libc::UDP_GRO) => { let gso_size: u16 = ptr::read_unaligned(p as *const _); ControlMessageOwned::UdpGroSegments(gso_size) }, #[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))] (libc::SOL_SOCKET, libc::SO_RXQ_OVFL) => { let drop_counter = ptr::read_unaligned(p as *const u32); ControlMessageOwned::RxqOvfl(drop_counter) }, (_, _) => { let sl = slice::from_raw_parts(p, len); let ucmsg = UnknownCmsg(*header, Vec::<u8>::from(sl)); ControlMessageOwned::Unknown(ucmsg) } } } } /// A type-safe zero-copy wrapper around a single control message, as used wih /// [`sendmsg`](#fn.sendmsg). More types may be added to this enum; do not /// exhaustively pattern-match it. /// /// [Further reading](https://man7.org/linux/man-pages/man3/cmsg.3.html) #[derive(Clone, Copy, Debug, Eq, PartialEq)] #[non_exhaustive] pub enum ControlMessage<'a> { /// A message of type `SCM_RIGHTS`, containing an array of file /// descriptors passed between processes. /// /// See the description in the "Ancillary messages" section of the /// [unix(7) man page](https://man7.org/linux/man-pages/man7/unix.7.html). /// /// Using multiple `ScmRights` messages for a single `sendmsg` call isn't /// recommended since it causes platform-dependent behaviour: It might /// swallow all but the first `ScmRights` message or fail with `EINVAL`. /// Instead, you can put all fds to be passed into a single `ScmRights` /// message. ScmRights(&'a [RawFd]), /// A message of type `SCM_CREDENTIALS`, containing the pid, uid and gid of /// a process connected to the socket. 
/// /// This is similar to the socket option `SO_PEERCRED`, but requires a /// process to explicitly send its credentials. A process running as root is /// allowed to specify any credentials, while credentials sent by other /// processes are verified by the kernel. /// /// For further information, please refer to the /// [`unix(7)`](https://man7.org/linux/man-pages/man7/unix.7.html) man page. #[cfg(any(target_os = "android", target_os = "linux"))] ScmCredentials(&'a UnixCredentials), /// A message of type `SCM_CREDS`, containing the pid, uid, euid, gid and groups of /// a process connected to the socket. /// /// This is similar to the socket options `LOCAL_CREDS` and `LOCAL_PEERCRED`, but /// requires a process to explicitly send its credentials. /// /// Credentials are always overwritten by the kernel, so this variant does have /// any data, unlike the receive-side /// [`ControlMessageOwned::ScmCreds`][#enum.ControlMessageOwned.html#variant.ScmCreds]. /// /// For further information, please refer to the /// [`unix(4)`](https://www.freebsd.org/cgi/man.cgi?query=unix) man page. #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] ScmCreds, /// Set IV for `AF_ALG` crypto API. /// /// For further information, please refer to the /// [`documentation`](https://kernel.readthedocs.io/en/sphinx-samples/crypto-API.html) #[cfg(any( target_os = "android", target_os = "linux", ))] AlgSetIv(&'a [u8]), /// Set crypto operation for `AF_ALG` crypto API. It may be one of /// `ALG_OP_ENCRYPT` or `ALG_OP_DECRYPT` /// /// For further information, please refer to the /// [`documentation`](https://kernel.readthedocs.io/en/sphinx-samples/crypto-API.html) #[cfg(any( target_os = "android", target_os = "linux", ))] AlgSetOp(&'a libc::c_int), /// Set the length of associated authentication data (AAD) (applicable only to AEAD algorithms) /// for `AF_ALG` crypto API. 
/// /// For further information, please refer to the /// [`documentation`](https://kernel.readthedocs.io/en/sphinx-samples/crypto-API.html) #[cfg(any( target_os = "android", target_os = "linux", ))] AlgSetAeadAssoclen(&'a u32), /// UDP GSO makes it possible for applications to generate network packets /// for a virtual MTU much greater than the real one. /// The length of the send data no longer matches the expected length on /// the wire. /// The size of the datagram payload as it should appear on the wire may be /// passed through this control message. /// Send buffer should consist of multiple fixed-size wire payloads /// following one by one, and the last, possibly smaller one. #[cfg(target_os = "linux")] UdpGsoSegments(&'a u16), /// Configure the sending addressing and interface for v4 /// /// For further information, please refer to the /// [`ip(7)`](https://man7.org/linux/man-pages/man7/ip.7.html) man page. #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "android", target_os = "ios",))] Ipv4PacketInfo(&'a libc::in_pktinfo), /// Configure the sending addressing and interface for v6 /// /// For further information, please refer to the /// [`ipv6(7)`](https://man7.org/linux/man-pages/man7/ipv6.7.html) man page. #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "freebsd", target_os = "android", target_os = "ios",))] Ipv6PacketInfo(&'a libc::in6_pktinfo), /// SO_RXQ_OVFL indicates that an unsigned 32 bit value /// ancilliary msg (cmsg) should be attached to recieved /// skbs indicating the number of packets dropped by the /// socket between the last recieved packet and this /// received packet. 
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))] RxqOvfl(&'a u32), } // An opaque structure used to prevent cmsghdr from being a public type #[doc(hidden)] #[derive(Clone, Debug, Eq, PartialEq)] pub struct UnknownCmsg(cmsghdr, Vec<u8>); impl<'a> ControlMessage<'a> { /// The value of CMSG_SPACE on this message. /// Safe because CMSG_SPACE is always safe fn space(&self) -> usize { unsafe{CMSG_SPACE(self.len() as libc::c_uint) as usize} } /// The value of CMSG_LEN on this message. /// Safe because CMSG_LEN is always safe #[cfg(any(target_os = "android", all(target_os = "linux", not(target_env = "musl"))))] fn cmsg_len(&self) -> usize { unsafe{CMSG_LEN(self.len() as libc::c_uint) as usize} } #[cfg(not(any(target_os = "android", all(target_os = "linux", not(target_env = "musl")))))] fn cmsg_len(&self) -> libc::c_uint { unsafe{CMSG_LEN(self.len() as libc::c_uint)} } /// Return a reference to the payload data as a byte pointer fn copy_to_cmsg_data(&self, cmsg_data: *mut u8) { let data_ptr = match *self { ControlMessage::ScmRights(fds) => { fds as *const _ as *const u8 }, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::ScmCredentials(creds) => { &creds.0 as *const libc::ucred as *const u8 } #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] ControlMessage::ScmCreds => { // The kernel overwrites the data, we just zero it // to make sure it's not uninitialized memory unsafe { ptr::write_bytes(cmsg_data, 0, self.len()) }; return } #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetIv(iv) => { #[allow(deprecated)] // https://github.com/rust-lang/libc/issues/1501 let af_alg_iv = libc::af_alg_iv { ivlen: iv.len() as u32, iv: [0u8; 0], }; let size = mem::size_of_val(&af_alg_iv); unsafe { ptr::copy_nonoverlapping( &af_alg_iv as *const _ as *const u8, cmsg_data, size, ); ptr::copy_nonoverlapping( iv.as_ptr(), cmsg_data.add(size), iv.len() ); }; return }, #[cfg(any(target_os = "android", 
target_os = "linux"))] ControlMessage::AlgSetOp(op) => { op as *const _ as *const u8 }, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetAeadAssoclen(len) => { len as *const _ as *const u8 }, #[cfg(target_os = "linux")] ControlMessage::UdpGsoSegments(gso_size) => { gso_size as *const _ as *const u8 }, #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv4PacketInfo(info) => info as *const _ as *const u8, #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "freebsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv6PacketInfo(info) => info as *const _ as *const u8, #[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))] ControlMessage::RxqOvfl(drop_count) => { drop_count as *const _ as *const u8 }, }; unsafe { ptr::copy_nonoverlapping( data_ptr, cmsg_data, self.len() ) }; } /// The size of the payload, excluding its cmsghdr fn len(&self) -> usize { match *self { ControlMessage::ScmRights(fds) => { mem::size_of_val(fds) }, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::ScmCredentials(creds) => { mem::size_of_val(creds) } #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] ControlMessage::ScmCreds => { mem::size_of::<libc::cmsgcred>() } #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetIv(iv) => { mem::size_of_val(&iv) + iv.len() }, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetOp(op) => { mem::size_of_val(op) }, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetAeadAssoclen(len) => { mem::size_of_val(len) }, #[cfg(target_os = "linux")] ControlMessage::UdpGsoSegments(gso_size) => { mem::size_of_val(gso_size) }, #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv4PacketInfo(info) 
=> mem::size_of_val(info), #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "freebsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv6PacketInfo(info) => mem::size_of_val(info), #[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))] ControlMessage::RxqOvfl(drop_count) => { mem::size_of_val(drop_count) }, } } /// Returns the value to put into the `cmsg_level` field of the header. fn cmsg_level(&self) -> libc::c_int { match *self { ControlMessage::ScmRights(_) => libc::SOL_SOCKET, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::ScmCredentials(_) => libc::SOL_SOCKET, #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] ControlMessage::ScmCreds => libc::SOL_SOCKET, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetIv(_) | ControlMessage::AlgSetOp(_) | ControlMessage::AlgSetAeadAssoclen(_) => libc::SOL_ALG, #[cfg(target_os = "linux")] ControlMessage::UdpGsoSegments(_) => libc::SOL_UDP, #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv4PacketInfo(_) => libc::IPPROTO_IP, #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "freebsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv6PacketInfo(_) => libc::IPPROTO_IPV6, #[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))] ControlMessage::RxqOvfl(_) => libc::SOL_SOCKET, } } /// Returns the value to put into the `cmsg_type` field of the header. 
fn cmsg_type(&self) -> libc::c_int { match *self { ControlMessage::ScmRights(_) => libc::SCM_RIGHTS, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::ScmCredentials(_) => libc::SCM_CREDENTIALS, #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] ControlMessage::ScmCreds => libc::SCM_CREDS, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetIv(_) => { libc::ALG_SET_IV }, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetOp(_) => { libc::ALG_SET_OP }, #[cfg(any(target_os = "android", target_os = "linux"))] ControlMessage::AlgSetAeadAssoclen(_) => { libc::ALG_SET_AEAD_ASSOCLEN }, #[cfg(target_os = "linux")] ControlMessage::UdpGsoSegments(_) => { libc::UDP_SEGMENT }, #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv4PacketInfo(_) => libc::IP_PKTINFO, #[cfg(any(target_os = "linux", target_os = "macos", target_os = "netbsd", target_os = "freebsd", target_os = "android", target_os = "ios",))] ControlMessage::Ipv6PacketInfo(_) => libc::IPV6_PKTINFO, #[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))] ControlMessage::RxqOvfl(_) => { libc::SO_RXQ_OVFL }, } } // Unsafe: cmsg must point to a valid cmsghdr with enough space to // encode self. unsafe fn encode_into(&self, cmsg: *mut cmsghdr) { (*cmsg).cmsg_level = self.cmsg_level(); (*cmsg).cmsg_type = self.cmsg_type(); (*cmsg).cmsg_len = self.cmsg_len(); self.copy_to_cmsg_data(CMSG_DATA(cmsg)); } } /// Send data in scatter-gather vectors to a socket, possibly accompanied /// by ancillary data. Optionally direct the message at the given address, /// as with sendto. /// /// Allocates if cmsgs is nonempty. 
pub fn sendmsg(fd: RawFd, iov: &[IoVec<&[u8]>], cmsgs: &[ControlMessage], flags: MsgFlags, addr: Option<&SockAddr>) -> Result<usize> { let capacity = cmsgs.iter().map(|c| c.space()).sum(); // First size the buffer needed to hold the cmsgs. It must be zeroed, // because subsequent code will not clear the padding bytes. let mut cmsg_buffer = vec![0u8; capacity]; let mhdr = pack_mhdr_to_send(&mut cmsg_buffer[..], &iov, &cmsgs, addr); let ret = unsafe { libc::sendmsg(fd, &mhdr, flags.bits()) }; Errno::result(ret).map(|r| r as usize) } #[cfg(any( target_os = "linux", target_os = "android", target_os = "freebsd", target_os = "netbsd", ))] #[derive(Debug)] pub struct SendMmsgData<'a, I, C> where I: AsRef<[IoVec<&'a [u8]>]>, C: AsRef<[ControlMessage<'a>]> { pub iov: I, pub cmsgs: C, pub addr: Option<SockAddr>, pub _lt: std::marker::PhantomData<&'a I>, } /// An extension of `sendmsg` that allows the caller to transmit multiple /// messages on a socket using a single system call. This has performance /// benefits for some applications. /// /// Allocations are performed for cmsgs and to build `msghdr` buffer /// /// # Arguments /// /// * `fd`: Socket file descriptor /// * `data`: Struct that implements `IntoIterator` with `SendMmsgData` items /// * `flags`: Optional flags passed directly to the operating system. /// /// # Returns /// `Vec` with numbers of sent bytes on each sent message. 
/// /// # References /// [`sendmsg`](fn.sendmsg.html) #[cfg(any( target_os = "linux", target_os = "android", target_os = "freebsd", target_os = "netbsd", ))] pub fn sendmmsg<'a, I, C>( fd: RawFd, data: impl std::iter::IntoIterator<Item=&'a SendMmsgData<'a, I, C>>, flags: MsgFlags ) -> Result<Vec<usize>> where I: AsRef<[IoVec<&'a [u8]>]> + 'a, C: AsRef<[ControlMessage<'a>]> + 'a, { let iter = data.into_iter(); let size_hint = iter.size_hint(); let reserve_items = size_hint.1.unwrap_or(size_hint.0); let mut output = Vec::<libc::mmsghdr>::with_capacity(reserve_items); let mut cmsgs_buffers = Vec::<Vec<u8>>::with_capacity(reserve_items); for d in iter { let capacity: usize = d.cmsgs.as_ref().iter().map(|c| c.space()).sum(); let mut cmsgs_buffer = vec![0u8; capacity]; output.push(libc::mmsghdr { msg_hdr: pack_mhdr_to_send( &mut cmsgs_buffer, &d.iov, &d.cmsgs, d.addr.as_ref() ), msg_len: 0, }); cmsgs_buffers.push(cmsgs_buffer); }; let ret = unsafe { libc::sendmmsg(fd, output.as_mut_ptr(), output.len() as _, flags.bits() as _) }; let sent_messages = Errno::result(ret)? as usize; let mut sent_bytes = Vec::with_capacity(sent_messages); for item in &output { sent_bytes.push(item.msg_len as usize); } Ok(sent_bytes) } #[cfg(any( target_os = "linux", target_os = "android", target_os = "freebsd", target_os = "netbsd", ))] #[derive(Debug)] pub struct RecvMmsgData<'a, I> where I: AsRef<[IoVec<&'a mut [u8]>]> + 'a, { pub iov: I, pub cmsg_buffer: Option<&'a mut Vec<u8>>, } /// An extension of `recvmsg` that allows the caller to receive multiple /// messages from a socket using a single system call. This has /// performance benefits for some applications. /// /// `iov` and `cmsg_buffer` should be constructed similarly to `recvmsg` /// /// Multiple allocations are performed /// /// # Arguments /// /// * `fd`: Socket file descriptor /// * `data`: Struct that implements `IntoIterator` with `RecvMmsgData` items /// * `flags`: Optional flags passed directly to the operating system. 
/// /// # RecvMmsgData /// /// * `iov`: Scatter-gather list of buffers to receive the message /// * `cmsg_buffer`: Space to receive ancillary data. Should be created by /// [`cmsg_space!`](macro.cmsg_space.html) /// /// # Returns /// A `Vec` with multiple `RecvMsg`, one per received message /// /// # References /// - [`recvmsg`](fn.recvmsg.html) /// - [`RecvMsg`](struct.RecvMsg.html) #[cfg(any( target_os = "linux", target_os = "android", target_os = "freebsd", target_os = "netbsd", ))] pub fn recvmmsg<'a, I>( fd: RawFd, data: impl std::iter::IntoIterator<Item=&'a mut RecvMmsgData<'a, I>, IntoIter=impl ExactSizeIterator + Iterator<Item=&'a mut RecvMmsgData<'a, I>>>, flags: MsgFlags, timeout: Option<crate::sys::time::TimeSpec> ) -> Result<Vec<RecvMsg<'a>>> where I: AsRef<[IoVec<&'a mut [u8]>]> + 'a, { let iter = data.into_iter(); let num_messages = iter.len(); let mut output: Vec<libc::mmsghdr> = Vec::with_capacity(num_messages); // Addresses should be pre-allocated. pack_mhdr_to_receive will store them // as raw pointers, so we may not move them. Turn the vec into a boxed // slice so we won't inadvertently reallocate the vec. 
let mut addresses = vec![mem::MaybeUninit::uninit(); num_messages] .into_boxed_slice(); let results: Vec<_> = iter.enumerate().map(|(i, d)| { let (msg_controllen, mhdr) = unsafe { pack_mhdr_to_receive( d.iov.as_ref(), &mut d.cmsg_buffer, addresses[i].as_mut_ptr(), ) }; output.push( libc::mmsghdr { msg_hdr: mhdr, msg_len: 0, } ); (msg_controllen as usize, &mut d.cmsg_buffer) }).collect(); let timeout = if let Some(mut t) = timeout { t.as_mut() as *mut libc::timespec } else { ptr::null_mut() }; let ret = unsafe { libc::recvmmsg(fd, output.as_mut_ptr(), output.len() as _, flags.bits() as _, timeout) }; let _ = Errno::result(ret)?; Ok(output .into_iter() .take(ret as usize) .zip(addresses.iter().map(|addr| unsafe{addr.assume_init()})) .zip(results.into_iter()) .map(|((mmsghdr, address), (msg_controllen, cmsg_buffer))| { unsafe { read_mhdr( mmsghdr.msg_hdr, mmsghdr.msg_len as isize, msg_controllen, address, cmsg_buffer ) } }) .collect()) } unsafe fn read_mhdr<'a, 'b>( mhdr: msghdr, r: isize, msg_controllen: usize, address: sockaddr_storage, cmsg_buffer: &'a mut Option<&'b mut Vec<u8>> ) -> RecvMsg<'b> { let cmsghdr = { if mhdr.msg_controllen > 0 { // got control message(s) cmsg_buffer .as_mut() .unwrap() .set_len(mhdr.msg_controllen as usize); debug_assert!(!mhdr.msg_control.is_null()); debug_assert!(msg_controllen >= mhdr.msg_controllen as usize); CMSG_FIRSTHDR(&mhdr as *const msghdr) } else { ptr::null() }.as_ref() }; let address = sockaddr_storage_to_addr( &address , mhdr.msg_namelen as usize ).ok(); RecvMsg { bytes: r as usize, cmsghdr, address, flags: MsgFlags::from_bits_truncate(mhdr.msg_flags), mhdr, } } unsafe fn pack_mhdr_to_receive<'a, I>( iov: I, cmsg_buffer: &mut Option<&mut Vec<u8>>, address: *mut sockaddr_storage, ) -> (usize, msghdr) where I: AsRef<[IoVec<&'a mut [u8]>]> + 'a, { let (msg_control, msg_controllen) = cmsg_buffer.as_mut() .map(|v| (v.as_mut_ptr(), v.capacity())) .unwrap_or((ptr::null_mut(), 0)); let mhdr = { // Musl's msghdr has private 
fields, so this is the only way to // initialize it. let mut mhdr = mem::MaybeUninit::<msghdr>::zeroed(); let p = mhdr.as_mut_ptr(); (*p).msg_name = address as *mut c_void; (*p).msg_namelen = mem::size_of::<sockaddr_storage>() as socklen_t; (*p).msg_iov = iov.as_ref().as_ptr() as *mut iovec; (*p).msg_iovlen = iov.as_ref().len() as _; (*p).msg_control = msg_control as *mut c_void; (*p).msg_controllen = msg_controllen as _; (*p).msg_flags = 0; mhdr.assume_init() }; (msg_controllen, mhdr) } fn pack_mhdr_to_send<'a, I, C>( cmsg_buffer: &mut [u8], iov: I, cmsgs: C, addr: Option<&SockAddr> ) -> msghdr where I: AsRef<[IoVec<&'a [u8]>]>, C: AsRef<[ControlMessage<'a>]> { let capacity = cmsg_buffer.len(); // Next encode the sending address, if provided let (name, namelen) = match addr { Some(addr) => { let (x, y) = addr.as_ffi_pair(); (x as *const _, y) }, None => (ptr::null(), 0), }; // The message header must be initialized before the individual cmsgs. let cmsg_ptr = if capacity > 0 { cmsg_buffer.as_ptr() as *mut c_void } else { ptr::null_mut() }; let mhdr = unsafe { // Musl's msghdr has private fields, so this is the only way to // initialize it. let mut mhdr = mem::MaybeUninit::<msghdr>::zeroed(); let p = mhdr.as_mut_ptr(); (*p).msg_name = name as *mut _; (*p).msg_namelen = namelen; // transmute iov into a mutable pointer. sendmsg doesn't really mutate // the buffer, but the standard says that it takes a mutable pointer (*p).msg_iov = iov.as_ref().as_ptr() as *mut _; (*p).msg_iovlen = iov.as_ref().len() as _; (*p).msg_control = cmsg_ptr; (*p).msg_controllen = capacity as _; (*p).msg_flags = 0; mhdr.assume_init() }; // Encode each cmsg. This must happen after initializing the header because // CMSG_NEXT_HDR and friends read the msg_control and msg_controllen fields. 
// CMSG_FIRSTHDR is always safe let mut pmhdr: *mut cmsghdr = unsafe { CMSG_FIRSTHDR(&mhdr as *const msghdr) }; for cmsg in cmsgs.as_ref() { assert_ne!(pmhdr, ptr::null_mut()); // Safe because we know that pmhdr is valid, and we initialized it with // sufficient space unsafe { cmsg.encode_into(pmhdr) }; // Safe because mhdr is valid pmhdr = unsafe { CMSG_NXTHDR(&mhdr as *const msghdr, pmhdr) }; } mhdr } /// Receive message in scatter-gather vectors from a socket, and /// optionally receive ancillary data into the provided buffer. /// If no ancillary data is desired, use () as the type parameter. /// /// # Arguments /// /// * `fd`: Socket file descriptor /// * `iov`: Scatter-gather list of buffers to receive the message /// * `cmsg_buffer`: Space to receive ancillary data. Should be created by /// [`cmsg_space!`](macro.cmsg_space.html) /// * `flags`: Optional flags passed directly to the operating system. /// /// # References /// [recvmsg(2)](https://pubs.opengroup.org/onlinepubs/9699919799/functions/recvmsg.html) pub fn recvmsg<'a>(fd: RawFd, iov: &[IoVec<&mut [u8]>], mut cmsg_buffer: Option<&'a mut Vec<u8>>, flags: MsgFlags) -> Result<RecvMsg<'a>> { let mut address = mem::MaybeUninit::uninit(); let (msg_controllen, mut mhdr) = unsafe { pack_mhdr_to_receive(&iov, &mut cmsg_buffer, address.as_mut_ptr()) }; let ret = unsafe { libc::recvmsg(fd, &mut mhdr, flags.bits()) }; let r = Errno::result(ret)?; Ok(unsafe { read_mhdr(mhdr, r, msg_controllen, address.assume_init(), &mut cmsg_buffer) }) } /// Create an endpoint for communication /// /// The `protocol` specifies a particular protocol to be used with the /// socket. Normally only a single protocol exists to support a /// particular socket type within a given protocol family, in which case /// protocol can be specified as `None`. However, it is possible that many /// protocols may exist, in which case a particular protocol must be /// specified in this manner. 
/// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/socket.html) pub fn socket<T: Into<Option<SockProtocol>>>(domain: AddressFamily, ty: SockType, flags: SockFlag, protocol: T) -> Result<RawFd> { let protocol = match protocol.into() { None => 0, Some(p) => p as c_int, }; // SockFlags are usually embedded into `ty`, but we don't do that in `nix` because it's a // little easier to understand by separating it out. So we have to merge these bitfields // here. let mut ty = ty as c_int; ty |= flags.bits(); let res = unsafe { libc::socket(domain as c_int, ty, protocol) }; Errno::result(res) } /// Create a pair of connected sockets /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/socketpair.html) pub fn socketpair<T: Into<Option<SockProtocol>>>(domain: AddressFamily, ty: SockType, protocol: T, flags: SockFlag) -> Result<(RawFd, RawFd)> { let protocol = match protocol.into() { None => 0, Some(p) => p as c_int, }; // SockFlags are usually embedded into `ty`, but we don't do that in `nix` because it's a // little easier to understand by separating it out. So we have to merge these bitfields // here. 
let mut ty = ty as c_int; ty |= flags.bits(); let mut fds = [-1, -1]; let res = unsafe { libc::socketpair(domain as c_int, ty, protocol, fds.as_mut_ptr()) }; Errno::result(res)?; Ok((fds[0], fds[1])) } /// Listen for connections on a socket /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/listen.html) pub fn listen(sockfd: RawFd, backlog: usize) -> Result<()> { let res = unsafe { libc::listen(sockfd, backlog as c_int) }; Errno::result(res).map(drop) } /// Bind a name to a socket /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/bind.html) pub fn bind(fd: RawFd, addr: &SockAddr) -> Result<()> { let res = unsafe { let (ptr, len) = addr.as_ffi_pair(); libc::bind(fd, ptr, len) }; Errno::result(res).map(drop) } /// Accept a connection on a socket /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/accept.html) pub fn accept(sockfd: RawFd) -> Result<RawFd> { let res = unsafe { libc::accept(sockfd, ptr::null_mut(), ptr::null_mut()) }; Errno::result(res) } /// Accept a connection on a socket /// /// [Further reading](https://man7.org/linux/man-pages/man2/accept.2.html) #[cfg(any(all( target_os = "android", any( target_arch = "aarch64", target_arch = "x86", target_arch = "x86_64" ) ), target_os = "freebsd", target_os = "linux", target_os = "openbsd"))] pub fn accept4(sockfd: RawFd, flags: SockFlag) -> Result<RawFd> { let res = unsafe { libc::accept4(sockfd, ptr::null_mut(), ptr::null_mut(), flags.bits()) }; Errno::result(res) } /// Initiate a connection on a socket /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/connect.html) pub fn connect(fd: RawFd, addr: &SockAddr) -> Result<()> { let res = unsafe { let (ptr, len) = addr.as_ffi_pair(); libc::connect(fd, ptr, len) }; Errno::result(res).map(drop) } /// Receive data from a connection-oriented socket. 
Returns the number of /// bytes read /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/recv.html) pub fn recv(sockfd: RawFd, buf: &mut [u8], flags: MsgFlags) -> Result<usize> { unsafe { let ret = libc::recv( sockfd, buf.as_ptr() as *mut c_void, buf.len() as size_t, flags.bits()); Errno::result(ret).map(|r| r as usize) } } /// Receive data from a connectionless or connection-oriented socket. Returns /// the number of bytes read and, for connectionless sockets, the socket /// address of the sender. /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/recvfrom.html) pub fn recvfrom(sockfd: RawFd, buf: &mut [u8]) -> Result<(usize, Option<SockAddr>)> { unsafe { let mut addr: sockaddr_storage = mem::zeroed(); let mut len = mem::size_of::<sockaddr_storage>() as socklen_t; let ret = Errno::result(libc::recvfrom( sockfd, buf.as_ptr() as *mut c_void, buf.len() as size_t, 0, &mut addr as *mut libc::sockaddr_storage as *mut libc::sockaddr, &mut len as *mut socklen_t))? as usize; match sockaddr_storage_to_addr(&addr, len as usize) { Err(Errno::ENOTCONN) => Ok((ret, None)), Ok(addr) => Ok((ret, Some(addr))), Err(e) => Err(e) } } } /// Send a message to a socket /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/sendto.html) pub fn sendto(fd: RawFd, buf: &[u8], addr: &SockAddr, flags: MsgFlags) -> Result<usize> { let ret = unsafe { let (ptr, len) = addr.as_ffi_pair(); libc::sendto(fd, buf.as_ptr() as *const c_void, buf.len() as size_t, flags.bits(), ptr, len) }; Errno::result(ret).map(|r| r as usize) } /// Send data to a connection-oriented socket. 
Returns the number of bytes read /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/send.html) pub fn send(fd: RawFd, buf: &[u8], flags: MsgFlags) -> Result<usize> { let ret = unsafe { libc::send(fd, buf.as_ptr() as *const c_void, buf.len() as size_t, flags.bits()) }; Errno::result(ret).map(|r| r as usize) } /* * * ===== Socket Options ===== * */ /// Represents a socket option that can be accessed or set. Used as an argument /// to `getsockopt` pub trait GetSockOpt : Copy { type Val; #[doc(hidden)] fn get(&self, fd: RawFd) -> Result<Self::Val>; } /// Represents a socket option that can be accessed or set. Used as an argument /// to `setsockopt` pub trait SetSockOpt : Clone { type Val; #[doc(hidden)] fn set(&self, fd: RawFd, val: &Self::Val) -> Result<()>; } /// Get the current value for the requested socket option /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/getsockopt.html) pub fn getsockopt<O: GetSockOpt>(fd: RawFd, opt: O) -> Result<O::Val> { opt.get(fd) } /// Sets the value for the requested socket option /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/setsockopt.html) /// /// # Examples /// /// ``` /// use nix::sys::socket::setsockopt; /// use nix::sys::socket::sockopt::KeepAlive; /// use std::net::TcpListener; /// use std::os::unix::io::AsRawFd; /// /// let listener = TcpListener::bind("0.0.0.0:0").unwrap(); /// let fd = listener.as_raw_fd(); /// let res = setsockopt(fd, KeepAlive, &true); /// assert!(res.is_ok()); /// ``` pub fn setsockopt<O: SetSockOpt>(fd: RawFd, opt: O, val: &O::Val) -> Result<()> { opt.set(fd, val) } /// Get the address of the peer connected to the socket `fd`. 
/// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/getpeername.html) pub fn getpeername(fd: RawFd) -> Result<SockAddr> { unsafe { let mut addr = mem::MaybeUninit::uninit(); let mut len = mem::size_of::<sockaddr_storage>() as socklen_t; let ret = libc::getpeername( fd, addr.as_mut_ptr() as *mut libc::sockaddr, &mut len ); Errno::result(ret)?; sockaddr_storage_to_addr(&addr.assume_init(), len as usize) } } /// Get the current address to which the socket `fd` is bound. /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/getsockname.html) pub fn getsockname(fd: RawFd) -> Result<SockAddr> { unsafe { let mut addr = mem::MaybeUninit::uninit(); let mut len = mem::size_of::<sockaddr_storage>() as socklen_t; let ret = libc::getsockname( fd, addr.as_mut_ptr() as *mut libc::sockaddr, &mut len ); Errno::result(ret)?; sockaddr_storage_to_addr(&addr.assume_init(), len as usize) } } /// Return the appropriate `SockAddr` type from a `sockaddr_storage` of a /// certain size. /// /// In C this would usually be done by casting. The `len` argument /// should be the number of bytes in the `sockaddr_storage` that are actually /// allocated and valid. It must be at least as large as all the useful parts /// of the structure. Note that in the case of a `sockaddr_un`, `len` need not /// include the terminating null. 
pub fn sockaddr_storage_to_addr( addr: &sockaddr_storage, len: usize) -> Result<SockAddr> { assert!(len <= mem::size_of::<sockaddr_storage>()); if len < mem::size_of_val(&addr.ss_family) { return Err(Error::from(Errno::ENOTCONN)); } match c_int::from(addr.ss_family) { libc::AF_INET => { assert!(len as usize >= mem::size_of::<sockaddr_in>()); let sin = unsafe { *(addr as *const sockaddr_storage as *const sockaddr_in) }; Ok(SockAddr::Inet(InetAddr::V4(sin))) } libc::AF_INET6 => { assert!(len as usize >= mem::size_of::<sockaddr_in6>()); let sin6 = unsafe { *(addr as *const _ as *const sockaddr_in6) }; Ok(SockAddr::Inet(InetAddr::V6(sin6))) } libc::AF_UNIX => { let pathlen = len - offset_of!(sockaddr_un, sun_path); let sun = unsafe { *(addr as *const _ as *const sockaddr_un) }; Ok(SockAddr::Unix(UnixAddr(sun, pathlen))) } #[cfg(any(target_os = "android", target_os = "linux"))] libc::AF_PACKET => { use libc::sockaddr_ll; // Don't assert anything about the size. // Apparently the Linux kernel can return smaller sizes when // the value in the last element of sockaddr_ll (`sll_addr`) is // smaller than the declared size of that field let sll = unsafe { *(addr as *const _ as *const sockaddr_ll) }; Ok(SockAddr::Link(LinkAddr(sll))) } #[cfg(any(target_os = "android", target_os = "linux"))] libc::AF_NETLINK => { use libc::sockaddr_nl; let snl = unsafe { *(addr as *const _ as *const sockaddr_nl) }; Ok(SockAddr::Netlink(NetlinkAddr(snl))) } #[cfg(any(target_os = "android", target_os = "linux"))] libc::AF_ALG => { use libc::sockaddr_alg; let salg = unsafe { *(addr as *const _ as *const sockaddr_alg) }; Ok(SockAddr::Alg(AlgAddr(salg))) } #[cfg(any(target_os = "android", target_os = "linux"))] libc::AF_VSOCK => { use libc::sockaddr_vm; let svm = unsafe { *(addr as *const _ as *const sockaddr_vm) }; Ok(SockAddr::Vsock(VsockAddr(svm))) } af => panic!("unexpected address family {}", af), } } #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum Shutdown { /// Further receptions 
will be disallowed. Read, /// Further transmissions will be disallowed. Write, /// Further receptions and transmissions will be disallowed. Both, } /// Shut down part of a full-duplex connection. /// /// [Further reading](https://pubs.opengroup.org/onlinepubs/9699919799/functions/shutdown.html) pub fn shutdown(df: RawFd, how: Shutdown) -> Result<()> { unsafe { use libc::shutdown; let how = match how { Shutdown::Read => libc::SHUT_RD, Shutdown::Write => libc::SHUT_WR, Shutdown::Both => libc::SHUT_RDWR, }; Errno::result(shutdown(df, how)).map(drop) } } #[cfg(test)] mod tests { #[test] fn can_use_cmsg_space() { let _ = cmsg_space!(u8); } }
36.706631
130
0.581397
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn can_use_cmsg_space() {\n let _ = cmsg_space!(u8);\n }\n}" ]
f701cc09ffa27d21e22cd3cddb86c098ad91ff22
9,309
rs
Rust
src/util.rs
alpearce/rust-tuf
4d4fa7330ba105481c0c4018a57e15efdb27be98
[ "Apache-2.0", "MIT" ]
null
null
null
src/util.rs
alpearce/rust-tuf
4d4fa7330ba105481c0c4018a57e15efdb27be98
[ "Apache-2.0", "MIT" ]
null
null
null
src/util.rs
alpearce/rust-tuf
4d4fa7330ba105481c0c4018a57e15efdb27be98
[ "Apache-2.0", "MIT" ]
null
null
null
use chrono::offset::Utc; use chrono::DateTime; use futures_io::AsyncRead; use futures_util::ready; use ring::digest::{self, SHA256, SHA512}; use std::io::{self, ErrorKind}; use std::marker::Unpin; use std::pin::Pin; use std::task::{Context, Poll}; use crate::crypto::{HashAlgorithm, HashValue}; use crate::error::Error; use crate::Result; /// Wrapper to verify a byte stream as it is read. /// /// Wraps a `Read` to ensure that the consumer can't read more than a capped maximum number of /// bytes. Also, this ensures that a minimum bitrate and returns an `Err` if it is not. Finally, /// when the underlying `Read` is fully consumed, the hash of the data is optionally calculated. If /// the calculated hash does not match the given hash, it will return an `Err`. Consumers of a /// `SafeReader` should purge and untrust all read bytes if this ever returns an `Err`. /// /// It is **critical** that none of the bytes from this struct are used until it has been fully /// consumed as the data is untrusted. pub struct SafeReader<R> { inner: R, max_size: u64, min_bytes_per_second: u32, hasher: Option<(digest::Context, HashValue)>, start_time: Option<DateTime<Utc>>, bytes_read: u64, } impl<R: AsyncRead> SafeReader<R> { /// Create a new `SafeReader`. /// /// The argument `hash_data` takes a `HashAlgorithm` and expected `HashValue`. The given /// algorithm is used to hash the data as it is read. At the end of the stream, the digest is /// calculated and compared against `HashValue`. If the two are not equal, it means the data /// stream has been tampered with in some way. 
pub fn new( read: R, max_size: u64, min_bytes_per_second: u32, hash_data: Option<(&HashAlgorithm, HashValue)>, ) -> Result<Self> { let hasher = match hash_data { Some((alg, value)) => { let ctx = match *alg { HashAlgorithm::Sha256 => digest::Context::new(&SHA256), HashAlgorithm::Sha512 => digest::Context::new(&SHA512), HashAlgorithm::Unknown(ref s) => { return Err(Error::IllegalArgument(format!( "Unknown hash algorithm: {}", s ))); } }; Some((ctx, value)) } None => None, }; Ok(SafeReader { inner: read, max_size, min_bytes_per_second, hasher, start_time: None, bytes_read: 0, }) } } impl<R: AsyncRead + Unpin> AsyncRead for SafeReader<R> { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context, buf: &mut [u8], ) -> Poll<io::Result<usize>> { let read_bytes = ready!(Pin::new(&mut self.inner).poll_read(cx, buf))?; if self.start_time.is_none() { self.start_time = Some(Utc::now()) } if read_bytes == 0 { if let Some((context, expected_hash)) = self.hasher.take() { let generated_hash = context.finish(); if generated_hash.as_ref() != expected_hash.value() { return Poll::Ready(Err(io::Error::new( ErrorKind::InvalidData, "Calculated hash did not match the required hash.", ))); } } return Poll::Ready(Ok(0)); } match self.bytes_read.checked_add(read_bytes as u64) { Some(sum) if sum <= self.max_size => self.bytes_read = sum, _ => { return Poll::Ready(Err(io::Error::new( ErrorKind::InvalidData, "Read exceeded the maximum allowed bytes.", ))); } } let duration = Utc::now().signed_duration_since(self.start_time.unwrap()); // 30 second grace period before we start checking the bitrate if duration.num_seconds() >= 30 { if (self.bytes_read as f32) / (duration.num_seconds() as f32) < self.min_bytes_per_second as f32 { return Poll::Ready(Err(io::Error::new( ErrorKind::TimedOut, "Read aborted. 
Bitrate too low.", ))); } } if let Some((ref mut context, _)) = self.hasher { context.update(&buf[..(read_bytes)]); } Poll::Ready(Ok(read_bytes)) } } #[cfg(test)] mod test { use super::*; use futures_executor::block_on; use futures_util::io::AsyncReadExt; #[test] fn valid_read() { block_on(async { let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03]; let mut reader = SafeReader::new(bytes, bytes.len() as u64, 0, None).unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_ok()); assert_eq!(buf, bytes); }) } #[test] fn valid_read_large_data() { block_on(async { let bytes: &[u8] = &[0x00; 64 * 1024]; let mut reader = SafeReader::new(bytes, bytes.len() as u64, 0, None).unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_ok()); assert_eq!(buf, bytes); }) } #[test] fn valid_read_below_max_size() { block_on(async { let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03]; let mut reader = SafeReader::new(bytes, (bytes.len() as u64) + 1, 0, None).unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_ok()); assert_eq!(buf, bytes); }) } #[test] fn invalid_read_above_max_size() { block_on(async { let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03]; let mut reader = SafeReader::new(bytes, (bytes.len() as u64) - 1, 0, None).unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_err()); }) } #[test] fn invalid_read_above_max_size_large_data() { block_on(async { let bytes: &[u8] = &[0x00; 64 * 1024]; let mut reader = SafeReader::new(bytes, (bytes.len() as u64) - 1, 0, None).unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_err()); }) } #[test] fn valid_read_good_hash() { block_on(async { let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03]; let mut context = digest::Context::new(&SHA256); context.update(&bytes); let hash_value = HashValue::new(context.finish().as_ref().to_vec()); let mut reader = SafeReader::new( bytes, bytes.len() as u64, 0, Some((&HashAlgorithm::Sha256, 
hash_value)), ) .unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_ok()); assert_eq!(buf, bytes); }) } #[test] fn invalid_read_bad_hash() { block_on(async { let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03]; let mut context = digest::Context::new(&SHA256); context.update(&bytes); context.update(&[0xFF]); // evil bytes let hash_value = HashValue::new(context.finish().as_ref().to_vec()); let mut reader = SafeReader::new( bytes, bytes.len() as u64, 0, Some((&HashAlgorithm::Sha256, hash_value)), ) .unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_err()); }) } #[test] fn valid_read_good_hash_large_data() { block_on(async { let bytes: &[u8] = &[0x00; 64 * 1024]; let mut context = digest::Context::new(&SHA256); context.update(&bytes); let hash_value = HashValue::new(context.finish().as_ref().to_vec()); let mut reader = SafeReader::new( bytes, bytes.len() as u64, 0, Some((&HashAlgorithm::Sha256, hash_value)), ) .unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_ok()); assert_eq!(buf, bytes); }) } #[test] fn invalid_read_bad_hash_large_data() { block_on(async { let bytes: &[u8] = &[0x00; 64 * 1024]; let mut context = digest::Context::new(&SHA256); context.update(&bytes); context.update(&[0xFF]); // evil bytes let hash_value = HashValue::new(context.finish().as_ref().to_vec()); let mut reader = SafeReader::new( bytes, bytes.len() as u64, 0, Some((&HashAlgorithm::Sha256, hash_value)), ) .unwrap(); let mut buf = Vec::new(); assert!(reader.read_to_end(&mut buf).await.is_err()); }) } }
34.350554
99
0.520034
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn valid_read() {\n block_on(async {\n let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03];\n let mut reader = SafeReader::new(bytes, bytes.len() as u64, 0, None).unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_ok());\n assert_eq!(buf, bytes);\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn valid_read_large_data() {\n block_on(async {\n let bytes: &[u8] = &[0x00; 64 * 1024];\n let mut reader = SafeReader::new(bytes, bytes.len() as u64, 0, None).unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_ok());\n assert_eq!(buf, bytes);\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn valid_read_below_max_size() {\n block_on(async {\n let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03];\n let mut reader = SafeReader::new(bytes, (bytes.len() as u64) + 1, 0, None).unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_ok());\n assert_eq!(buf, bytes);\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_read_above_max_size() {\n block_on(async {\n let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03];\n let mut reader = SafeReader::new(bytes, (bytes.len() as u64) - 1, 0, None).unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_err());\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_read_above_max_size_large_data() {\n block_on(async {\n let bytes: &[u8] = &[0x00; 64 * 1024];\n let mut reader = SafeReader::new(bytes, (bytes.len() as u64) - 1, 0, None).unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_err());\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn valid_read_good_hash() {\n block_on(async {\n let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03];\n let mut context = digest::Context::new(&SHA256);\n context.update(&bytes);\n let hash_value = 
HashValue::new(context.finish().as_ref().to_vec());\n let mut reader = SafeReader::new(\n bytes,\n bytes.len() as u64,\n 0,\n Some((&HashAlgorithm::Sha256, hash_value)),\n )\n .unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_ok());\n assert_eq!(buf, bytes);\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_read_bad_hash() {\n block_on(async {\n let bytes: &[u8] = &[0x00, 0x01, 0x02, 0x03];\n let mut context = digest::Context::new(&SHA256);\n context.update(&bytes);\n context.update(&[0xFF]); // evil bytes\n let hash_value = HashValue::new(context.finish().as_ref().to_vec());\n let mut reader = SafeReader::new(\n bytes,\n bytes.len() as u64,\n 0,\n Some((&HashAlgorithm::Sha256, hash_value)),\n )\n .unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_err());\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn valid_read_good_hash_large_data() {\n block_on(async {\n let bytes: &[u8] = &[0x00; 64 * 1024];\n let mut context = digest::Context::new(&SHA256);\n context.update(&bytes);\n let hash_value = HashValue::new(context.finish().as_ref().to_vec());\n let mut reader = SafeReader::new(\n bytes,\n bytes.len() as u64,\n 0,\n Some((&HashAlgorithm::Sha256, hash_value)),\n )\n .unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut buf).await.is_ok());\n assert_eq!(buf, bytes);\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_read_bad_hash_large_data() {\n block_on(async {\n let bytes: &[u8] = &[0x00; 64 * 1024];\n let mut context = digest::Context::new(&SHA256);\n context.update(&bytes);\n context.update(&[0xFF]); // evil bytes\n let hash_value = HashValue::new(context.finish().as_ref().to_vec());\n let mut reader = SafeReader::new(\n bytes,\n bytes.len() as u64,\n 0,\n Some((&HashAlgorithm::Sha256, hash_value)),\n )\n .unwrap();\n let mut buf = Vec::new();\n assert!(reader.read_to_end(&mut 
buf).await.is_err());\n })\n }\n}" ]
f7023ef38590c3d50f3eb561efad5d6f442fdacb
9,332
rs
Rust
src/serialization/v1.rs
akkoro/macaroon
6015828846f11251248fcdd717e015c33bab8b70
[ "MIT" ]
14
2020-05-26T07:49:44.000Z
2022-01-16T22:16:03.000Z
src/serialization/v1.rs
akkoro/macaroon
6015828846f11251248fcdd717e015c33bab8b70
[ "MIT" ]
40
2020-05-11T11:34:28.000Z
2022-03-23T12:19:48.000Z
src/serialization/v1.rs
akkoro/macaroon
6015828846f11251248fcdd717e015c33bab8b70
[ "MIT" ]
5
2020-09-10T08:15:16.000Z
2021-10-31T05:44:25.000Z
use caveat::{Caveat, CaveatBuilder}; use error::MacaroonError; use serialization::macaroon_builder::MacaroonBuilder; use std::str; use ByteString; use Macaroon; use Result; // Version 1 fields const LOCATION: &str = "location"; const IDENTIFIER: &str = "identifier"; const SIGNATURE: &str = "signature"; const CID: &str = "cid"; const VID: &str = "vid"; const CL: &str = "cl"; const HEADER_SIZE: usize = 4; fn serialize_as_packet<'r>(tag: &'r str, value: &'r [u8]) -> Vec<u8> { let mut packet: Vec<u8> = Vec::new(); let size = HEADER_SIZE + 2 + tag.len() + value.len(); packet.extend(packet_header(size)); packet.extend_from_slice(tag.as_bytes()); packet.extend_from_slice(b" "); packet.extend_from_slice(value); packet.extend_from_slice(b"\n"); packet } fn to_hex_char(value: u8) -> u8 { let hex = format!("{:1x}", value); hex.as_bytes()[0] } fn packet_header(size: usize) -> Vec<u8> { vec![ to_hex_char(((size >> 12) & 15) as u8), to_hex_char(((size >> 8) & 15) as u8), to_hex_char(((size >> 4) & 15) as u8), to_hex_char((size & 15) as u8), ] } pub fn serialize(macaroon: &Macaroon) -> Result<Vec<u8>> { let mut serialized: Vec<u8> = Vec::new(); if let Some(ref location) = macaroon.location() { serialized.extend(serialize_as_packet(LOCATION, location.as_bytes())); }; serialized.extend(serialize_as_packet(IDENTIFIER, &macaroon.identifier().0)); for c in macaroon.caveats() { match c { Caveat::FirstParty(fp) => { serialized.extend(serialize_as_packet(CID, &fp.predicate().0)); } Caveat::ThirdParty(tp) => { serialized.extend(serialize_as_packet(CID, &tp.id().0)); serialized.extend(serialize_as_packet(VID, &tp.verifier_id().0)); serialized.extend(serialize_as_packet(CL, tp.location().as_bytes())) } } } serialized.extend(serialize_as_packet(SIGNATURE, &macaroon.signature())); Ok(base64::encode_config(&serialized, base64::URL_SAFE) .as_bytes() .to_vec()) } fn base64_decode(s: &str) -> Result<Vec<u8>> { Ok(base64::decode_config(s, base64::URL_SAFE)?) 
} struct Packet { key: String, value: Vec<u8>, } fn deserialize_as_packets(data: &[u8], mut packets: Vec<Packet>) -> Result<Vec<Packet>> { if data.is_empty() { return Ok(packets); } let hex: &str = str::from_utf8(&data[..4])?; let size: usize = usize::from_str_radix(hex, 16)?; let packet_data = &data[4..size]; let index = split_index(packet_data)?; let (key_slice, value_slice) = packet_data.split_at(index); packets.push(Packet { key: String::from_utf8(key_slice.to_vec())?, // skip beginning space and terminating \n value: value_slice[1..value_slice.len() - 1].to_vec(), }); deserialize_as_packets(&data[size..], packets) } fn split_index(packet: &[u8]) -> Result<usize> { match packet.iter().position(|&r| r == b' ') { Some(index) => Ok(index), None => Err(MacaroonError::DeserializationError(String::from( "Key/value error", ))), } } pub fn deserialize(base64: &[u8]) -> Result<Macaroon> { let data = base64_decode(&String::from_utf8(base64.to_vec())?)?; let mut builder: MacaroonBuilder = MacaroonBuilder::new(); let mut caveat_builder: CaveatBuilder = CaveatBuilder::new(); for packet in deserialize_as_packets(data.as_slice(), Vec::new())? 
{ match packet.key.as_str() { LOCATION => { builder.set_location(&String::from_utf8(packet.value)?); } IDENTIFIER => { builder.set_identifier(ByteString(packet.value)); } SIGNATURE => { if caveat_builder.has_id() { builder.add_caveat(caveat_builder.build()?); caveat_builder = CaveatBuilder::new(); } if packet.value.len() != 32 { error!( "deserialize_v1: Deserialization error - signature length is {}", packet.value.len() ); return Err(MacaroonError::DeserializationError(String::from( "Illegal signature \ length in \ packet", ))); } builder.set_signature(&packet.value); } CID => { if caveat_builder.has_id() { builder.add_caveat(caveat_builder.build()?); caveat_builder = CaveatBuilder::new(); caveat_builder.add_id(ByteString(packet.value)); } else { caveat_builder.add_id(ByteString(packet.value)); } } VID => { caveat_builder.add_verifier_id(ByteString(packet.value)); } CL => caveat_builder.add_location(String::from_utf8(packet.value)?), _ => { return Err(MacaroonError::DeserializationError(String::from( "Unknown key", ))) } }; } builder.build() } #[cfg(test)] mod tests { use ByteString; use Caveat; use Macaroon; use MacaroonKey; #[test] fn test_deserialize() { let mut serialized = "MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAyZnNpZ25hdHVyZSB83ueSURxbxvUoSFgF3-myTnheKOKpkwH51xHGCeOO9wo"; let mut signature: MacaroonKey = [ 124, 222, 231, 146, 81, 28, 91, 198, 245, 40, 72, 88, 5, 223, 233, 178, 78, 120, 94, 40, 226, 169, 147, 1, 249, 215, 17, 198, 9, 227, 142, 247, ] .into(); let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap(); assert!(macaroon.location().is_some()); assert_eq!("http://example.org/", &macaroon.location().unwrap()); assert_eq!(ByteString::from("keyid"), macaroon.identifier()); assert_eq!(signature, macaroon.signature()); serialized = 
"MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAxZGNpZCBhY2NvdW50ID0gMzczNTkyODU1OQowMDJmc2lnbmF0dXJlIPVIB_bcbt-Ivw9zBrOCJWKjYlM9v3M5umF2XaS9JZ2HCg"; signature = [ 245, 72, 7, 246, 220, 110, 223, 136, 191, 15, 115, 6, 179, 130, 37, 98, 163, 98, 83, 61, 191, 115, 57, 186, 97, 118, 93, 164, 189, 37, 157, 135, ] .into(); let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap(); assert!(macaroon.location().is_some()); assert_eq!("http://example.org/", &macaroon.location().unwrap()); assert_eq!(ByteString::from("keyid"), macaroon.identifier()); assert_eq!(1, macaroon.caveats().len()); let predicate = match &macaroon.caveats()[0] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("account = 3735928559"), predicate); assert_eq!(signature, macaroon.signature()); } #[test] fn test_deserialize_two_caveats() { let serialized = "MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAxZGNpZCBhY2NvdW50ID0gMzczNTkyODU1OQowMDE1Y2lkIHVzZXIgPSBhbGljZQowMDJmc2lnbmF0dXJlIEvpZ80eoMaya69qSpTumwWxWIbaC6hejEKpPI0OEl78Cg"; let signature: MacaroonKey = [ 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88, 134, 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252, ] .into(); let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap(); assert!(macaroon.location().is_some()); assert_eq!("http://example.org/", &macaroon.location().unwrap()); assert_eq!(ByteString::from("keyid"), macaroon.identifier()); assert_eq!(signature, macaroon.signature()); assert_eq!(2, macaroon.caveats().len()); let predicate = match &macaroon.caveats()[0] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("account = 3735928559"), predicate); let predicate = match &macaroon.caveats()[1] { Caveat::FirstParty(fp) => fp.predicate(), _ => ByteString::default(), }; assert_eq!(ByteString::from("user = 
alice"), predicate); } #[test] fn test_serialize_deserialize() { let mut macaroon: Macaroon = Macaroon::create( Some("http://example.org/".into()), &"my key".into(), "keyid".into(), ) .unwrap(); macaroon.add_first_party_caveat("account = 3735928559".into()); macaroon.add_first_party_caveat("user = alice".into()); macaroon.add_third_party_caveat( "https://auth.mybank.com", &"caveat key".into(), "caveat".into(), ); let serialized = macaroon.serialize(super::super::Format::V1).unwrap(); let deserialized = Macaroon::deserialize(&serialized).unwrap(); assert_eq!(macaroon, deserialized); } }
38.561983
230
0.583155
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_deserialize() {\n let mut serialized = \"MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAyZnNpZ25hdHVyZSB83ueSURxbxvUoSFgF3-myTnheKOKpkwH51xHGCeOO9wo\";\n let mut signature: MacaroonKey = [\n 124, 222, 231, 146, 81, 28, 91, 198, 245, 40, 72, 88, 5, 223, 233, 178, 78, 120, 94,\n 40, 226, 169, 147, 1, 249, 215, 17, 198, 9, 227, 142, 247,\n ]\n .into();\n let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap();\n assert!(macaroon.location().is_some());\n assert_eq!(\"http://example.org/\", &macaroon.location().unwrap());\n assert_eq!(ByteString::from(\"keyid\"), macaroon.identifier());\n assert_eq!(signature, macaroon.signature());\n serialized = \"MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAxZGNpZCBhY2NvdW50ID0gMzczNTkyODU1OQowMDJmc2lnbmF0dXJlIPVIB_bcbt-Ivw9zBrOCJWKjYlM9v3M5umF2XaS9JZ2HCg\";\n signature = [\n 245, 72, 7, 246, 220, 110, 223, 136, 191, 15, 115, 6, 179, 130, 37, 98, 163, 98, 83,\n 61, 191, 115, 57, 186, 97, 118, 93, 164, 189, 37, 157, 135,\n ]\n .into();\n let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap();\n assert!(macaroon.location().is_some());\n assert_eq!(\"http://example.org/\", &macaroon.location().unwrap());\n assert_eq!(ByteString::from(\"keyid\"), macaroon.identifier());\n assert_eq!(1, macaroon.caveats().len());\n let predicate = match &macaroon.caveats()[0] {\n Caveat::FirstParty(fp) => fp.predicate(),\n _ => ByteString::default(),\n };\n assert_eq!(ByteString::from(\"account = 3735928559\"), predicate);\n assert_eq!(signature, macaroon.signature());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_deserialize_two_caveats() {\n let serialized = \"MDAyMWxvY2F0aW9uIGh0dHA6Ly9leGFtcGxlLm9yZy8KMDAxNWlkZW50aWZpZXIga2V5aWQKMDAxZGNpZCBhY2NvdW50ID0gMzczNTkyODU1OQowMDE1Y2lkIHVzZXIgPSBhbGljZQowMDJmc2lnbmF0dXJlIEvpZ80eoMaya69qSpTumwWxWIbaC6hejEKpPI0OEl78Cg\";\n 
let signature: MacaroonKey = [\n 75, 233, 103, 205, 30, 160, 198, 178, 107, 175, 106, 74, 148, 238, 155, 5, 177, 88,\n 134, 218, 11, 168, 94, 140, 66, 169, 60, 141, 14, 18, 94, 252,\n ]\n .into();\n let macaroon = super::deserialize(&serialized.as_bytes().to_vec()).unwrap();\n assert!(macaroon.location().is_some());\n assert_eq!(\"http://example.org/\", &macaroon.location().unwrap());\n assert_eq!(ByteString::from(\"keyid\"), macaroon.identifier());\n assert_eq!(signature, macaroon.signature());\n assert_eq!(2, macaroon.caveats().len());\n let predicate = match &macaroon.caveats()[0] {\n Caveat::FirstParty(fp) => fp.predicate(),\n _ => ByteString::default(),\n };\n assert_eq!(ByteString::from(\"account = 3735928559\"), predicate);\n let predicate = match &macaroon.caveats()[1] {\n Caveat::FirstParty(fp) => fp.predicate(),\n _ => ByteString::default(),\n };\n assert_eq!(ByteString::from(\"user = alice\"), predicate);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_serialize_deserialize() {\n let mut macaroon: Macaroon = Macaroon::create(\n Some(\"http://example.org/\".into()),\n &\"my key\".into(),\n \"keyid\".into(),\n )\n .unwrap();\n macaroon.add_first_party_caveat(\"account = 3735928559\".into());\n macaroon.add_first_party_caveat(\"user = alice\".into());\n macaroon.add_third_party_caveat(\n \"https://auth.mybank.com\",\n &\"caveat key\".into(),\n \"caveat\".into(),\n );\n let serialized = macaroon.serialize(super::super::Format::V1).unwrap();\n let deserialized = Macaroon::deserialize(&serialized).unwrap();\n assert_eq!(macaroon, deserialized);\n }\n}" ]
f70262df02c7d1c2e8c07a1c139c588f5708a655
10,018
rs
Rust
third-party/RustaCUDA/src/module.rs
fossabot/necsim-rust
996b6a6977bc27a997a123e3e4f5a7b11e1a1aef
[ "Apache-2.0", "MIT" ]
null
null
null
third-party/RustaCUDA/src/module.rs
fossabot/necsim-rust
996b6a6977bc27a997a123e3e4f5a7b11e1a1aef
[ "Apache-2.0", "MIT" ]
null
null
null
third-party/RustaCUDA/src/module.rs
fossabot/necsim-rust
996b6a6977bc27a997a123e3e4f5a7b11e1a1aef
[ "Apache-2.0", "MIT" ]
null
null
null
//! Functions and types for working with CUDA modules. use crate::{ error::{CudaResult, DropResult, IntoResult}, function::Function, memory::{CopyDestination, DeviceCopy, DevicePointer}, }; use cuda_driver_sys as cuda; use std::{ ffi::{c_void, CStr}, fmt, marker::PhantomData, mem, ptr, }; /// A compiled CUDA module, loaded into a context. #[derive(Debug)] pub struct Module { inner: cuda::CUmodule, } impl Module { /// Load a module from the given file name into the current context. /// /// The given file should be either a cubin file, a ptx file, or a fatbin /// file such as those produced by `nvcc`. /// /// # Example /// /// ``` /// # use rustacuda::*; /// # use std::error::Error; /// # fn main() -> Result<(), Box<dyn Error>> { /// # let _ctx = quick_init()?; /// use rustacuda::module::Module; /// use std::ffi::CString; /// /// let filename = CString::new("./resources/add.ptx")?; /// let module = Module::load_from_file(&filename)?; /// # Ok(()) /// # } /// ``` pub fn load_from_file(filename: &CStr) -> CudaResult<Module> { unsafe { let mut module = Module { inner: ptr::null_mut(), }; cuda::cuModuleLoad(&mut module.inner as *mut cuda::CUmodule, filename.as_ptr()) .into_result()?; Ok(module) } } /// Load a module from a CStr. /// /// This is useful in combination with `include_str!`, to include the device /// code into the compiled executable. /// /// The given CStr must contain the bytes of a cubin file, a ptx file or a /// fatbin file such as those produced by `nvcc`. 
/// /// # Example /// /// ``` /// # use rustacuda::*; /// # use std::error::Error; /// # fn main() -> Result<(), Box<dyn Error>> { /// # let _ctx = quick_init()?; /// use rustacuda::module::Module; /// use std::ffi::CString; /// /// let image = CString::new(include_str!("../resources/add.ptx"))?; /// let module = Module::load_from_string(&image)?; /// # Ok(()) /// # } /// ``` pub fn load_from_string(image: &CStr) -> CudaResult<Module> { unsafe { let mut module = Module { inner: ptr::null_mut(), }; cuda::cuModuleLoadData( &mut module.inner as *mut cuda::CUmodule, image.as_ptr() as *const c_void, ) .into_result()?; Ok(module) } } /// Get a reference to a global symbol, which can then be copied to/from. /// /// # Panics: /// /// This function panics if the size of the symbol is not the same as the /// `mem::sizeof<T>()`. /// /// # Examples /// /// ``` /// # use rustacuda::*; /// # use rustacuda::memory::CopyDestination; /// # use std::error::Error; /// # fn main() -> Result<(), Box<dyn Error>> { /// # let _ctx = quick_init()?; /// use rustacuda::module::Module; /// use std::ffi::CString; /// /// let ptx = CString::new(include_str!("../resources/add.ptx"))?; /// let module = Module::load_from_string(&ptx)?; /// let name = CString::new("my_constant")?; /// let symbol = module.get_global::<u32>(&name)?; /// let mut host_const = 0; /// symbol.copy_to(&mut host_const)?; /// assert_eq!(314, host_const); /// # Ok(()) /// # } /// ``` pub fn get_global<'a, T: DeviceCopy>(&'a self, name: &CStr) -> CudaResult<Symbol<'a, T>> { unsafe { let mut ptr: DevicePointer<T> = DevicePointer::null(); let mut size: usize = 0; cuda::cuModuleGetGlobal_v2( &mut ptr as *mut DevicePointer<T> as *mut cuda::CUdeviceptr, &mut size as *mut usize, self.inner, name.as_ptr(), ) .into_result()?; assert_eq!(size, mem::size_of::<T>()); Ok(Symbol { ptr, module: PhantomData, }) } } /// Get a reference to a kernel function which can then be launched. 
/// /// # Examples /// /// ``` /// # use rustacuda::*; /// # use std::error::Error; /// # fn main() -> Result<(), Box<dyn Error>> { /// # let _ctx = quick_init()?; /// use rustacuda::module::Module; /// use std::ffi::CString; /// /// let ptx = CString::new(include_str!("../resources/add.ptx"))?; /// let module = Module::load_from_string(&ptx)?; /// let name = CString::new("sum")?; /// let function = module.get_function(&name)?; /// # Ok(()) /// # } /// ``` pub fn get_function<'a>(&'a self, name: &CStr) -> CudaResult<Function<'a>> { unsafe { let mut func: cuda::CUfunction = ptr::null_mut(); cuda::cuModuleGetFunction( &mut func as *mut cuda::CUfunction, self.inner, name.as_ptr(), ) .into_result()?; Ok(Function::new(func, self)) } } /// Destroy a `Module`, returning an error. /// /// Destroying a module can return errors from previous asynchronous work. /// This function destroys the given module and returns the error and /// the un-destroyed module on failure. /// /// # Example /// /// ``` /// # use rustacuda::*; /// # use std::error::Error; /// # fn main() -> Result<(), Box<dyn Error>> { /// # let _ctx = quick_init()?; /// use rustacuda::module::Module; /// use std::ffi::CString; /// /// let ptx = CString::new(include_str!("../resources/add.ptx"))?; /// let module = Module::load_from_string(&ptx)?; /// match Module::drop(module) { /// Ok(()) => println!("Successfully destroyed"), /// Err((e, module)) => { /// println!("Failed to destroy module: {:?}", e); /// // Do something with module /// }, /// } /// # Ok(()) /// # } /// ``` pub fn drop(mut module: Module) -> DropResult<Module> { if module.inner.is_null() { return Ok(()); } unsafe { let inner = mem::replace(&mut module.inner, ptr::null_mut()); match cuda::cuModuleUnload(inner).into_result() { Ok(()) => { mem::forget(module); Ok(()) }, Err(e) => Err((e, Module { inner })), } } } } impl Drop for Module { fn drop(&mut self) { if self.inner.is_null() { return; } unsafe { // No choice but to panic if this fails... 
let module = mem::replace(&mut self.inner, ptr::null_mut()); cuda::cuModuleUnload(module) .into_result() .expect("Failed to unload CUDA module"); } } } /// Handle to a symbol defined within a CUDA module. #[derive(Debug)] pub struct Symbol<'a, T: DeviceCopy> { ptr: DevicePointer<T>, module: PhantomData<&'a Module>, } impl<'a, T: DeviceCopy> crate::private::Sealed for Symbol<'a, T> {} impl<'a, T: DeviceCopy> fmt::Pointer for Symbol<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Pointer::fmt(&self.ptr, f) } } impl<'a, T: DeviceCopy> CopyDestination<T> for Symbol<'a, T> { fn copy_from(&mut self, val: &T) -> CudaResult<()> { let size = mem::size_of::<T>(); if size != 0 { unsafe { cuda::cuMemcpyHtoD_v2( self.ptr.as_raw_mut() as u64, val as *const T as *const c_void, size, ) .into_result()? } } Ok(()) } fn copy_to(&self, val: &mut T) -> CudaResult<()> { let size = mem::size_of::<T>(); if size != 0 { unsafe { cuda::cuMemcpyDtoH_v2( val as *const T as *mut c_void, self.ptr.as_raw() as u64, size, ) .into_result()? 
} } Ok(()) } } #[cfg(test)] mod test { use super::*; use crate::quick_init; use std::{error::Error, ffi::CString}; #[test] fn test_load_from_file() -> Result<(), Box<dyn Error>> { let _context = quick_init(); let filename = CString::new("./resources/add.ptx")?; let module = Module::load_from_file(&filename)?; drop(module); Ok(()) } #[test] fn test_load_from_memory() -> Result<(), Box<dyn Error>> { let _context = quick_init(); let ptx_text = CString::new(include_str!("../resources/add.ptx"))?; let module = Module::load_from_string(&ptx_text)?; drop(module); Ok(()) } #[test] fn test_copy_from_module() -> Result<(), Box<dyn Error>> { let _context = quick_init(); let ptx = CString::new(include_str!("../resources/add.ptx"))?; let module = Module::load_from_string(&ptx)?; let constant_name = CString::new("my_constant")?; let symbol = module.get_global::<u32>(&constant_name)?; let mut constant_copy = 0u32; symbol.copy_to(&mut constant_copy)?; assert_eq!(314, constant_copy); Ok(()) } #[test] fn test_copy_to_module() -> Result<(), Box<dyn Error>> { let _context = quick_init(); let ptx = CString::new(include_str!("../resources/add.ptx"))?; let module = Module::load_from_string(&ptx)?; let constant_name = CString::new("my_constant")?; let mut symbol = module.get_global::<u32>(&constant_name)?; symbol.copy_from(&100)?; let mut constant_copy = 0u32; symbol.copy_to(&mut constant_copy)?; assert_eq!(100, constant_copy); Ok(()) } }
29.994012
94
0.50539
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_load_from_file() -> Result<(), Box<dyn Error>> {\n let _context = quick_init();\n\n let filename = CString::new(\"./resources/add.ptx\")?;\n let module = Module::load_from_file(&filename)?;\n drop(module);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_load_from_memory() -> Result<(), Box<dyn Error>> {\n let _context = quick_init();\n let ptx_text = CString::new(include_str!(\"../resources/add.ptx\"))?;\n let module = Module::load_from_string(&ptx_text)?;\n drop(module);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_copy_from_module() -> Result<(), Box<dyn Error>> {\n let _context = quick_init();\n\n let ptx = CString::new(include_str!(\"../resources/add.ptx\"))?;\n let module = Module::load_from_string(&ptx)?;\n\n let constant_name = CString::new(\"my_constant\")?;\n let symbol = module.get_global::<u32>(&constant_name)?;\n\n let mut constant_copy = 0u32;\n symbol.copy_to(&mut constant_copy)?;\n assert_eq!(314, constant_copy);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_copy_to_module() -> Result<(), Box<dyn Error>> {\n let _context = quick_init();\n\n let ptx = CString::new(include_str!(\"../resources/add.ptx\"))?;\n let module = Module::load_from_string(&ptx)?;\n\n let constant_name = CString::new(\"my_constant\")?;\n let mut symbol = module.get_global::<u32>(&constant_name)?;\n\n symbol.copy_from(&100)?;\n\n let mut constant_copy = 0u32;\n symbol.copy_to(&mut constant_copy)?;\n assert_eq!(100, constant_copy);\n Ok(())\n }\n}" ]
f702ab0b22e585a7973fceb4875d1617be3b947c
8,766
rs
Rust
src/names/ncname.rs
lo48576/xml-string
dd589ba6216e33b72f88f2ad7add3a92a3b21c54
[ "Apache-2.0", "MIT" ]
null
null
null
src/names/ncname.rs
lo48576/xml-string
dd589ba6216e33b72f88f2ad7add3a92a3b21c54
[ "Apache-2.0", "MIT" ]
null
null
null
src/names/ncname.rs
lo48576/xml-string
dd589ba6216e33b72f88f2ad7add3a92a3b21c54
[ "Apache-2.0", "MIT" ]
null
null
null
//! [`NCName`]. //! //! [`NCName`]: https://www.w3.org/TR/2009/REC-xml-names-20091208/#NT-NCName use core::convert::TryFrom; use crate::names::chars; use crate::names::error::{NameError, TargetNameType}; use crate::names::{Eqname, Name, Nmtoken, Qname}; /// String slice for [`NCName`]. /// /// [`NCName`]: https://www.w3.org/TR/2009/REC-xml-names-20091208/#NT-NCName #[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(transparent)] pub struct Ncname(str); #[allow(clippy::len_without_is_empty)] impl Ncname { /// Creates a new `&Ncname`. /// /// # Failures /// /// Fails if the given string is not a valid [`NCName`]. /// /// # Examples /// /// ``` /// # use xml_string::names::Ncname; /// let name = Ncname::from_str("hello")?; /// assert_eq!(name, "hello"); /// /// assert!(Ncname::from_str("").is_err(), "Empty string is not an NCName"); /// assert!(Ncname::from_str("foo bar").is_err(), "Whitespace is not allowed"); /// assert!(Ncname::from_str("foo:bar").is_err(), "Colon is not allowed"); /// assert!(Ncname::from_str("0foo").is_err(), "ASCII digit at the beginning is not allowed"); /// # Ok::<_, xml_string::names::NameError>(()) /// ``` /// /// [`NCName`]: https://www.w3.org/TR/2009/REC-xml-names-20091208/#NT-NCName // `FromStr` can be implemented only for types with static lifetime. #[allow(clippy::should_implement_trait)] #[inline] pub fn from_str(s: &str) -> Result<&Self, NameError> { <&Self>::try_from(s) } /// Creates a new `&Ncname` without validation. /// /// # Safety /// /// The given string should be a valid [`NCName`]. /// /// # Examples /// /// ``` /// # use xml_string::names::Ncname; /// let name = unsafe { /// Ncname::new_unchecked("hello") /// }; /// assert_eq!(name, "hello"); /// ``` /// /// [`NCName`]: https://www.w3.org/TR/2009/REC-xml-names-20091208/#NT-NCName #[inline] #[must_use] pub unsafe fn new_unchecked(s: &str) -> &Self { &*(s as *const str as *const Self) } /// Validates the given string. 
fn validate(s: &str) -> Result<(), NameError> { let mut chars = s.char_indices(); // Check the first character. if !chars .next() .map_or(false, |(_, c)| chars::is_ncname_start(c)) { return Err(NameError::new(TargetNameType::Ncname, 0)); } // Check the following characters. if let Some((i, _)) = chars.find(|(_, c)| !chars::is_ncname_continue(*c)) { return Err(NameError::new(TargetNameType::Ncname, i)); } Ok(()) } /// Returns the string as `&str`. /// /// # Examples /// /// ``` /// # use xml_string::names::Ncname; /// let name = Ncname::from_str("hello")?; /// assert_eq!(name, "hello"); /// /// let s: &str = name.as_str(); /// assert_eq!(s, "hello"); /// # Ok::<_, xml_string::names::NameError>(()) /// ``` #[inline] #[must_use] pub fn as_str(&self) -> &str { &self.0 } /// Returns the length of the string in bytes. /// /// # Examples /// /// ``` /// # use xml_string::names::Ncname; /// let name = Ncname::from_str("foo")?; /// assert_eq!(name.len(), 3); /// # Ok::<_, xml_string::names::NameError>(()) /// ``` #[inline] #[must_use] pub fn len(&self) -> usize { self.0.len() } /// Parses the leading `Ncname` and returns the value and the rest input. /// /// # Exmaples /// /// ``` /// # use xml_string::names::Ncname; /// let input = "hello:world"; /// let expected = Ncname::from_str("hello").expect("valid NCName"); /// assert_eq!( /// Ncname::parse_next(input), /// Ok((expected, ":world")) /// ); /// # Ok::<_, xml_string::names::NameError>(()) /// ``` /// /// ``` /// # use xml_string::names::Ncname; /// let input = "012"; /// assert!(Ncname::parse_next(input).is_err()); /// # Ok::<_, xml_string::names::NameError>(()) /// ``` pub fn parse_next(s: &str) -> Result<(&Self, &str), NameError> { match Self::from_str(s) { Ok(v) => Ok((v, &s[s.len()..])), Err(e) if e.valid_up_to() == 0 => Err(e), Err(e) => { let valid_up_to = e.valid_up_to(); let v = unsafe { let valid = &s[..valid_up_to]; debug_assert!(Self::validate(valid).is_ok()); // This is safe because the substring is valid. 
Self::new_unchecked(valid) }; Ok((v, &s[valid_up_to..])) } } } /// Converts a `Box<Ncname>` into a `Box<str>` without copying or allocating. /// /// # Examples /// /// ``` /// # use xml_string::names::Ncname; /// let name = Ncname::from_str("ncname")?; /// let boxed_name: Box<Ncname> = name.into(); /// assert_eq!(&*boxed_name, name); /// let boxed_str: Box<str> = boxed_name.into_boxed_str(); /// assert_eq!(&*boxed_str, name.as_str()); /// # Ok::<_, xml_string::names::NameError>(()) /// ``` #[cfg(feature = "alloc")] pub fn into_boxed_str(self: alloc::boxed::Box<Self>) -> Box<str> { unsafe { // This is safe because `Ncname` has the same memory layout as `str` // (thanks to `#[repr(transparent)]`). alloc::boxed::Box::<str>::from_raw(alloc::boxed::Box::<Self>::into_raw(self) as *mut str) } } } impl_traits_for_custom_string_slice!(Ncname); impl AsRef<Nmtoken> for Ncname { #[inline] fn as_ref(&self) -> &Nmtoken { unsafe { debug_assert!( Nmtoken::from_str(self.as_str()).is_ok(), "NCName {:?} must be a valid Nmtoken", self.as_str() ); // This is safe because an NCName is also a valid Nmtoken. Nmtoken::new_unchecked(self.as_str()) } } } impl AsRef<Name> for Ncname { #[inline] fn as_ref(&self) -> &Name { unsafe { debug_assert!( Name::from_str(self.as_str()).is_ok(), "An NCName is also a Name" ); Name::new_unchecked(self.as_str()) } } } impl AsRef<Qname> for Ncname { #[inline] fn as_ref(&self) -> &Qname { unsafe { debug_assert!( Qname::from_str(self.as_str()).is_ok(), "An NCName is also a Qname" ); Qname::new_unchecked(self.as_str()) } } } impl AsRef<Eqname> for Ncname { #[inline] fn as_ref(&self) -> &Eqname { unsafe { debug_assert!( Eqname::from_str(self.as_str()).is_ok(), "An NCName is also a Eqname" ); Eqname::new_unchecked(self.as_str()) } } } impl<'a> TryFrom<&'a str> for &'a Ncname { type Error = NameError; fn try_from(s: &'a str) -> Result<Self, Self::Error> { Ncname::validate(s)?; Ok(unsafe { // This is safe because the string is validated. 
Ncname::new_unchecked(s) }) } } impl<'a> TryFrom<&'a Name> for &'a Ncname { type Error = NameError; fn try_from(s: &'a Name) -> Result<Self, Self::Error> { if let Some(colon_pos) = s.as_str().find(':') { return Err(NameError::new(TargetNameType::Ncname, colon_pos)); } unsafe { debug_assert!( Ncname::validate(s.as_str()).is_ok(), "Name {:?} without colons is also a valid NCName", s.as_str() ); Ok(Ncname::new_unchecked(s.as_str())) } } } #[cfg(test)] mod tests { use super::*; fn ensure_eq(s: &str) { assert_eq!( Ncname::from_str(s).expect("Should not fail"), s, "String: {:?}", s ); } fn ensure_error_at(s: &str, valid_up_to: usize) { let err = Ncname::from_str(s).expect_err("Should fail"); assert_eq!(err.valid_up_to(), valid_up_to, "String: {:?}", s); } #[test] fn ncname_str_valid() { ensure_eq("hello"); ensure_eq("abc123"); } #[test] fn ncname_str_invalid() { ensure_error_at("", 0); ensure_error_at("-foo", 0); ensure_error_at("0foo", 0); ensure_error_at("foo bar", 3); ensure_error_at("foo/bar", 3); ensure_error_at("foo:bar", 3); ensure_error_at(":foo", 0); ensure_error_at("foo:", 3); } }
28.186495
101
0.509354
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ncname_str_valid() {\n ensure_eq(\"hello\");\n ensure_eq(\"abc123\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ncname_str_invalid() {\n ensure_error_at(\"\", 0);\n ensure_error_at(\"-foo\", 0);\n ensure_error_at(\"0foo\", 0);\n ensure_error_at(\"foo bar\", 3);\n ensure_error_at(\"foo/bar\", 3);\n\n ensure_error_at(\"foo:bar\", 3);\n ensure_error_at(\":foo\", 0);\n ensure_error_at(\"foo:\", 3);\n }\n}" ]
f702dfcd3ce66614a66e15b03e3171137bff0239
535
rs
Rust
jimmys-first-rust-lambda/src/main.rs
JimTheMan/Jimmys-First-Serverless-Rust-Lambda
f6bc5d3871758ac2921e0dabea11bda37f2e34ee
[ "MIT" ]
null
null
null
jimmys-first-rust-lambda/src/main.rs
JimTheMan/Jimmys-First-Serverless-Rust-Lambda
f6bc5d3871758ac2921e0dabea11bda37f2e34ee
[ "MIT" ]
null
null
null
jimmys-first-rust-lambda/src/main.rs
JimTheMan/Jimmys-First-Serverless-Rust-Lambda
f6bc5d3871758ac2921e0dabea11bda37f2e34ee
[ "MIT" ]
null
null
null
use lambda_runtime::{error::HandlerError, lambda, Context}; use serde_json::Value; fn main() { lambda!(handler) } fn handler( event: Value, _: Context, ) -> Result<Value, HandlerError> { Ok(event) } #[cfg(test)] mod tests { use super::*; use serde_json::json; #[test] fn handler_handles() { let event = json!({ "answer": 42 }); assert_eq!( handler(event.clone(), Context::default()).expect("expected Ok(_) value"), event ) } }
17.258065
86
0.540187
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn handler_handles() {\n let event = json!({\n \"answer\": 42\n });\n assert_eq!(\n handler(event.clone(), Context::default()).expect(\"expected Ok(_) value\"),\n event\n )\n }\n}" ]
f702fe1374da6122b5dbaeb021f1afa7c72552f4
28,698
rs
Rust
src/manifest.rs
nickbabcock/cargo-deb
c8fbe560126afaef95770fc2e1338172bc8df45c
[ "MIT" ]
null
null
null
src/manifest.rs
nickbabcock/cargo-deb
c8fbe560126afaef95770fc2e1338172bc8df45c
[ "MIT" ]
null
null
null
src/manifest.rs
nickbabcock/cargo-deb
c8fbe560126afaef95770fc2e1338172bc8df45c
[ "MIT" ]
null
null
null
use std::env::consts::{DLL_PREFIX, DLL_SUFFIX}; use std::path::{Path, PathBuf}; use std::process::Command; use std::fs; use std::collections::{HashMap, HashSet}; use std::borrow::Cow; use listener::Listener; use toml; use file; use glob; use dependencies::resolve; use serde_json; use error::*; use try::Try; use config::CargoConfig; fn is_glob_pattern(s: &str) -> bool { s.contains('*') || s.contains('[') || s.contains(']') || s.contains('!') } #[derive(Debug, Clone)] pub enum AssetSource { /// Copy file from the path (and strip binary if needed). Path(PathBuf), /// Write data to destination as-is. Data(Vec<u8>), } impl AssetSource { pub fn path(&self) -> Option<&Path> { match *self { AssetSource::Path(ref p) => Some(p), _ => None, } } pub fn len(&self) -> Option<u64> { match *self { // FIXME: may not be accurate if the executable is not stripped yet? AssetSource::Path(ref p) => { fs::metadata(p).ok().map(|m| m.len()) }, AssetSource::Data(ref d) => { Some(d.len() as u64) }, } } pub fn data(&self) -> CDResult<Cow<[u8]>> { Ok(match *self { AssetSource::Path(ref p) => { let data = file::get(p) .map_err(|e| CargoDebError::IoFile("unable to read asset to add to archive", e, p.to_owned()))?; Cow::Owned(data) }, AssetSource::Data(ref d) => { Cow::Borrowed(d) }, }) } } #[derive(Debug, Clone)] pub struct Asset { pub source: AssetSource, pub target_path: PathBuf, pub chmod: u32, is_built: bool, } impl Asset { pub fn new(source: AssetSource, mut target_path: PathBuf, chmod: u32, is_built: bool) -> Self { if target_path.is_absolute() { target_path = target_path.strip_prefix("/").expect("no root dir").to_owned(); } // is_dir() is only for paths that exist if target_path.to_string_lossy().ends_with('/') { let file_name = source.path().and_then(|p| p.file_name()).expect("source must be a file"); target_path = target_path.join(file_name); } Self { source, target_path, chmod, is_built, } } fn is_executable(&self) -> bool { 0 != (self.chmod & 0o111) } fn is_dynamic_library(&self) -> 
bool { self.target_path.file_name() .and_then(|f| f.to_str()) .map_or(false, |f| f.ends_with(DLL_SUFFIX)) } } #[derive(Debug)] /// Cargo deb configuration read from the manifest and cargo metadata pub struct Config { /// Root directory where `Cargo.toml` is located. It's a subdirectory in workspaces. pub manifest_dir: PathBuf, /// Triple. `None` means current machine architecture. pub target: Option<String>, /// `CARGO_TARGET_DIR` pub target_dir: PathBuf, /// The name of the project to build pub name: String, /// The software license of the project (SPDX format). pub license: Option<String>, /// The location of the license file pub license_file: Option<PathBuf>, /// number of lines to skip when reading `license_file` pub license_file_skip_lines: usize, /// The copyright of the project /// (Debian's `copyright` file contents). pub copyright: String, pub changelog: Option<String>, /// The version number of the project. pub version: String, /// The homepage URL of the project. pub homepage: Option<String>, /// Documentation URL from `Cargo.toml`. Fallback if `homepage` is missing. pub documentation: Option<String>, /// The URL of the software repository. pub repository: Option<String>, /// A short description of the project. pub description: String, /// An extended description of the project. pub extended_description: Option<String>, /// The maintainer of the Debian package. /// In Debian `control` file `Maintainer` field format. pub maintainer: String, /// The Debian dependencies required to run the project. pub depends: String, /// The Debian software category to which the package belongs. pub section: Option<String>, /// The Debian priority of the project. Typically 'optional'. pub priority: String, /// `Conflicts` Debian control field. /// /// See [PackageTransition](https://wiki.debian.org/PackageTransition). pub conflicts: Option<String>, /// `Breaks` Debian control field. /// /// See [PackageTransition](https://wiki.debian.org/PackageTransition). 
pub breaks: Option<String>, /// `Replaces` Debian control field. /// /// See [PackageTransition](https://wiki.debian.org/PackageTransition). pub replaces: Option<String>, /// `Provides` Debian control field. /// /// See [PackageTransition](https://wiki.debian.org/PackageTransition). pub provides: Option<String>, /// The Debian architecture of the target system. pub architecture: String, /// A list of configuration files installed by the package. pub conf_files: Option<String>, /// All of the files that are to be packaged. pub assets: Vec<Asset>, /// The path were possible maintainer scripts live pub maintainer_scripts: Option<PathBuf>, /// List of Cargo features to use during build pub features: Vec<String>, pub default_features: bool, /// Should the binary be stripped from debug symbols? pub strip: bool, _use_constructor_to_make_this_struct_: (), } impl Config { /// Makes a new config from `Cargo.toml` in the current working directory. /// /// `None` target means the host machine's architecture. 
pub fn from_manifest(manifest_path: &Path, target: Option<&str>, variant: Option<&str>, listener: &mut Listener) -> CDResult<Config> { let metadata = cargo_metadata(manifest_path)?; let root_id = metadata.resolve.root; let root_package = metadata.packages.iter() .find(|p| p.id == root_id) .ok_or("Unable to find root package in cargo metadata")?; let target_dir = Path::new(&metadata.target_directory); let manifest_path = Path::new(&root_package.manifest_path); let manifest_dir = manifest_path.parent().unwrap(); let content = file::get_text(&manifest_path) .map_err(|e| CargoDebError::IoFile("unable to read Cargo.toml", e, manifest_path.to_owned()))?; toml::from_str::<Cargo>(&content)?.into_config(root_package, manifest_dir, target_dir, target, variant, listener) } pub(crate) fn get_dependencies(&self, listener: &mut Listener) -> CDResult<String> { let mut deps = HashSet::new(); for word in self.depends.split(',') { let word = word.trim(); if word == "$auto" { for bname in self.all_binaries().into_iter().filter_map(|p| p.path()) { match resolve(bname, &self.architecture, listener) { Ok(bindeps) => for dep in bindeps { deps.insert(dep); }, Err(err) => { listener.warning(format!("{} (no auto deps for {})", err, bname.display())); }, }; } } else { deps.insert(word.to_owned()); } } Ok(deps.into_iter().collect::<Vec<_>>().join(", ")) } pub(crate) fn add_copyright_asset(&mut self) -> CDResult<()> { let copyright_file = ::data::generate_copyright_asset(self)?; self.assets.push(Asset::new( AssetSource::Data(copyright_file), Path::new("usr/share/doc") .join(&self.name) .join("copyright"), 0o644, false, )); Ok(()) } fn add_changelog_asset(&mut self) -> CDResult<()> { // The file is autogenerated later if self.changelog.is_some() { if let Some(changelog_file) = ::data::generate_changelog_asset(self)? 
{ self.assets.push(Asset::new( AssetSource::Data(changelog_file), Path::new("usr/share/doc") .join(&self.name) .join("changelog.gz"), 0o644, false, )); } } Ok(()) } /// Executables AND dynamic libraries fn all_binaries(&self) -> Vec<&AssetSource> { self.binaries(false) } /// Executables AND dynamic libraries, but only in `target/release` pub(crate) fn built_binaries(&self) -> Vec<&AssetSource> { self.binaries(true) } fn binaries(&self, built_only: bool) -> Vec<&AssetSource> { self.assets.iter().filter_map(|asset| { // Assumes files in build dir which have executable flag set are binaries if (!built_only || asset.is_built) && (asset.is_dynamic_library() || asset.is_executable()) { Some(&asset.source) } else { None } }).collect() } /// Tries to guess type of source control used for the repo URL. /// It's a guess, and it won't be 100% accurate, because Cargo suggests using /// user-friendly URLs or webpages instead of tool-specific URL schemes. pub(crate) fn repository_type(&self) -> Option<&str> { if let Some(ref repo) = self.repository { if repo.starts_with("git+") || repo.ends_with(".git") || repo.contains("git@") || repo.contains("github.com") || repo.contains("gitlab.com") { return Some("Git"); } if repo.starts_with("cvs+") || repo.contains("pserver:") || repo.contains("@cvs.") { return Some("Cvs"); } if repo.starts_with("hg+") || repo.contains("hg@") || repo.contains("/hg.") { return Some("Hg"); } if repo.starts_with("svn+") || repo.contains("/svn.") { return Some("Svn"); } return None; } None } pub(crate) fn path_in_build<P: AsRef<Path>>(&self, rel_path: P) -> PathBuf { self.target_dir.join("release").join(rel_path) } pub(crate) fn path_in_workspace<P: AsRef<Path>>(&self, rel_path: P) -> PathBuf { self.manifest_dir.join(rel_path) } pub(crate) fn deb_dir(&self) -> PathBuf { self.target_dir.join("debian") } pub fn path_in_deb<P: AsRef<Path>>(&self, rel_path: P) -> PathBuf { self.deb_dir().join(rel_path) } pub(crate) fn cargo_config(&self) -> 
CDResult<Option<CargoConfig>> { CargoConfig::new(&self.target_dir) } } #[derive(Clone, Debug, Deserialize)] struct Cargo { pub package: CargoPackage, pub profile: Option<CargoProfiles>, } impl Cargo { /// Convert Cargo.toml/metadata information into internal configu structure /// /// **IMPORTANT**: This function must not create or expect to see any files on disk! /// It's run before destination directory is cleaned up, and before the build start! /// fn into_config( mut self, root_package: &CargoMetadataPackage, manifest_dir: &Path, target_dir: &Path, target: Option<&str>, variant: Option<&str>, listener: &mut Listener, ) -> CDResult<Config> { // Cargo cross-compiles to a dir let target_dir = if let Some(target) = target { target_dir.join(target) } else { target_dir.to_owned() }; // If we build against a variant use that config and change the package name let mut deb = if let Some(variant) = variant { // Use dash as underscore is not allowed in package names self.package.name = format!("{}-{}", self.package.name, variant); let mut deb = self.package .metadata .take() .and_then(|m| m.deb) .unwrap_or_else(CargoDeb::default); let variant = deb.variants .as_mut() .and_then(|v| v.remove(variant)) .ok_or(CargoDebError::VariantNotFound(variant.to_string()))?; variant.inherit_from(deb) } else { self.package .metadata .take() .and_then(|m| m.deb) .unwrap_or_else(CargoDeb::default) }; let (license_file, license_file_skip_lines) = self.license_file(deb.license_file.as_ref())?; let readme = self.package.readme.as_ref(); self.check_config(manifest_dir, readme, &deb, listener); let mut config = Config { manifest_dir: manifest_dir.to_owned(), target: target.map(|t| t.to_string()), target_dir, name: self.package.name.clone(), license: self.package.license.take(), license_file, license_file_skip_lines, copyright: deb.copyright.take().ok_or_then(|| { Ok(self.package.authors.as_ref().ok_or("Package must have a copyright or authors")?.join(", ")) })?, version: 
self.version_string(deb.revision), homepage: self.package.homepage.clone(), documentation: self.package.documentation.clone(), repository: self.package.repository.take(), description: self.package.description.take().unwrap_or_else(||format!("[generated from Rust crate {}]", self.package.name)), extended_description: self.extended_description(deb.extended_description.take(), readme)?, maintainer: deb.maintainer.take().ok_or_then(|| { Ok(self.package.authors.as_ref().and_then(|a|a.get(0)) .ok_or("Package must have a maintainer or authors")?.to_owned()) })?, depends: deb.depends.take().unwrap_or("$auto".to_owned()), conflicts: deb.conflicts.take(), breaks: deb.breaks.take(), replaces: deb.replaces.take(), provides: deb.provides.take(), section: deb.section.take(), priority: deb.priority.take().unwrap_or("optional".to_owned()), architecture: get_arch(target.unwrap_or(::DEFAULT_TARGET)).to_owned(), conf_files: deb.conf_files.map(|x| x.iter().fold(String::new(), |a, b| a + b + "\n")), assets: vec![], changelog: deb.changelog.take(), maintainer_scripts: deb.maintainer_scripts.map(PathBuf::from), features: deb.features.take().unwrap_or(vec![]), default_features: deb.default_features.unwrap_or(true), strip: self.profile.as_ref().and_then(|p|p.release.as_ref()) .and_then(|r|r.debug).map(|debug|!debug).unwrap_or(true), _use_constructor_to_make_this_struct_: (), }; let assets = self.take_assets(&config, deb.assets.take(), &root_package.targets, readme)?; if assets.is_empty() { Err("No binaries or cdylibs found. The package is empty. 
Please specify some assets to package in Cargo.toml")?; } config.assets.extend(assets); config.add_copyright_asset()?; config.add_changelog_asset()?; Ok(config) } fn check_config(&self, manifest_dir: &Path, readme: Option<&String>, deb: &CargoDeb, listener: &mut Listener) { if self.package.description.is_none() { listener.warning("description field is missing in Cargo.toml".to_owned()); } if self.package.license.is_none() { listener.warning("license field is missing in Cargo.toml".to_owned()); } if let Some(readme) = readme { if deb.extended_description.is_none() && (readme.ends_with(".md") || readme.ends_with(".markdown")) { listener.warning(format!("extended-description field missing. Using {}, but markdown may not render well.",readme)); } } else { for p in &["README.md", "README.markdown", "README.txt", "README"] { if manifest_dir.join(p).exists() { listener.warning(format!("{} file exists, but is not specified in `readme` Cargo.toml field", p)); break; } } } } fn extended_description(&self, desc: Option<String>, readme: Option<&String>) -> CDResult<Option<String>> { Ok(if desc.is_some() { desc } else if let Some(readme) = readme { Some(file::get_text(readme) .map_err(|err| CargoDebError::IoFile("unable to read README", err, PathBuf::from(readme)))?) } else { None }) } fn license_file(&mut self, license_file: Option<&Vec<String>>) -> CDResult<(Option<PathBuf>, usize)> { if let Some(args) = license_file { let mut args = args.iter(); let file = args.next(); let lines = if let Some(lines) = args.next() { lines.parse().map_err(|e| CargoDebError::NumParse("invalid number of lines", e))? 
} else {0}; Ok((file.map(|s|s.into()), lines)) } else { Ok((self.package.license_file.as_ref().map(|s| s.into()), 0)) } } fn take_assets(&self, options: &Config, assets: Option<Vec<Vec<String>>>, targets: &[CargoMetadataTarget], readme: Option<&String>) -> CDResult<Vec<Asset>> { Ok(if let Some(assets) = assets { let mut all_assets = Vec::with_capacity(assets.len()); for mut v in assets { let mut v = v.drain(..); let source_path = PathBuf::from(v.next() .ok_or("missing path (first array entry) for asset in Cargo.toml")?); let (is_built, source_path) = if let Ok(rel_path) = source_path.strip_prefix("target/release") { (true, options.path_in_build(rel_path)) } else { (false, options.path_in_workspace(&source_path)) }; let target_path = PathBuf::from(v.next().ok_or("missing target (second array entry) for asset in Cargo.toml")?); let mode = u32::from_str_radix(&v.next().ok_or("missing chmod (third array entry) for asset in Cargo.toml")?, 8) .map_err(|e| CargoDebError::NumParse("unable to parse chmod argument", e))?; let source_prefix: PathBuf = source_path.iter() .take_while(|part| !is_glob_pattern(part.to_str().unwrap())) .collect(); let source_is_glob = is_glob_pattern(source_path.to_str().unwrap()); let mut file_matches = glob::glob(source_path.to_str().unwrap())? // Remove dirs from globs without throwing away errors .map(|entry| { let source_file = entry?; Ok(if source_file.is_dir() { None } else { Some(source_file) }) }) .filter_map(|res| match res { Ok(None) => None, Ok(Some(x)) => Some(Ok(x)), Err(x) => Some(Err(x)), }) .collect::<CDResult<Vec<_>>>()?; // If glob didn't match anything, it's probably a regular path // to a file that hasn't been built yet if file_matches.is_empty() { file_matches.push(source_path); } for source_file in file_matches { // XXX: how do we handle duplicated assets? 
let target_file = if source_is_glob { target_path.join(source_file.strip_prefix(&source_prefix).unwrap()) } else { target_path.clone() }; all_assets.push(Asset::new( AssetSource::Path(source_file), target_file, mode, is_built, )); } } all_assets } else { let mut implied_assets: Vec<_> = targets .iter() .filter_map(|t| { if t.crate_types.iter().any(|ty| ty == "bin") && t.kind.iter().any(|k| k == "bin") { Some(Asset::new( AssetSource::Path(options.path_in_build(&t.name)), Path::new("usr/bin").join(&t.name), 0o755, true, )) } else if t.crate_types.iter().any(|ty| ty == "cdylib") && t.kind.iter().any(|k| k == "cdylib") { // FIXME: std has constants for the host arch, but not for cross-compilation let lib_name = format!("{}{}{}", DLL_PREFIX, t.name, DLL_SUFFIX); Some(Asset::new( AssetSource::Path(options.path_in_build(&lib_name)), Path::new("usr/lib").join(lib_name), 0o644, true, )) } else { None } }) .collect(); if let Some(readme) = readme { let target_path = Path::new("usr/share/doc").join(&self.package.name).join(readme); implied_assets.push(Asset::new( AssetSource::Path(PathBuf::from(readme)), target_path, 0o644, false, )); } implied_assets }) } fn version_string(&self, revision: Option<String>) -> String { if let Some(revision) = revision { format!("{}-{}", self.package.version, revision) } else { self.package.version.clone() } } } #[derive(Clone, Debug, Deserialize)] #[serde(rename_all = "kebab-case")] struct CargoPackage { pub name: String, pub authors: Option<Vec<String>>, pub license: Option<String>, pub license_file: Option<String>, pub homepage: Option<String>, pub documentation: Option<String>, pub repository: Option<String>, pub version: String, pub description: Option<String>, pub readme: Option<String>, pub metadata: Option<CargoPackageMetadata>, } #[derive(Clone, Debug, Deserialize)] struct CargoPackageMetadata { pub deb: Option<CargoDeb>, } #[derive(Clone, Debug, Deserialize)] struct CargoProfiles { pub release: Option<CargoProfile>, } #[derive(Clone, 
Debug, Deserialize)] struct CargoProfile { pub debug: Option<bool>, } #[derive(Clone, Debug, Deserialize)] #[serde(rename_all = "kebab-case")] struct CargoBin { pub name: String, pub plugin: Option<bool>, pub proc_macro: Option<bool>, } #[derive(Clone, Debug, Deserialize, Default)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] struct CargoDeb { pub maintainer: Option<String>, pub copyright: Option<String>, pub license_file: Option<Vec<String>>, pub changelog: Option<String>, pub depends: Option<String>, pub conflicts: Option<String>, pub breaks: Option<String>, pub replaces: Option<String>, pub provides: Option<String>, pub extended_description: Option<String>, pub section: Option<String>, pub priority: Option<String>, pub revision: Option<String>, pub conf_files: Option<Vec<String>>, pub assets: Option<Vec<Vec<String>>>, pub maintainer_scripts: Option<String>, pub features: Option<Vec<String>>, pub default_features: Option<bool>, pub variants: Option<HashMap<String, CargoDeb>>, } impl CargoDeb { fn inherit_from(self, parent: CargoDeb) -> CargoDeb { CargoDeb { maintainer: self.maintainer.or(parent.maintainer), copyright: self.copyright.or(parent.copyright), license_file: self.license_file.or(parent.license_file), changelog: self.changelog.or(parent.changelog), depends: self.depends.or(parent.depends), conflicts: self.conflicts.or(parent.conflicts), breaks: self.breaks.or(parent.breaks), replaces: self.replaces.or(parent.replaces), provides: self.provides.or(parent.provides), extended_description: self.extended_description.or(parent.extended_description), section: self.section.or(parent.section), priority: self.priority.or(parent.priority), revision: self.revision.or(parent.revision), conf_files: self.conf_files.or(parent.conf_files), assets: self.assets.or(parent.assets), maintainer_scripts: self.maintainer_scripts.or(parent.maintainer_scripts), features: self.features.or(parent.features), default_features: 
self.default_features.or(parent.default_features), variants: self.variants.or(parent.variants), } } } #[derive(Deserialize)] struct CargoMetadata { packages: Vec<CargoMetadataPackage>, resolve: CargoMetadataResolve, target_directory: String, } #[derive(Deserialize)] struct CargoMetadataResolve { root: String, } #[derive(Deserialize)] struct CargoMetadataPackage { pub id: String, pub targets: Vec<CargoMetadataTarget>, pub manifest_path: String, } #[derive(Deserialize)] struct CargoMetadataTarget { pub name: String, pub kind: Vec<String>, pub crate_types: Vec<String>, } /// Returns the path of the `Cargo.toml` that we want to build. fn cargo_metadata(manifest_path: &Path) -> CDResult<CargoMetadata> { let mut cmd = Command::new("cargo"); cmd.arg("metadata"); cmd.arg("--format-version=1"); cmd.arg(format!("--manifest-path={}", manifest_path.display())); let output = cmd.output() .map_err(|e| CargoDebError::CommandFailed(e, "cargo (is it in your PATH?)"))?; if !output.status.success() { return Err(CargoDebError::CommandError("cargo", "metadata".to_owned(), output.stderr)); } let stdout = String::from_utf8(output.stdout).unwrap(); let metadata = serde_json::from_str(&stdout)?; Ok(metadata) } /// Debianizes the architecture name fn get_arch(target: &str) -> &str { let mut parts = target.split('-'); let arch = parts.next().unwrap(); let abi = parts.last().unwrap_or(""); match (arch, abi) { // https://wiki.debian.org/Multiarch/Tuples // rustc --print target-list // https://doc.rust-lang.org/std/env/consts/constant.ARCH.html ("aarch64", _) => "arm64", ("mips64", "gnuabin32") => "mipsn32", ("mips64el", "gnuabin32") => "mipsn32el", ("mipsisa32r6", _) => "mipsr6", ("mipsisa32r6el", _) => "mipsr6el", ("mipsisa64r6", "gnuabi64") => "mips64r6", ("mipsisa64r6", "gnuabin32") => "mipsn32r6", ("mipsisa64r6el", "gnuabi64") => "mips64r6el", ("mipsisa64r6el", "gnuabin32") => "mipsn32r6el", ("powerpc", "gnuspe") => "powerpcspe", ("powerpc64", _) => "ppc64", ("powerpc64le", _) => 
"ppc64el", ("i586", _) | ("i686", _) | ("x86", _) => "i386", ("x86_64", "gnux32") => "x32", ("x86_64", _) => "amd64", (arm, gnueabi) if arm.starts_with("arm") && gnueabi.ends_with("hf") => "armhf", (arm, _) if arm.starts_with("arm") => "armel", (other_arch, _) => other_arch, } } #[test] fn match_arm_arch() { assert_eq!("armhf", get_arch("arm-unknown-linux-gnueabihf")); } #[test] fn assets() { let a = Asset::new( AssetSource::Path(PathBuf::from("target/release/bar")), PathBuf::from("baz/"), 0o644, true, ); assert_eq!("baz/bar", a.target_path.to_str().unwrap()); assert!(a.is_built); let a = Asset::new( AssetSource::Path(PathBuf::from("foo/bar")), PathBuf::from("/baz/quz"), 0o644, false, ); assert_eq!("baz/quz", a.target_path.to_str().unwrap()); assert!(!a.is_built); }
38.111554
161
0.563628
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_arm_arch() {\n assert_eq!(\"armhf\", get_arch(\"arm-unknown-linux-gnueabihf\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn assets() {\n let a = Asset::new(\n AssetSource::Path(PathBuf::from(\"target/release/bar\")),\n PathBuf::from(\"baz/\"),\n 0o644,\n true,\n );\n assert_eq!(\"baz/bar\", a.target_path.to_str().unwrap());\n assert!(a.is_built);\n\n let a = Asset::new(\n AssetSource::Path(PathBuf::from(\"foo/bar\")),\n PathBuf::from(\"/baz/quz\"),\n 0o644,\n false,\n );\n assert_eq!(\"baz/quz\", a.target_path.to_str().unwrap());\n assert!(!a.is_built);\n}\n}" ]
f703035ad9d83c3df928dcdd6238f03faf9ad152
10,322
rs
Rust
kf-protocol-api/src/error.rs
vijaylaxmid/flv-kf-protocol
19d5811be008b941c32c1a293cdc41b13aa024f0
[ "Apache-2.0" ]
null
null
null
kf-protocol-api/src/error.rs
vijaylaxmid/flv-kf-protocol
19d5811be008b941c32c1a293cdc41b13aa024f0
[ "Apache-2.0" ]
null
null
null
kf-protocol-api/src/error.rs
vijaylaxmid/flv-kf-protocol
19d5811be008b941c32c1a293cdc41b13aa024f0
[ "Apache-2.0" ]
null
null
null
use flv_util::string_helper::upper_cammel_case_to_sentence; use serde::{Serialize, Deserialize}; use kf_protocol_derive::Decode; use kf_protocol_derive::Encode; /// kafka error /// https://kafka.apache.org/protocol#protocol_types #[repr(i16)] #[derive(PartialEq, Debug, Clone, Copy, Serialize, Deserialize, Encode, Decode)] pub enum ErrorCode { // The server experienced an unexpected error when processing the request UnknownServerError = -1, None = 0, // The requested offset is not within the range of offsets maintained by the server. OffsetOutOfRange = 1, // This message has failed its CRC checksum, exceeds the valid size, or is otherwise corrupt. CorruptMessage = 2, //This server does not host this topic-partition. UnknownTopicOrPartition = 3, // The requested fetch size is invalid. InvalidFetchSize = 4, // There is no leader for this topic-partition as we are in the middle of a leadership election. LeaderNotAvailable = 5, // This server is not the leader for that topic-partition. NotLeaderForPartition = 6, // The request timed out. RequestTimedOut = 7, // The broker is not available. BrokerNotAvailable = 8, // The replica is not available for the requested topic-partition ReplicaNotAvailable = 9, // The request included a message larger than the max message size the server will accept. MessageTooLarge = 10, // The controller moved to another broker. StaleControllerEpoch = 11, // The metadata field of the offset request was too large. OffsetMetadataTooLarge = 12, // The server disconnected before a response was received. NetworkException = 13, // The coordinator is loading and hence can't process requests. CoordinatorLoadInProgress = 14, // The coordinator is not available. CoordinatorNotAvailable = 15, // This is not the correct coordinato NotCoordinator = 16, // The request attempted to perform an operation on an invalid topic. InvalidTopicException = 17, // The request included message batch larger than the configured segment size on the server. 
RecordListTooLarge = 18, // Messages are rejected since there are fewer in-sync replicas than required. NotEnoughReplicas = 19, // Messages are written to the log, but to fewer in-sync replicas than required. NotEnougReplicasAfterAppend = 20, // Produce request specified an invalid value for required acks. InvalidRequiredAcks = 21, // Specified group generation id is not valid IllegalGeneration = 22, // The group member's supported protocols are incompatible with those of existing members or first group member tried to join with empty protocol type or empty protocol list. InconsistentGroupProtocol = 23, // The configured groupId is invalid InvalidGroupId = 24, // The coordinator is not aware of this member. UnknownMemberId = 25, // The session timeout is not within the range allowed by the broker (as configured by group.min.session.timeout.ms and group.max.session.timeout.ms). InvalidSessionTimeout = 26, // The group is rebalancing, so a rejoin is needed. RebalanceInProgress = 27, // The committing offset data size is not valid InvalidCommitOffsetSize = 28, // Not authorized to access topics: [Topic authorization failed.] TopicAuthorizationFailed = 29, // Not authorized to access group: Group authorization failed. GroupAuthorizationFailed = 30, // Cluster authorization failed. ClusterAuthorizationFailed = 31, // The timestamp of the message is out of acceptable range. InvalidTimestamp = 32, // The broker does not support the requested SASL mechanism. UnsupportedSaslMechanism = 33, // Request is not valid given the current SASL state. IllegalSaslState = 34, // The version of API is not supported. UnsupportedVersion = 35, // Topic with this name already exists. TopicAlreadyExists = 36, // Number of partitions is invalid. InvalidPartitions = 37, // Replication-factor is invalid. InvalidReplicationFactor = 38, // Replica assignment is invalid. InvalidReplicaAssignment = 39, // Configuration is invalid. 
InvalidConfig = 40, // This is not the correct controller for this cluster. NotController = 41, // This most likely occurs because of a request being malformed by the client library or the message was sent to an incompatible broker. See the broker logs for more details InvalidRequest = 42, // The message format version on the broker does not support the request. UnsupportedForMessageFormat = 43, // Request parameters do not satisfy the configured policy. PolicyViolation = 44, // The broker received an out of order sequence number OutOfOrderSequenceNumber = 45, // The broker received a duplicate sequence number DuplicateSequenceNumber = 46, // Producer attempted an operation with an old epoch. Either there is a newer producer with the same transactionalId, or the producer's transaction has been expired by the broker. InvalidProducerEpoch = 47, // The producer attempted a transactional operation in an invalid state InvalidTxnState = 48, // The producer attempted to use a producer id which is not currently assigned to its transactional id InvalidProducerIdMapping = 49, // The transaction timeout is larger than the maximum value allowed by the broker (as configured by transaction.max.timeout.ms). InvalidTransactionTimeout = 50, // The producer attempted to update a transaction while another concurrent operation on the same transaction was ongoing ConcurrentTransactions = 51, // Indicates that the transaction coordinator sending a WriteTxnMarker is no longer the current coordinator for a given producer TransactionCoordinatorFenced = 52, // Transactional Id authorization failed TransactionalIdAuthorizationFailed = 53, // Security features are disabled. SecurityDisabled = 54, // The broker did not attempt to execute this operation. This may happen for batched RPCs where some operations in the batch failed, causing the broker to respond without trying the rest. OperationNotAttempted = 55, // Disk error when trying to access log file on the disk. 
KafkaStorageError = 56, // The user-specified log directory is not found in the broker config. LogDirNotFound = 57, // SASL Authentication failed SaslAuthenticationFailed = 58, // This exception is raised by the broker if it could not locate the producer metadata associated with the producerId in question. This could happen if, for instance, the producer's records were deleted because their retention time had elapsed. Once the last records of the producerId are removed, the producer's metadata is removed from the broker, and future appends by the producer will return this exception. UnknownProducerId = 59, // A partition reassignment is in progress ReassignmentInProgress = 60, // Delegation Token feature is not enabled. DelegationTokenAuthDisabled = 61, // Delegation Token is not found on server. DelegationTokenNotFound = 62, // Specified Principal is not valid Owner/Renewer. DelegationTokenOwnerMismatch = 63, // Delegation Token requests are not allowed on PLAINTEXT/1-way SSL channels and on delegation token authenticated channels. DelegationTokenRequestNotAllowed = 64, // Delegation Token authorization failed. DelegationTokenAuthorizationFailed = 65, // Delegation Token is expired. DelegationTokenExpired = 66, // Supplied principalType is not supported InvalidPrincipleType = 67, // The group The group is not empty is not empty NonEmptyGroup = 68, // The group id The group id does not exist was not found GroupIdNotFound = 69, // The fetch session ID was not found FetchSessionIdNotFound = 70, // The fetch session epoch is invalid InvalidFetchSessionEpoch = 71, // There is no listener on the leader broker that matches the listener // on which metadata request was processed. 
ListenerNotFound = 72, // Topic deletion is disabled TopicDeletionDisabled = 73, // The leader epoch in the request is older than the epoch on the broker FencedLeaderEpoch = 74, // The leader epoch in the request is newer than the epoch on the broker UnknownLeaderEpoch = 75, // The requesting client does not support the compression type of given partition UnsupportedCompressionType = 76, // Broker epoch has changed StaleBrokerEpoch = 77, // The leader high watermark has not caught up from a recent leader election // so the offsets cannot be guaranteed to be monotonically increasing OffsetNotAvailable = 78, // The group member needs to have a valid member id before actually // entering a consumer group MemberIdRequired = 79, // The preferred leader was not available PreferredLeaderNotAvailable = 80, // Consumer group The consumer group has reached maximum number of members allowed GroupMaxSizeReached = 81, } impl Default for ErrorCode { fn default() -> ErrorCode { ErrorCode::None } } impl ErrorCode { pub fn is_ok(&self) -> bool { match self { Self::None => true, _ => false } } pub fn to_string(&self) -> String { match self { ErrorCode::None => "Ok".to_owned(), _ => format!("{:?}", self), } } pub fn to_sentence(&self) -> String { match self { ErrorCode::None => "".to_owned(), _ => upper_cammel_case_to_sentence(format!("{:?}", self), false), } } pub fn is_error(&self) -> bool { match self { ErrorCode::None => false, _ => true, } } } #[cfg(test)] mod test { use std::convert::TryInto; use super::ErrorCode; #[test] fn test_error_code_from_conversion() { let val: i16 = 6; let erro_code: ErrorCode = val.try_into().expect("convert"); assert_eq!(erro_code, ErrorCode::NotLeaderForPartition); } }
32.664557
416
0.706452
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_error_code_from_conversion() {\n let val: i16 = 6;\n let erro_code: ErrorCode = val.try_into().expect(\"convert\");\n assert_eq!(erro_code, ErrorCode::NotLeaderForPartition);\n }\n}" ]
f7033ea3e1d72a6ab831e5f7db1b3ddb9dbacf80
1,627
rs
Rust
tests/gauge.rs
heyrict/tui-rs
2233cdc9cce34342eef778912d87e0121a734f4e
[ "MIT" ]
2
2020-02-24T10:33:22.000Z
2021-11-03T03:25:31.000Z
tests/gauge.rs
heyrict/tui-rs
2233cdc9cce34342eef778912d87e0121a734f4e
[ "MIT" ]
1
2019-11-20T05:48:58.000Z
2019-11-20T05:48:58.000Z
tests/gauge.rs
heyrict/tui-rs
2233cdc9cce34342eef778912d87e0121a734f4e
[ "MIT" ]
2
2020-11-17T13:08:48.000Z
2021-04-18T12:32:43.000Z
use tui::backend::TestBackend; use tui::buffer::Buffer; use tui::layout::{Constraint, Direction, Layout}; use tui::widgets::{Block, Borders, Gauge, Widget}; use tui::Terminal; #[test] fn gauge_render() { let backend = TestBackend::new(40, 10); let mut terminal = Terminal::new(backend).unwrap(); terminal .draw(|mut f| { let chunks = Layout::default() .direction(Direction::Vertical) .margin(2) .constraints([Constraint::Percentage(50), Constraint::Percentage(50)].as_ref()) .split(f.size()); Gauge::default() .block(Block::default().title("Percentage").borders(Borders::ALL)) .percent(43) .render(&mut f, chunks[0]); Gauge::default() .block(Block::default().title("Ratio").borders(Borders::ALL)) .ratio(0.2113139343131) .render(&mut f, chunks[1]); }) .unwrap(); let expected = Buffer::with_lines(vec![ " ", " ", " ┌Percentage────────────────────────┐ ", " │ 43% │ ", " └──────────────────────────────────┘ ", " ┌Ratio─────────────────────────────┐ ", " │ 21% │ ", " └──────────────────────────────────┘ ", " ", " ", ]); assert_eq!(&expected, terminal.backend().buffer()); }
37.837209
95
0.385372
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn gauge_render() {\n let backend = TestBackend::new(40, 10);\n let mut terminal = Terminal::new(backend).unwrap();\n terminal\n .draw(|mut f| {\n let chunks = Layout::default()\n .direction(Direction::Vertical)\n .margin(2)\n .constraints([Constraint::Percentage(50), Constraint::Percentage(50)].as_ref())\n .split(f.size());\n\n Gauge::default()\n .block(Block::default().title(\"Percentage\").borders(Borders::ALL))\n .percent(43)\n .render(&mut f, chunks[0]);\n Gauge::default()\n .block(Block::default().title(\"Ratio\").borders(Borders::ALL))\n .ratio(0.2113139343131)\n .render(&mut f, chunks[1]);\n })\n .unwrap();\n let expected = Buffer::with_lines(vec![\n \" \",\n \" \",\n \" ┌Percentage────────────────────────┐ \",\n \" │ 43% │ \",\n \" └──────────────────────────────────┘ \",\n \" ┌Ratio─────────────────────────────┐ \",\n \" │ 21% │ \",\n \" └──────────────────────────────────┘ \",\n \" \",\n \" \",\n ]);\n assert_eq!(&expected, terminal.backend().buffer());\n}\n}" ]
f70349d0ee08bd80b80becca173598ecb950fc6d
4,133
rs
Rust
src/librustc/lib.rs
AaronFriel/rust
33fcb7e7a41aa7d8b0f691ea92ebe10f20d9bd44
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/librustc/lib.rs
AaronFriel/rust
33fcb7e7a41aa7d8b0f691ea92ebe10f20d9bd44
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/librustc/lib.rs
AaronFriel/rust
33fcb7e7a41aa7d8b0f691ea92ebe10f20d9bd44
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The Rust compiler. //! //! # Note //! //! This API is completely unstable and subject to change. #![crate_name = "rustc"] #![unstable(feature = "rustc_private", issue = "27812")] #![crate_type = "dylib"] #![crate_type = "rlib"] #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "https://doc.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/")] #![cfg_attr(not(stage0), deny(warnings))] #![feature(associated_consts)] #![feature(box_patterns)] #![feature(box_syntax)] #![feature(collections)] #![feature(conservative_impl_trait)] #![feature(const_fn)] #![feature(core_intrinsics)] #![feature(dotdot_in_tuple_patterns)] #![feature(enumset)] #![feature(libc)] #![feature(nonzero)] #![feature(quote)] #![feature(rustc_diagnostic_macros)] #![feature(rustc_private)] #![feature(slice_patterns)] #![feature(staged_api)] #![feature(question_mark)] #![cfg_attr(test, feature(test))] extern crate arena; extern crate core; extern crate flate; extern crate fmt_macros; extern crate getopts; extern crate graphviz; extern crate libc; extern crate rbml; extern crate rustc_llvm as llvm; extern crate rustc_back; extern crate rustc_data_structures; extern crate serialize; extern crate collections; extern crate rustc_const_math; extern crate rustc_errors as errors; #[macro_use] extern crate log; #[macro_use] extern crate syntax; #[macro_use] extern crate syntax_pos; #[macro_use] #[no_link] extern crate rustc_bitflags; extern crate serialize as 
rustc_serialize; // used by deriving #[cfg(test)] extern crate test; #[macro_use] mod macros; // NB: This module needs to be declared first so diagnostics are // registered before they are used. pub mod diagnostics; pub mod cfg; pub mod dep_graph; pub mod hir; pub mod infer; pub mod lint; pub mod middle { pub mod astconv_util; pub mod expr_use_visitor; // STAGE0: increase glitch immunity pub mod const_val; pub mod const_qualif; pub mod cstore; pub mod dataflow; pub mod dead; pub mod dependency_format; pub mod effect; pub mod entry; pub mod free_region; pub mod intrinsicck; pub mod lang_items; pub mod liveness; pub mod mem_categorization; pub mod privacy; pub mod reachable; pub mod region; pub mod recursion_limit; pub mod resolve_lifetime; pub mod stability; pub mod weak_lang_items; } pub mod mir { mod cache; pub mod repr; pub mod tcx; pub mod visit; pub mod transform; pub mod traversal; pub mod mir_map; } pub mod session; pub mod traits; pub mod ty; pub mod util { pub use rustc_back::sha2; pub mod common; pub mod ppaux; pub mod nodemap; pub mod num; pub mod fs; } // A private module so that macro-expanded idents like // `::rustc::lint::Lint` will also work in `rustc` itself. // // `libstd` uses the same trick. #[doc(hidden)] mod rustc { pub use lint; } // FIXME(#27438): right now the unit tests of librustc don't refer to any actual // functions generated in librustc_data_structures (all // references are through generic functions), but statics are // referenced from time to time. Due to this bug we won't // actually correctly link in the statics unless we also // reference a function, so be sure to reference a dummy // function. #[test] fn noop() { rustc_data_structures::__noop_fix_for_27438(); } // Build the diagnostics array at the end so that the metadata includes error use sites. __build_diagnostic_array! { librustc, DIAGNOSTICS }
26.49359
88
0.702153
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn noop() {\n rustc_data_structures::__noop_fix_for_27438();\n}\n}" ]
f703708991631cf0413bc639e0d8b54c950fcdfe
59,635
rs
Rust
tests/client.rs
davidhewitt/fix-rs
25336797b5373893231ea56f43a4c0d8cb3615f5
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
2
2020-10-07T12:41:55.000Z
2021-03-28T03:06:41.000Z
tests/client.rs
davidhewitt/fix-rs
25336797b5373893231ea56f43a4c0d8cb3615f5
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
tests/client.rs
davidhewitt/fix-rs
25336797b5373893231ea56f43a4c0d8cb3615f5
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
// Copyright 2016 James Bendig. See the COPYRIGHT file at the top-level // directory of this distribution. // // Licensed under: // the MIT license // <LICENSE-MIT or https://opensource.org/licenses/MIT> // or the Apache License, Version 2.0 // <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, // at your option. This file may not be copied, modified, or distributed // except according to those terms. #[macro_use] extern crate fix_rs; #[macro_use] extern crate fix_rs_macros; extern crate mio; extern crate phf; use mio::tcp::Shutdown; use std::io::Write; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex}; use std::thread; use std::time::{Duration, Instant}; #[macro_use] mod common; use crate::common::{new_logon_message, TestStream, SERVER_SENDER_COMP_ID, SERVER_TARGET_COMP_ID}; use fix_rs::byte_buffer::ByteBuffer; use fix_rs::dictionary::field_types::other::{MsgDirection, SessionRejectReason}; use fix_rs::dictionary::fields::{MsgTypeGrp, SenderCompID, TargetCompID, Text}; use fix_rs::dictionary::messages::{ Heartbeat, Logon, Logout, Reject, ResendRequest, SequenceReset, TestRequest, }; use fix_rs::field::Field; use fix_rs::field_tag::{self, FieldTag}; use fix_rs::fix::ParseError; use fix_rs::fix_version::FIXVersion; use fix_rs::fixt; use fix_rs::fixt::engine::{ConnectionTerminatedReason, EngineEvent, ResendResponse}; use fix_rs::fixt::message::FIXTMessage; use fix_rs::fixt::tests::{ AUTO_DISCONNECT_AFTER_INBOUND_RESEND_REQUEST_LOOP_COUNT, INBOUND_BYTES_BUFFER_CAPACITY, INBOUND_MESSAGES_BUFFER_LEN_MAX, }; use fix_rs::message::{self, Message, NOT_REQUIRED, REQUIRED}; use fix_rs::message_version::{self, MessageVersion}; fn serialize_and_append_message<T: FIXTMessage>( message: &T, fix_version: FIXVersion, message_version: MessageVersion, buffer: &mut Vec<u8>, ) { let mut bytes = ByteBuffer::new(); message.read(fix_version, message_version, &mut bytes); buffer.extend_from_slice(bytes.bytes()); } #[test] fn 
test_recv_resend_request_invalid_end_seq_no() { define_dictionary!(Logon, ResendRequest, Reject,); //Connect and Logon. let (mut test_server, _client, _) = TestStream::setup_test_server_and_logon(build_dictionary()); //Send ResendRequest to client with EndSeqNo < BeginSeqNo. let mut message = new_fixt_message!(ResendRequest); message.msg_seq_num = 5; message.begin_seq_no = 2; message.end_seq_no = 1; test_server.send_message(message); //Make sure client responds with an appropriate Reject. let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.ref_seq_num, 5); assert_eq!( message.session_reject_reason.unwrap(), SessionRejectReason::ValueIsIncorrectForThisTag ); } #[test] fn test_send_logout_before_logon() { define_dictionary!(Logon, Logout,); let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); //Send Logout immediately. let mut message = new_fixt_message!(Logout); message.msg_seq_num = 1; test_server.send_message(message); //Give client thread a chance to disconnect. thread::sleep(Duration::from_millis(500)); //Confirm the client socket disconnected. assert!(test_server.is_stream_closed(Duration::from_secs(5))); //Confirm client notified that it disconnected. engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::LogonNotFirstMessageError = reason { true } else { false }); }); } #[test] fn test_recv_logout_with_high_msg_seq_num() { define_dictionary!(Logon, Logout, ResendRequest, SequenceReset,); //Connect and Logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Send Logout with a high MsgSeqNum let mut message = new_fixt_message!(Logout); message.msg_seq_num = 15; test_server.send_message(message); //Make sure client tries to retrieve the missing messages. 
let message = test_server.recv_message::<ResendRequest>(); assert_eq!(message.begin_seq_no, 2); assert!(message.end_seq_no == 0 || message.end_seq_no == 14); //Respond with gap-fill. let mut message = new_fixt_message!(SequenceReset); message.gap_fill_flag = true; message.new_seq_no = 15; message.msg_seq_num = 2; test_server.send_message(message); let _ = engine_poll_message!(client, connection, SequenceReset); //Make sure client responds with Logout now that it's caught up. let message = test_server.recv_message::<Logout>(); assert_eq!(message.msg_seq_num, 3); //Close connection and make sure client notifies that connection closed cleanly. let _ = test_server.stream.shutdown(Shutdown::Both); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::RemoteRequested = reason { true } else { false }); }); } #[test] fn test_recv_logout_with_high_msg_seq_num_and_no_reply() { define_dictionary!(Logon, Logout, ResendRequest, SequenceReset,); //Connect and Logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Send Logout with a high MsgSeqNum let mut message = new_fixt_message!(Logout); message.msg_seq_num = 15; test_server.send_message(message); //Make sure client tries to retrieve the missing messages. let message = test_server.recv_message::<ResendRequest>(); assert_eq!(message.begin_seq_no, 2); assert!(message.end_seq_no == 0 || message.end_seq_no == 14); //Wait around without replying to ResendRequest. thread::sleep(Duration::from_millis(10500)); //Make sure client responds with Logout even though it didn't get caught up. let message = test_server.recv_message::<Logout>(); assert_eq!(message.msg_seq_num, 3); //Close connection and make sure client notifies that connection closed cleanly. 
let _ = test_server.stream.shutdown(Shutdown::Both); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::RemoteRequested = reason { true } else { false }); }); } #[test] fn test_recv_logout_send_logout_recv_resend_request() { define_dictionary!( Heartbeat, Logon, Logout, ResendRequest, SequenceReset, TestRequest, ); //Connect and Logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Send Logout to client. let mut message = new_fixt_message!(Logout); message.msg_seq_num = 2; test_server.send_message(message); //Make sure client responds with Logout. let message = test_server.recv_message::<Logout>(); assert_eq!(message.msg_seq_num, 2); let _ = engine_poll_message!(client, connection, Logout); //Ask client for missing messages even though they already responded to Logout. This should //cancel the logout when done before the timeout. let mut message = new_fixt_message!(ResendRequest); message.msg_seq_num = 3; message.begin_seq_no = 2; message.end_seq_no = 0; test_server.send_message(message); //Handle the resend request. engine_gap_fill_resend_request!(client, connection, 2..3); let _ = engine_poll_message!(client, connection, ResendRequest); //Make sure ResendRequest is responded to. let message = test_server.recv_message::<SequenceReset>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.new_seq_no, 3); //Wait around to make sure the server requested logout was cancelled. thread::sleep(Duration::from_millis(5500)); let _ = test_server.recv_message::<Heartbeat>(); let _ = test_server.recv_message::<TestRequest>(); //Try and logout again but cleanly this time. 
let mut message = new_fixt_message!(Logout); message.msg_seq_num = 4; test_server.send_message(message); let _ = engine_poll_message!(client, connection, Logout); //Close connection and make sure client notifies that connection closed cleanly. let _ = test_server.stream.shutdown(Shutdown::Both); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::RemoteRequested = reason { true } else { false }); }); } #[test] fn test_send_logout_and_recv_resend_request() { define_dictionary!( Heartbeat, Logon, Logout, ResendRequest, SequenceReset, TestRequest, ); //Connect and Logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Wait around for a Heartbeat and TestRequest. Ignore these so we can send a valid //ResendRequest below. thread::sleep(Duration::from_millis(5500)); let _ = test_server.recv_message::<Heartbeat>(); let _ = test_server.recv_message::<TestRequest>(); //Begin Logout. client.logout(connection); let _ = test_server.recv_message::<Logout>(); //Have server send a ResendRequest. let mut message = new_fixt_message!(ResendRequest); message.msg_seq_num = 2; message.begin_seq_no = 2; message.end_seq_no = 0; test_server.send_message(message); engine_gap_fill_resend_request!(client, connection, 2..5); let _ = engine_poll_message!(client, connection, ResendRequest); //Make sure client still responds to ResendRequest while logging out. let message = test_server.recv_message::<SequenceReset>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.new_seq_no, 5); //Respond to logout and make sure client still logs out cleanly. 
let mut message = new_fixt_message!(Logout); message.msg_seq_num = 3; test_server.send_message(message); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::LocalRequested = reason { true } else { false }); }); } #[test] fn test_send_logout_and_recv_logout_with_high_msg_seq_num() { define_dictionary!( Heartbeat, Logon, Logout, ResendRequest, SequenceReset, TestRequest, ); //Connect and Logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Begin Logout. client.logout(connection); let _ = test_server.recv_message::<Logout>(); //Respond with Logout containing high MsgSeqNum. let mut message = new_fixt_message!(Logout); message.msg_seq_num = 15; test_server.send_message(message); //Make sure client requests missing messages. let message = test_server.recv_message::<ResendRequest>(); assert_eq!(message.msg_seq_num, 3); assert_eq!(message.begin_seq_no, 2); assert!(message.end_seq_no == 0 || message.end_seq_no == 15); //Tell client about missing messages. let mut message = new_fixt_message!(SequenceReset); message.gap_fill_flag = true; message.msg_seq_num = 2; message.new_seq_no = 16; test_server.send_message(message); let _ = engine_poll_message!(client, connection, SequenceReset); //Make sure client automatically attempts to logout again after being caught up. let _ = test_server.recv_message::<Logout>(); //Finish logging out cleanly. 
let mut message = new_fixt_message!(Logout); message.msg_seq_num = 16; test_server.send_message(message); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::LocalRequested = reason { true } else { false }); }); } #[test] fn test_send_logout_and_recv_logout_with_high_msg_seq_num_and_no_reply() { define_dictionary!( Heartbeat, Logon, Logout, ResendRequest, SequenceReset, TestRequest, ); //Connect and Logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Begin Logout. client.logout(connection); let _ = test_server.recv_message::<Logout>(); //Respond with Logout containing high MsgSeqNum. let mut message = new_fixt_message!(Logout); message.msg_seq_num = 15; test_server.send_message(message); //Make sure client requests missing messages. let message = test_server.recv_message::<ResendRequest>(); assert_eq!(message.msg_seq_num, 3); assert_eq!(message.begin_seq_no, 2); assert!(message.end_seq_no == 0 || message.end_seq_no == 15); //Wait around without replying to ResendRequest. thread::sleep(Duration::from_millis(10500)); //Make sure client disconnects instead of retrying the logout process. If the other end sends a //logout with an expected MsgSeqNum, then we saw a later MsgSeqNum once already and something //has gone terribly wrong. If the other sends a further out MsgSeqNum but won't reply to //ResendRequest then we're just going to keep looping. engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::LogoutNoResponseError = reason { true } else { false }); }); } #[test] fn test_wrong_sender_comp_id_in_logon_response() { define_dictionary!(Logon, Logout, Reject,); //Connect and attempt logon. 
let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); let message = new_logon_message(); client.send_message(connection, message); let _ = test_server.recv_message::<Logon>(); //Respond with a logon messaging containing the wrong SenderCompID. let mut message = new_logon_message(); message.sender_comp_id = b"unknown".to_vec(); test_server.send_message(message); //Confirm client sends Reject, Logout, and then disconnects. let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.ref_seq_num, 1); assert_eq!( message.session_reject_reason.unwrap(), SessionRejectReason::CompIDProblem ); assert_eq!(message.text, b"CompID problem".to_vec()); let message = test_server.recv_message::<Logout>(); assert_eq!(message.text, b"SenderCompID is wrong".to_vec()); engine_poll_event!(client,EngineEvent::MessageRejected(msg_connection,rejected_message) => { assert_eq!(msg_connection,connection); let message = rejected_message.as_any().downcast_ref::<Logon>().expect("Not expected message type").clone(); assert_eq!(message.msg_seq_num,1); assert_eq!(message.sender_comp_id,b"unknown".to_vec()); }); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::SenderCompIDWrongError = reason { true } else { false }); }); } #[test] fn test_wrong_target_comp_id_in_logon_response() { define_dictionary!(Logon, Logout, Reject,); //Connect and attempt logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); let message = new_logon_message(); client.send_message(connection, message); let _ = test_server.recv_message::<Logon>(); //Respond with a logon messaging containing the wrong TargetCompID. 
let mut message = new_logon_message(); message.target_comp_id = b"unknown".to_vec(); test_server.send_message(message); //Confirm client sends Reject, Logout, and then disconnects. let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.ref_seq_num, 1); assert_eq!( message.session_reject_reason.unwrap(), SessionRejectReason::CompIDProblem ); assert_eq!(message.text, b"CompID problem".to_vec()); let message = test_server.recv_message::<Logout>(); assert_eq!(message.text, b"TargetCompID is wrong".to_vec()); engine_poll_event!(client,EngineEvent::MessageRejected(msg_connection,rejected_message) => { assert_eq!(msg_connection,connection); let message = rejected_message.as_any().downcast_ref::<Logon>().expect("Not expected message type").clone(); assert_eq!(message.msg_seq_num,1); assert_eq!(message.target_comp_id,b"unknown".to_vec()); }); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::TargetCompIDWrongError = reason { true } else { false }); }); } #[test] fn test_overflowing_inbound_messages_buffer_does_resume() { //To prevent the client thread from stalling when receiving messages faster than they can be //parsed, it will automatically stop receiving bytes and parsing them into messages once //INBOUND_MESSAGES_BUFFER_LEN_MAX messages have been parsed. This test makes sure the client //thread resumes parsing bytes that have already been read in but not parsed without waiting //for a new network notification. define_dictionary!(Logon, Heartbeat, TestRequest,); //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Send INBOUND_MESSAGES_BUFFER_LEN_MAX + 1 TestRequests (hopefully) merged into a single TCP //frame. 
let mut bytes = Vec::new(); for x in 0..INBOUND_MESSAGES_BUFFER_LEN_MAX + 1 { let mut test_request_message = new_fixt_message!(TestRequest); test_request_message.msg_seq_num = (x + 2) as u64; test_request_message.test_req_id = b"test".to_vec(); serialize_and_append_message( &test_request_message, FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut bytes, ); } assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU. assert!(bytes.len() < INBOUND_BYTES_BUFFER_CAPACITY); //Make sure client thread can theoretically store all of the messages in a single recv(). let bytes_written = test_server.stream.write(&bytes).unwrap(); assert_eq!(bytes_written, bytes.len()); //Make sure client acknowledges messages as normal. for x in 0..INBOUND_MESSAGES_BUFFER_LEN_MAX + 1 { let message = engine_poll_message!(client, connection, TestRequest); assert_eq!(message.msg_seq_num, (x + 2) as u64); let message = test_server.recv_message::<Heartbeat>(); assert_eq!(message.msg_seq_num, (x + 2) as u64); } } #[test] fn test_sender_comp_id() { define_fixt_message!(TestMessage: b"9999" => { NOT_REQUIRED, text: Text [FIX50..], }); define_dictionary!(Logon, Reject, TestMessage,); //FIXT.1.1: Make sure SenderCompID has to be the fourth field. { //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon_with_ver( FIXVersion::FIXT_1_1, MessageVersion::FIX50, build_dictionary(), ); //Accept when SenderCompID is the fourth tag. 
let target_comp_id_fifth_tag_message = b"8=FIXT.1.1\x019=48\x0135=9999\x0149=TX\x0156=TEST\x0134=2\x0152=20170105-01:01:01\x0110=236\x01"; let bytes_written = test_server .stream .write(target_comp_id_fifth_tag_message) .unwrap(); assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len()); let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.sender_comp_id, SERVER_SENDER_COMP_ID); //Reject when SenderCompID is the fifth tag. let sender_comp_id_fifth_tag_message = b"8=FIXT.1.1\x019=48\x0135=9999\x0156=TEST\x0149=TX\x0134=3\x0152=20170105-01:01:01\x0110=012\x01"; let bytes_written = test_server .stream .write(sender_comp_id_fifth_tag_message) .unwrap(); assert_eq!(bytes_written, sender_comp_id_fifth_tag_message.len()); let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!( message .session_reject_reason .expect("SessionRejectReason must be provided"), SessionRejectReason::TagSpecifiedOutOfRequiredOrder ); assert_eq!(message.text, b"SenderCompID must be the 4th tag".to_vec()); engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::SenderCompIDNotFourthTag = parse_error { true } else { false }); }); //Reject when SenderCompID is missing. 
let missing_sender_comp_id_tag_message = b"8=FIXT.1.1\x019=50\x0135=9999\x0156=TEST\x0134=10\x0152=20170105-01:01:01\x0110=086\x01"; let bytes_written = test_server .stream .write(missing_sender_comp_id_tag_message) .unwrap(); assert_eq!(bytes_written, missing_sender_comp_id_tag_message.len()); let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 3); assert_eq!( message .session_reject_reason .expect("SessionRejectReason must be provided"), SessionRejectReason::TagSpecifiedOutOfRequiredOrder ); assert_eq!(message.text, b"SenderCompID must be the 4th tag".to_vec()); engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::SenderCompIDNotFourthTag = parse_error { true } else { false }); }); } //FIX.4.0: Make sure SenderCompID does not have to be the fourth field. { //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon_with_ver( FIXVersion::FIX_4_0, MessageVersion::FIX40, build_dictionary(), ); //Accept when SenderCompID is the fourth tag. let target_comp_id_fifth_tag_message = b"8=FIX.4.0\x019=48\x0135=9999\x0149=TX\x0156=TEST\x0134=2\x0152=20170105-01:01:01\x0110=154\x01"; let bytes_written = test_server .stream .write(target_comp_id_fifth_tag_message) .unwrap(); assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len()); let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.sender_comp_id, SERVER_SENDER_COMP_ID); //Accept when SenderCompID is the fifth tag. 
let sender_comp_id_fifth_tag_message = b"8=FIX.4.0\x019=48\x0135=9999\x0156=TEST\x0149=TX\x0134=3\x0152=20170105-01:01:01\x0110=155\x01"; let bytes_written = test_server .stream .write(sender_comp_id_fifth_tag_message) .unwrap(); assert_eq!(bytes_written, sender_comp_id_fifth_tag_message.len()); let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.msg_seq_num, 3); assert_eq!(message.sender_comp_id, SERVER_SENDER_COMP_ID); //Reject when SenderCompID is missing. let missing_sender_comp_id_tag_message = b"8=FIX.4.0\x019=42\x0135=9999\x0156=TEST\x0134=4\x0152=20170105-01:01:01\x0110=063\x01"; let bytes_written = test_server .stream .write(missing_sender_comp_id_tag_message) .unwrap(); assert_eq!(bytes_written, missing_sender_comp_id_tag_message.len()); let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.text, b"Required tag missing".to_vec()); engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::MissingRequiredTag(ref tag,_) = parse_error { *tag == SenderCompID::tag() } else { false }); }); } } #[test] fn test_target_comp_id() { define_fixt_message!(TestMessage: b"9999" => { NOT_REQUIRED, text: Text [FIX50..], }); define_dictionary!(Logon, Reject, TestMessage,); //FIXT.1.1: Make sure TargetCompID has to be the fifth field. { //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon_with_ver( FIXVersion::FIXT_1_1, MessageVersion::FIX50, build_dictionary(), ); //Accept when TargetCompID is the fifth tag. 
let target_comp_id_fifth_tag_message = b"8=FIXT.1.1\x019=48\x0135=9999\x0149=TX\x0156=TEST\x0134=2\x0152=20170105-01:01:01\x0110=236\x01"; let bytes_written = test_server .stream .write(target_comp_id_fifth_tag_message) .unwrap(); assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len()); let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.target_comp_id, SERVER_TARGET_COMP_ID); //Reject when TargetCompID is the sixth tag. let target_comp_id_sixth_tag_message = b"8=FIXT.1.1\x019=48\x0135=9999\x0149=TX\x0134=3\x0156=TEST\x0152=20170105-01:01:01\x0110=237\x01"; let bytes_written = test_server .stream .write(target_comp_id_sixth_tag_message) .unwrap(); assert_eq!(bytes_written, target_comp_id_sixth_tag_message.len()); let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!( message .session_reject_reason .expect("SessionRejectReason must be provided"), SessionRejectReason::TagSpecifiedOutOfRequiredOrder ); assert_eq!(message.text, b"TargetCompID must be the 5th tag".to_vec()); engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::TargetCompIDNotFifthTag = parse_error { true } else { false }); }); //Reject when TargetCompID is missing. 
let missing_target_comp_id_tag_message = b"8=FIXT.1.1\x019=59\x0135=9999\x0149=TX\x0134=3\x0152=20170105-01:01:01\x0110=086\x01"; let bytes_written = test_server .stream .write(missing_target_comp_id_tag_message) .unwrap(); assert_eq!(bytes_written, missing_target_comp_id_tag_message.len()); let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 3); assert_eq!( message .session_reject_reason .expect("SessionRejectReason must be provided"), SessionRejectReason::TagSpecifiedOutOfRequiredOrder ); assert_eq!(message.text, b"TargetCompID must be the 5th tag".to_vec()); engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::TargetCompIDNotFifthTag = parse_error { true } else { false }); }); } //FIX.4.0: Make sure TargetCompID does not have to be the fifth field. { //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon_with_ver( FIXVersion::FIX_4_0, MessageVersion::FIX40, build_dictionary(), ); //Accept when TargetCompID is the fifth tag. let target_comp_id_fifth_tag_message = b"8=FIX.4.0\x019=48\x0135=9999\x0149=TX\x0156=TEST\x0134=2\x0152=20170105-01:01:01\x0110=154\x01"; let bytes_written = test_server .stream .write(target_comp_id_fifth_tag_message) .unwrap(); assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len()); let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.target_comp_id, SERVER_TARGET_COMP_ID); //Accept when TargetCompID is the sixth tag. 
let target_comp_id_sixth_tag_message = b"8=FIX.4.0\x019=48\x0135=9999\x0149=TX\x0134=3\x0156=TEST\x0152=20170105-01:01:01\x0110=155\x01"; let bytes_written = test_server .stream .write(target_comp_id_sixth_tag_message) .unwrap(); assert_eq!(bytes_written, target_comp_id_sixth_tag_message.len()); let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.msg_seq_num, 3); assert_eq!(message.target_comp_id, SERVER_TARGET_COMP_ID); //Reject when TargetCompID is missing. let missing_target_comp_id_tag_message = b"8=FIX.4.0\x019=40\x0135=9999\x0149=TX\x0134=4\x0152=20170105-01:01:01\x0110=171\x01"; let bytes_written = test_server .stream .write(missing_target_comp_id_tag_message) .unwrap(); assert_eq!(bytes_written, missing_target_comp_id_tag_message.len()); let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.text, b"Required tag missing".to_vec()); engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::MissingRequiredTag(ref tag,_) = parse_error { *tag == TargetCompID::tag() } else { false }); }); } } #[test] fn test_default_appl_ver_id() { define_fixt_message!(TestMessage: b"9999" => { REQUIRED, text: Text [FIX50..], }); define_fixt_message!(TestMessage2: b"9999" => { REQUIRED, text: Text [FIX40..], }); define_dictionary!(Logon, TestMessage,); //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon_with_ver( FIXVersion::FIXT_1_1, MessageVersion::FIX40, build_dictionary(), ); //Make sure DefaultApplVerID is respected for sent messages. { //Make client send a TestMessage. let mut message = new_fixt_message!(TestMessage); message.text = b"text".to_vec(); client.send_message(connection, message); //Confirm text field was excluded by server due to requiring >= FIX50 but default is FIX40. 
let message = test_server.recv_message::<TestMessage>(); assert_eq!(message.text.len(), 0); } //Make sure DefaultApplVerID is respected for received messages. { //Make server send a TestMessage. let mut message = new_fixt_message!(TestMessage); message.msg_seq_num = 2; message.text = b"text".to_vec(); test_server.send_message(message); //Confirm text field was excluded by client due to requiring >= FIX50 but default is FIX40. let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.text.len(), 0); //Make sever send a TestMessage again but force the text field to be sent. let mut message = new_fixt_message!(TestMessage2); message.msg_seq_num = 3; message.text = b"text".to_vec(); test_server.send_message(message); //Make sure message is considered invalid. engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::UnknownTag(ref tag) = parse_error { *tag == FieldTag(58) } else { false }); }); } } #[test] fn test_appl_ver_id() { define_fixt_message!(TestMessage: b"9999" => { REQUIRED, text: Text [FIX50..], }); define_dictionary!(Logon, Reject, TestMessage,); //Make sure when ApplVerID is specified after the sixth field, Engine responds with an //appropriate Reject message and notification. { //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Send TestMessage with ApplVerID field as the seventh tag. let appl_ver_id_seventh_tag_message = b"8=FIXT.1.1\x019=44\x0135=9999\x0149=SERVER\x0156=CLIENT\x0134=2\x011128=9\x0110=000\x01"; let bytes_written = test_server .stream .write(appl_ver_id_seventh_tag_message) .unwrap(); assert_eq!(bytes_written, appl_ver_id_seventh_tag_message.len()); //Make sure Engine responds with an appropriate reject. 
let message = test_server.recv_message::<Reject>(); assert_eq!( message.session_reject_reason.unwrap(), SessionRejectReason::TagSpecifiedOutOfRequiredOrder ); assert_eq!( message.text, b"ApplVerID must be the 6th tag if specified".to_vec() ); //Make sure Engine indicates that it rejected the message. engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::ApplVerIDNotSixthTag = parse_error { true } else { false }); }); } //Make sure ApplVerID overrides the default message version set in the initial Logon message. { //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Send TestMessage with ApplVerID < FIX50 and without text field. let mut message = new_fixt_message!(TestMessage); message.msg_seq_num = 2; message.appl_ver_id = Some(MessageVersion::FIX40); test_server.send_message_with_ver( FIXVersion::FIXT_1_1, message.appl_ver_id.unwrap(), message, ); //Confirm Engine accepted message correctly. let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.appl_ver_id, Some(MessageVersion::FIX40)); assert_eq!(message.text.len(), 0); //Send TestMessage with ApplVerID < FIX50 and with text field. let mut message = new_fixt_message!(TestMessage); message.msg_seq_num = 3; message.appl_ver_id = Some(MessageVersion::FIX40); message.text = b"text".to_vec(); test_server.send_message_with_ver(FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, message); //Force text field to be included. //Confirm Engine rejected message because text field is unsupported for this version. 
let message = test_server.recv_message::<Reject>(); assert_eq!( message.session_reject_reason.unwrap(), SessionRejectReason::TagNotDefinedForThisMessageType ); assert_eq!( message.text, b"Tag not defined for this message type".to_vec() ); engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => { assert_eq!(msg_connection,connection); assert!(if let ParseError::UnexpectedTag(ref tag) = parse_error { *tag == Text::tag() } else { false }); }); } } #[test] fn test_message_type_default_application_version() { define_fixt_message!(TestMessage: b"9999" => { REQUIRED, text: Text [FIX50SP1..], }); define_dictionary!(Logon, Reject, TestMessage,); //Connect. let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); //Logon. let mut logon_message = new_logon_message(); logon_message.default_appl_ver_id = MessageVersion::FIX50; client.send_message_box_with_message_version( connection, MessageVersion::FIX50SP2, Box::new(logon_message), ); let message = test_server.recv_message::<Logon>(); assert_eq!(message.msg_seq_num, 1); let mut response_message = new_fixt_message!(Logon); response_message.encrypt_method = message.encrypt_method; response_message.heart_bt_int = message.heart_bt_int; response_message.default_appl_ver_id = message.default_appl_ver_id; let mut msg_type_grp = MsgTypeGrp::new(); msg_type_grp.ref_msg_type = TestMessage::msg_type().to_vec(); msg_type_grp.ref_appl_ver_id = Some(MessageVersion::FIX50SP1); msg_type_grp.msg_direction = MsgDirection::Send; msg_type_grp.default_ver_indicator = true; response_message.no_msg_types.push(Box::new(msg_type_grp)); test_server.send_message_with_ver( FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, response_message, ); engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {}); let message = engine_poll_message!(client, connection, Logon); assert_eq!(message.msg_seq_num, 1); //Make sure specifying a message type specific default application 
version overrides the //default message version. { //Send TestMessage text field. let mut message = new_fixt_message!(TestMessage); message.msg_seq_num = 2; message.text = b"test".to_vec(); test_server.send_message_with_ver(FIXVersion::FIXT_1_1, MessageVersion::FIX50SP1, message); //Confirm Engine accepted message correctly. let message = engine_poll_message!(client, connection, TestMessage); assert_eq!( message.meta.unwrap().message_version, MessageVersion::FIX50SP1 ); //Set by parser what it parsed message as. assert_eq!(message.text, b"test"); } //Make sure ApplVerID overrides the message type specific default application version. { //Send TestMessage with explicit ApplVerID < FIX50 and without text field. let mut message = new_fixt_message!(TestMessage); message.msg_seq_num = 3; message.appl_ver_id = Some(MessageVersion::FIX40); test_server.send_message_with_ver( FIXVersion::FIXT_1_1, message.appl_ver_id.unwrap(), message, ); //Confirm Engine accepted message correctly. let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.meta.unwrap().message_version, MessageVersion::FIX40); assert_eq!(message.text.len(), 0); } } #[test] fn test_respond_to_test_request_immediately_after_logon() { //Special processing is required to adjust which messages are acceptable after Logon is //received. But the IO processing is level based so the event loop might not be notified of //remaining data. This test makes sure the remaining data is processed immediately. In //practice, the worst case scenario is some type of timeout would trigger a Heartbeat or a //TestRequest that would cause the remaining data to be read. define_dictionary!(Logon, Heartbeat, TestRequest,); //Connect to server. let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); //Have client send Logon. 
client.send_message_box(connection, Box::new(new_logon_message())); let message = test_server.recv_message::<Logon>(); assert_eq!(message.msg_seq_num, 1); //Respond with Logon and TestRequest (hopefully) merged into a single TCP packet. let mut logon_message = new_fixt_message!(Logon); logon_message.msg_seq_num = 1; logon_message.encrypt_method = message.encrypt_method; logon_message.heart_bt_int = message.heart_bt_int; logon_message.default_appl_ver_id = message.default_appl_ver_id; let mut test_request_message = new_fixt_message!(TestRequest); test_request_message.msg_seq_num = 2; test_request_message.test_req_id = b"test".to_vec(); let mut bytes = Vec::new(); serialize_and_append_message( &logon_message, FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut bytes, ); serialize_and_append_message( &test_request_message, FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut bytes, ); assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU. let bytes_written = test_server.stream.write(&bytes).unwrap(); assert_eq!(bytes_written, bytes.len()); //Make sure client acknowledges both as normal. engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {}); let message = engine_poll_message!(client, connection, Logon); assert_eq!(message.msg_seq_num, 1); let message = engine_poll_message!(client, connection, TestRequest); assert_eq!(message.msg_seq_num, 2); let message = test_server.recv_message::<Heartbeat>(); assert_eq!(message.msg_seq_num, 2); } #[test] fn test_respect_default_appl_ver_id_in_test_request_immediately_after_logon() { //This is very similar to test_respond_to_test_request_immediately_after_logon() above except //it makes sure the DefaultApplVerID is used correctly for the message right after Logon. define_fixt_message!(TestMessage: b"9999" => { REQUIRED, text: Text [FIX50SP2..], }); define_dictionary!(Logon, Logout, Reject, TestMessage,); //Connect to server. 
let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); //Have client send Logon. let mut logon_message = new_logon_message(); logon_message.default_appl_ver_id = MessageVersion::FIX50SP2; client.send_message_box(connection, Box::new(logon_message)); let message = test_server.recv_message::<Logon>(); assert_eq!(message.msg_seq_num, 1); //Respond with Logon and TestMessage (hopefully) merged into a single TCP packet. let mut logon_message = new_fixt_message!(Logon); logon_message.msg_seq_num = 1; logon_message.encrypt_method = message.encrypt_method; logon_message.heart_bt_int = message.heart_bt_int; logon_message.default_appl_ver_id = message.default_appl_ver_id; let mut test_message = new_fixt_message!(TestMessage); test_message.msg_seq_num = 2; test_message.text = b"test".to_vec(); let mut bytes = Vec::new(); serialize_and_append_message( &logon_message, FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut bytes, ); serialize_and_append_message( &test_message, FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut bytes, ); assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU. let bytes_written = test_server.stream.write(&bytes).unwrap(); assert_eq!(bytes_written, bytes.len()); //Make sure client acknowledges Logon as normal. engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {}); let message = engine_poll_message!(client, connection, Logon); assert_eq!(message.msg_seq_num, 1); //Make sure client applies DefaultApplVerID version to TestMessage so that the Text field is //parsed. 
let message = engine_poll_message!(client, connection, TestMessage); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.text, b"test".to_vec()); } #[test] fn test_logout_and_terminate_wrong_versioned_test_request_immediately_after_logon() { //This is very similar to test_respond_to_test_request_immediately_after_logon() above except //it makes sure using the wrong FIX version follows the typical Logout and disconnect as //expected. define_dictionary!(Logon, Logout, TestRequest,); //Connect to server. let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); //Have client send Logon. client.send_message_box(connection, Box::new(new_logon_message())); let message = test_server.recv_message::<Logon>(); assert_eq!(message.msg_seq_num, 1); //Respond with Logon and TestRequest (hopefully) merged into a single TCP packet. let mut logon_message = new_fixt_message!(Logon); logon_message.msg_seq_num = 1; logon_message.encrypt_method = message.encrypt_method; logon_message.heart_bt_int = message.heart_bt_int; logon_message.default_appl_ver_id = message.default_appl_ver_id; let mut test_request_message = new_fixt_message!(TestRequest); test_request_message.msg_seq_num = 2; test_request_message.test_req_id = b"test".to_vec(); let mut bytes = Vec::new(); serialize_and_append_message( &logon_message, FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut bytes, ); serialize_and_append_message( &test_request_message, FIXVersion::FIX_4_2, MessageVersion::FIX42, &mut bytes, ); assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU. let bytes_written = test_server.stream.write(&bytes).unwrap(); assert_eq!(bytes_written, bytes.len()); //Make sure client acknowledges Logon as normal. 
engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {}); let message = engine_poll_message!(client, connection, Logon); assert_eq!(message.msg_seq_num, 1); //Make sure Engine sends Logout and then disconnects. let message = test_server.recv_message::<Logout>(); assert_eq!( message.text, b"BeginStr is wrong, expected 'FIXT.1.1' but received 'FIX.4.2'".to_vec() ); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!( if let ConnectionTerminatedReason::BeginStrWrongError{received,expected} = reason { assert_eq!(received,FIXVersion::FIX_4_2); assert_eq!(expected,FIXVersion::FIXT_1_1); true } else { false } ); }); } #[test] fn test_max_message_size() { const MAX_MESSAGE_SIZE: u64 = 4096; define_fixt_message!(TestMessage: b"9999" => { REQUIRED, text: Text [FIX40..], }); define_dictionary!(Logon, Logout, Reject, TestMessage,); fn message_length<T: Message>(message: &T) -> u64 { let mut buffer = ByteBuffer::new(); message.read(FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut buffer) as u64 } //Make sure exceeding the MaxMessageSize in messages after Logon results in a Reject message. { //Connect to server. let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); //Have client send Logon. let mut message = new_logon_message(); message.max_message_size = MAX_MESSAGE_SIZE; client.send_message_box(connection, Box::new(message)); let message = test_server.recv_message::<Logon>(); assert_eq!(message.msg_seq_num, 1); assert_eq!(message.max_message_size, MAX_MESSAGE_SIZE); //Acknowledge Logon. 
let mut response_message = new_fixt_message!(Logon); response_message.encrypt_method = message.encrypt_method; response_message.heart_bt_int = message.heart_bt_int; response_message.default_appl_ver_id = message.default_appl_ver_id; test_server.send_message(response_message); engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {}); let message = engine_poll_message!(client, connection, Logon); assert_eq!(message.msg_seq_num, 1); //Try and send Engine a message exceeding MAX_MESSAGE_SIZE. let mut message = new_fixt_message!(TestMessage); message.msg_seq_num = 2; let current_message_len = message_length(&message); for _ in 0..(MAX_MESSAGE_SIZE - current_message_len) + 1 { message.text.push(b'A'); } test_server.send_message(message); //Make sure Engine rejected the message. let message = test_server.recv_message::<Reject>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.ref_seq_num, 2); assert_eq!( message.session_reject_reason.unwrap(), SessionRejectReason::Other ); let mut expected_error_text = b"Message size exceeds MaxMessageSize=".to_vec(); expected_error_text.extend_from_slice(MAX_MESSAGE_SIZE.to_string().as_bytes()); assert_eq!(message.text, expected_error_text); } //Make sure exceeding the MaxMessageSize in the Logon response results in the Engine just //disconnecting. { //Connect to server. let (mut test_server, mut client, connection) = TestStream::setup_test_server(build_dictionary()); //Have client send Logon. let mut message = new_logon_message(); message.max_message_size = MAX_MESSAGE_SIZE; client.send_message_box(connection, Box::new(message)); let message = test_server.recv_message::<Logon>(); assert_eq!(message.msg_seq_num, 1); assert_eq!(message.max_message_size, MAX_MESSAGE_SIZE); //Respond with Logon message that exceeds MAX_MESSAGE_SIZE. 
let mut response_message = new_fixt_message!(Logon); response_message.encrypt_method = message.encrypt_method.clone(); response_message.heart_bt_int = message.heart_bt_int; response_message.default_appl_ver_id = message.default_appl_ver_id; while message_length(&response_message) <= MAX_MESSAGE_SIZE { let mut msg_type_grp = MsgTypeGrp::new(); msg_type_grp.ref_msg_type = b"L".to_vec(); msg_type_grp.ref_appl_ver_id = Some(MessageVersion::FIX50SP1); msg_type_grp.msg_direction = MsgDirection::Send; response_message.no_msg_types.push(Box::new(msg_type_grp)); } test_server.send_message(response_message); //Make sure Engine just disconnects. engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::LogonParseError(parse_error) = reason { if let ParseError::MessageSizeTooBig = parse_error { true } else { false } } else { false }); }); } } #[test] fn test_block_read_when_write_blocks() { define_dictionary!(Logon, Heartbeat, Reject, ResendRequest, TestRequest,); //Send a bunch of messages to Engine without reading the responses. Engine should stop reading //until it can write again. { //Connect and Logon. let (mut test_server, client, _) = TestStream::setup_test_server_and_logon(build_dictionary()); //Run a background thread to drain client events until they stop. The stopping indicates the //client has stopped accepting new messages. let client = Arc::new(Mutex::new(client)); //Keep client around even after thread ends. let client_clone = client.clone(); //Clone to be passed to thread. 
let thread_running = Arc::new(AtomicBool::new(true)); let thread_running_clone = thread_running.clone(); let thread_handle = thread::spawn(move || { let mut client = client_clone.lock().unwrap(); while let Some(event) = client.poll(Duration::from_secs(2)) { match event { EngineEvent::ConnectionTerminated(_, _) => { panic!("Engine should not have terminated connection yet.") } _ => {} } } thread_running_clone.store(false, Ordering::Relaxed); }); //Flood client with TestRequest messages until thread notifies that messages are being //blocked. let mut outbound_msg_seq_num = 2; let now = Instant::now(); let mut stop_writing = false; loop { if !thread_running.load(Ordering::Relaxed) { thread_handle.join().expect("Thread must be stopped."); break; } else if now.elapsed() > Duration::from_secs(15) { panic!("Engine never blocked receiving of new messages."); } if !stop_writing { let mut message = new_fixt_message!(TestRequest); message.msg_seq_num = outbound_msg_seq_num; message.test_req_id = b"test".to_vec(); if let Err(bytes_not_written) = test_server.send_message_with_timeout(message, Duration::from_millis(10)) { //Stop writing new messages because TCP indicated that the other side is //congested. stop_writing = true; if bytes_not_written > 0 { continue; } } outbound_msg_seq_num += 1; } } //Drain server's read buffer. loop { let message = test_server.recv_message::<Heartbeat>(); if message.msg_seq_num == outbound_msg_seq_num - 1 { break; } } //Send gibberish that will force an incomplete message to be discarded. let _ = test_server.stream.write(b"\x0110=000\x01=000"); //Make sure messages continue to flow again. 
let mut message = new_fixt_message!(TestRequest); message.msg_seq_num = outbound_msg_seq_num + 1; message.test_req_id = b"final".to_vec(); test_server.send_message(message); let message = test_server.recv_fixt_message(); let message = match message_to_enum(message) { MessageEnum::Heartbeat(message) => message, _ => Box::from(test_server.recv_message::<Heartbeat>()), }; assert_eq!(message.test_req_id, b"final"); } //Same as above but never drain the server's read buffer so the connection must eventually be //dropped. { //Connect and Logon. let (mut test_server, mut client, _) = TestStream::setup_test_server_and_logon(build_dictionary()); //Flood client with TestRequest messages until Engine drops the connection. let mut outbound_msg_seq_num = 2; let now = Instant::now(); let mut stop_writing = false; loop { if now.elapsed() > Duration::from_secs(30) { panic!("Engine never disconnected."); } if let Some(EngineEvent::ConnectionTerminated(_, reason)) = client.poll(Duration::from_millis(0)) { assert!( if let ConnectionTerminatedReason::SocketNotWritableTimeoutError = reason { true } else { false } ); assert!(test_server.is_stream_closed(Duration::from_secs(3))); //Success! Engine disconnected. break; } if !stop_writing { let mut message = new_fixt_message!(TestRequest); message.msg_seq_num = outbound_msg_seq_num; message.test_req_id = b"test".to_vec(); if let Err(_) = test_server.send_message_with_timeout(message, Duration::from_millis(10)) { stop_writing = true; } outbound_msg_seq_num += 1; } } } } #[test] fn test_inbound_resend_loop_detection() { define_dictionary!( Logon, Logout, Heartbeat, ResendRequest, SequenceReset, TestRequest, ); //Connect and logon. let (mut test_server, mut client, connection) = TestStream::setup_test_server_and_logon(build_dictionary()); //Have server send TestRequest so Engine responds with a Heartbeat. 
let mut message = new_fixt_message!(TestRequest); message.msg_seq_num = 2; message.test_req_id = b"test".to_vec(); test_server.send_message(message); engine_poll_message!(client, connection, TestRequest); let message = test_server.recv_message::<Heartbeat>(); assert_eq!(message.msg_seq_num, 2); assert_eq!(message.test_req_id, b"test"); //Have server ignore the Heartbeat response by sending ResendRequest a few times. The client //should eventually logout and disconnect. const BASE_MSG_SEQ_NUM: u64 = 3; for x in 0..AUTO_DISCONNECT_AFTER_INBOUND_RESEND_REQUEST_LOOP_COUNT { let mut message = new_fixt_message!(ResendRequest); message.msg_seq_num = BASE_MSG_SEQ_NUM + x; message.begin_seq_no = 2; message.end_seq_no = 0; test_server.send_message(message); engine_gap_fill_resend_request!(client, connection, 2..3); let _ = engine_poll_message!(client, connection, ResendRequest); let message = test_server.recv_message::<SequenceReset>(); assert_eq!(message.gap_fill_flag, true); assert_eq!(message.new_seq_no, 3); assert_eq!(message.msg_seq_num, 2); } let mut message = new_fixt_message!(ResendRequest); message.msg_seq_num = BASE_MSG_SEQ_NUM + AUTO_DISCONNECT_AFTER_INBOUND_RESEND_REQUEST_LOOP_COUNT; message.begin_seq_no = 2; message.end_seq_no = 0; test_server.send_message(message); let message = test_server.recv_message::<Logout>(); assert_eq!( message.text, b"Detected ResendRequest loop for BeginSeqNo 2".to_vec() ); engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => { assert_eq!(terminated_connection,connection); assert!(if let ConnectionTerminatedReason::InboundResendRequestLoopError = reason { true } else { false }); }); assert!(test_server.is_stream_closed(Duration::from_secs(3))); }
39.73018
147
0.67561
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_recv_resend_request_invalid_end_seq_no() {\n define_dictionary!(Logon, ResendRequest, Reject,);\n\n //Connect and Logon.\n let (mut test_server, _client, _) = TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Send ResendRequest to client with EndSeqNo < BeginSeqNo.\n let mut message = new_fixt_message!(ResendRequest);\n message.msg_seq_num = 5;\n message.begin_seq_no = 2;\n message.end_seq_no = 1;\n test_server.send_message(message);\n\n //Make sure client responds with an appropriate Reject.\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.ref_seq_num, 5);\n assert_eq!(\n message.session_reject_reason.unwrap(),\n SessionRejectReason::ValueIsIncorrectForThisTag\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_send_logout_before_logon() {\n define_dictionary!(Logon, Logout,);\n\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n //Send Logout immediately.\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 1;\n test_server.send_message(message);\n\n //Give client thread a chance to disconnect.\n thread::sleep(Duration::from_millis(500));\n\n //Confirm the client socket disconnected.\n assert!(test_server.is_stream_closed(Duration::from_secs(5)));\n\n //Confirm client notified that it disconnected.\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::LogonNotFirstMessageError = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_recv_logout_with_high_msg_seq_num() {\n define_dictionary!(Logon, Logout, ResendRequest, SequenceReset,);\n\n //Connect and Logon.\n let (mut test_server, mut client, connection) =\n 
TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Send Logout with a high MsgSeqNum\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 15;\n test_server.send_message(message);\n\n //Make sure client tries to retrieve the missing messages.\n let message = test_server.recv_message::<ResendRequest>();\n assert_eq!(message.begin_seq_no, 2);\n assert!(message.end_seq_no == 0 || message.end_seq_no == 14);\n\n //Respond with gap-fill.\n let mut message = new_fixt_message!(SequenceReset);\n message.gap_fill_flag = true;\n message.new_seq_no = 15;\n message.msg_seq_num = 2;\n test_server.send_message(message);\n let _ = engine_poll_message!(client, connection, SequenceReset);\n\n //Make sure client responds with Logout now that it's caught up.\n let message = test_server.recv_message::<Logout>();\n assert_eq!(message.msg_seq_num, 3);\n\n //Close connection and make sure client notifies that connection closed cleanly.\n let _ = test_server.stream.shutdown(Shutdown::Both);\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::RemoteRequested = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_recv_logout_with_high_msg_seq_num_and_no_reply() {\n define_dictionary!(Logon, Logout, ResendRequest, SequenceReset,);\n\n //Connect and Logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Send Logout with a high MsgSeqNum\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 15;\n test_server.send_message(message);\n\n //Make sure client tries to retrieve the missing messages.\n let message = test_server.recv_message::<ResendRequest>();\n assert_eq!(message.begin_seq_no, 2);\n assert!(message.end_seq_no == 0 || message.end_seq_no == 14);\n\n //Wait around without 
replying to ResendRequest.\n thread::sleep(Duration::from_millis(10500));\n\n //Make sure client responds with Logout even though it didn't get caught up.\n let message = test_server.recv_message::<Logout>();\n assert_eq!(message.msg_seq_num, 3);\n\n //Close connection and make sure client notifies that connection closed cleanly.\n let _ = test_server.stream.shutdown(Shutdown::Both);\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::RemoteRequested = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_recv_logout_send_logout_recv_resend_request() {\n define_dictionary!(\n Heartbeat,\n Logon,\n Logout,\n ResendRequest,\n SequenceReset,\n TestRequest,\n );\n\n //Connect and Logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Send Logout to client.\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 2;\n test_server.send_message(message);\n\n //Make sure client responds with Logout.\n let message = test_server.recv_message::<Logout>();\n assert_eq!(message.msg_seq_num, 2);\n let _ = engine_poll_message!(client, connection, Logout);\n\n //Ask client for missing messages even though they already responded to Logout. 
This should\n //cancel the logout when done before the timeout.\n let mut message = new_fixt_message!(ResendRequest);\n message.msg_seq_num = 3;\n message.begin_seq_no = 2;\n message.end_seq_no = 0;\n test_server.send_message(message);\n\n //Handle the resend request.\n engine_gap_fill_resend_request!(client, connection, 2..3);\n let _ = engine_poll_message!(client, connection, ResendRequest);\n\n //Make sure ResendRequest is responded to.\n let message = test_server.recv_message::<SequenceReset>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.new_seq_no, 3);\n\n //Wait around to make sure the server requested logout was cancelled.\n thread::sleep(Duration::from_millis(5500));\n let _ = test_server.recv_message::<Heartbeat>();\n let _ = test_server.recv_message::<TestRequest>();\n\n //Try and logout again but cleanly this time.\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 4;\n test_server.send_message(message);\n let _ = engine_poll_message!(client, connection, Logout);\n\n //Close connection and make sure client notifies that connection closed cleanly.\n let _ = test_server.stream.shutdown(Shutdown::Both);\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::RemoteRequested = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_send_logout_and_recv_resend_request() {\n define_dictionary!(\n Heartbeat,\n Logon,\n Logout,\n ResendRequest,\n SequenceReset,\n TestRequest,\n );\n\n //Connect and Logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Wait around for a Heartbeat and TestRequest. 
Ignore these so we can send a valid\n //ResendRequest below.\n thread::sleep(Duration::from_millis(5500));\n let _ = test_server.recv_message::<Heartbeat>();\n let _ = test_server.recv_message::<TestRequest>();\n\n //Begin Logout.\n client.logout(connection);\n let _ = test_server.recv_message::<Logout>();\n\n //Have server send a ResendRequest.\n let mut message = new_fixt_message!(ResendRequest);\n message.msg_seq_num = 2;\n message.begin_seq_no = 2;\n message.end_seq_no = 0;\n test_server.send_message(message);\n\n engine_gap_fill_resend_request!(client, connection, 2..5);\n let _ = engine_poll_message!(client, connection, ResendRequest);\n\n //Make sure client still responds to ResendRequest while logging out.\n let message = test_server.recv_message::<SequenceReset>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.new_seq_no, 5);\n\n //Respond to logout and make sure client still logs out cleanly.\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 3;\n test_server.send_message(message);\n\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::LocalRequested = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_send_logout_and_recv_logout_with_high_msg_seq_num() {\n define_dictionary!(\n Heartbeat,\n Logon,\n Logout,\n ResendRequest,\n SequenceReset,\n TestRequest,\n );\n\n //Connect and Logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Begin Logout.\n client.logout(connection);\n let _ = test_server.recv_message::<Logout>();\n\n //Respond with Logout containing high MsgSeqNum.\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 15;\n test_server.send_message(message);\n\n //Make sure client requests missing messages.\n let message = 
test_server.recv_message::<ResendRequest>();\n assert_eq!(message.msg_seq_num, 3);\n assert_eq!(message.begin_seq_no, 2);\n assert!(message.end_seq_no == 0 || message.end_seq_no == 15);\n\n //Tell client about missing messages.\n let mut message = new_fixt_message!(SequenceReset);\n message.gap_fill_flag = true;\n message.msg_seq_num = 2;\n message.new_seq_no = 16;\n test_server.send_message(message);\n let _ = engine_poll_message!(client, connection, SequenceReset);\n\n //Make sure client automatically attempts to logout again after being caught up.\n let _ = test_server.recv_message::<Logout>();\n\n //Finish logging out cleanly.\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 16;\n test_server.send_message(message);\n\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::LocalRequested = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_send_logout_and_recv_logout_with_high_msg_seq_num_and_no_reply() {\n define_dictionary!(\n Heartbeat,\n Logon,\n Logout,\n ResendRequest,\n SequenceReset,\n TestRequest,\n );\n\n //Connect and Logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Begin Logout.\n client.logout(connection);\n let _ = test_server.recv_message::<Logout>();\n\n //Respond with Logout containing high MsgSeqNum.\n let mut message = new_fixt_message!(Logout);\n message.msg_seq_num = 15;\n test_server.send_message(message);\n\n //Make sure client requests missing messages.\n let message = test_server.recv_message::<ResendRequest>();\n assert_eq!(message.msg_seq_num, 3);\n assert_eq!(message.begin_seq_no, 2);\n assert!(message.end_seq_no == 0 || message.end_seq_no == 15);\n\n //Wait around without replying to ResendRequest.\n 
thread::sleep(Duration::from_millis(10500));\n\n //Make sure client disconnects instead of retrying the logout process. If the other end sends a\n //logout with an expected MsgSeqNum, then we saw a later MsgSeqNum once already and something\n //has gone terribly wrong. If the other sends a further out MsgSeqNum but won't reply to\n //ResendRequest then we're just going to keep looping.\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::LogoutNoResponseError = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_wrong_sender_comp_id_in_logon_response() {\n define_dictionary!(Logon, Logout, Reject,);\n\n //Connect and attempt logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n let message = new_logon_message();\n client.send_message(connection, message);\n let _ = test_server.recv_message::<Logon>();\n\n //Respond with a logon messaging containing the wrong SenderCompID.\n let mut message = new_logon_message();\n message.sender_comp_id = b\"unknown\".to_vec();\n test_server.send_message(message);\n\n //Confirm client sends Reject, Logout, and then disconnects.\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.ref_seq_num, 1);\n assert_eq!(\n message.session_reject_reason.unwrap(),\n SessionRejectReason::CompIDProblem\n );\n assert_eq!(message.text, b\"CompID problem\".to_vec());\n\n let message = test_server.recv_message::<Logout>();\n assert_eq!(message.text, b\"SenderCompID is wrong\".to_vec());\n\n engine_poll_event!(client,EngineEvent::MessageRejected(msg_connection,rejected_message) => {\n assert_eq!(msg_connection,connection);\n\n let message = rejected_message.as_any().downcast_ref::<Logon>().expect(\"Not expected message type\").clone();\n 
assert_eq!(message.msg_seq_num,1);\n assert_eq!(message.sender_comp_id,b\"unknown\".to_vec());\n });\n\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::SenderCompIDWrongError = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_wrong_target_comp_id_in_logon_response() {\n define_dictionary!(Logon, Logout, Reject,);\n\n //Connect and attempt logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n let message = new_logon_message();\n client.send_message(connection, message);\n let _ = test_server.recv_message::<Logon>();\n\n //Respond with a logon messaging containing the wrong TargetCompID.\n let mut message = new_logon_message();\n message.target_comp_id = b\"unknown\".to_vec();\n test_server.send_message(message);\n\n //Confirm client sends Reject, Logout, and then disconnects.\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.ref_seq_num, 1);\n assert_eq!(\n message.session_reject_reason.unwrap(),\n SessionRejectReason::CompIDProblem\n );\n assert_eq!(message.text, b\"CompID problem\".to_vec());\n\n let message = test_server.recv_message::<Logout>();\n assert_eq!(message.text, b\"TargetCompID is wrong\".to_vec());\n\n engine_poll_event!(client,EngineEvent::MessageRejected(msg_connection,rejected_message) => {\n assert_eq!(msg_connection,connection);\n\n let message = rejected_message.as_any().downcast_ref::<Logon>().expect(\"Not expected message type\").clone();\n assert_eq!(message.msg_seq_num,1);\n assert_eq!(message.target_comp_id,b\"unknown\".to_vec());\n });\n\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let 
ConnectionTerminatedReason::TargetCompIDWrongError = reason { true } else { false });\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_overflowing_inbound_messages_buffer_does_resume() {\n //To prevent the client thread from stalling when receiving messages faster than they can be\n //parsed, it will automatically stop receiving bytes and parsing them into messages once\n //INBOUND_MESSAGES_BUFFER_LEN_MAX messages have been parsed. This test makes sure the client\n //thread resumes parsing bytes that have already been read in but not parsed without waiting\n //for a new network notification.\n\n define_dictionary!(Logon, Heartbeat, TestRequest,);\n\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Send INBOUND_MESSAGES_BUFFER_LEN_MAX + 1 TestRequests (hopefully) merged into a single TCP\n //frame.\n let mut bytes = Vec::new();\n for x in 0..INBOUND_MESSAGES_BUFFER_LEN_MAX + 1 {\n let mut test_request_message = new_fixt_message!(TestRequest);\n test_request_message.msg_seq_num = (x + 2) as u64;\n test_request_message.test_req_id = b\"test\".to_vec();\n\n serialize_and_append_message(\n &test_request_message,\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50SP2,\n &mut bytes,\n );\n }\n assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU.\n assert!(bytes.len() < INBOUND_BYTES_BUFFER_CAPACITY); //Make sure client thread can theoretically store all of the messages in a single recv().\n let bytes_written = test_server.stream.write(&bytes).unwrap();\n assert_eq!(bytes_written, bytes.len());\n\n //Make sure client acknowledges messages as normal.\n for x in 0..INBOUND_MESSAGES_BUFFER_LEN_MAX + 1 {\n let message = engine_poll_message!(client, connection, TestRequest);\n assert_eq!(message.msg_seq_num, (x + 2) as u64);\n\n let message = test_server.recv_message::<Heartbeat>();\n 
assert_eq!(message.msg_seq_num, (x + 2) as u64);\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_sender_comp_id() {\n define_fixt_message!(TestMessage: b\"9999\" => {\n NOT_REQUIRED, text: Text [FIX50..],\n });\n\n define_dictionary!(Logon, Reject, TestMessage,);\n\n //FIXT.1.1: Make sure SenderCompID has to be the fourth field.\n {\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon_with_ver(\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50,\n build_dictionary(),\n );\n\n //Accept when SenderCompID is the fourth tag.\n let target_comp_id_fifth_tag_message = b\"8=FIXT.1.1\\x019=48\\x0135=9999\\x0149=TX\\x0156=TEST\\x0134=2\\x0152=20170105-01:01:01\\x0110=236\\x01\";\n let bytes_written = test_server\n .stream\n .write(target_comp_id_fifth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len());\n\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.sender_comp_id, SERVER_SENDER_COMP_ID);\n\n //Reject when SenderCompID is the fifth tag.\n let sender_comp_id_fifth_tag_message = b\"8=FIXT.1.1\\x019=48\\x0135=9999\\x0156=TEST\\x0149=TX\\x0134=3\\x0152=20170105-01:01:01\\x0110=012\\x01\";\n let bytes_written = test_server\n .stream\n .write(sender_comp_id_fifth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, sender_comp_id_fifth_tag_message.len());\n\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(\n message\n .session_reject_reason\n .expect(\"SessionRejectReason must be provided\"),\n SessionRejectReason::TagSpecifiedOutOfRequiredOrder\n );\n assert_eq!(message.text, b\"SenderCompID must be the 4th tag\".to_vec());\n\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::SenderCompIDNotFourthTag 
= parse_error { true } else { false });\n });\n\n //Reject when SenderCompID is missing.\n let missing_sender_comp_id_tag_message = b\"8=FIXT.1.1\\x019=50\\x0135=9999\\x0156=TEST\\x0134=10\\x0152=20170105-01:01:01\\x0110=086\\x01\";\n let bytes_written = test_server\n .stream\n .write(missing_sender_comp_id_tag_message)\n .unwrap();\n assert_eq!(bytes_written, missing_sender_comp_id_tag_message.len());\n\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 3);\n assert_eq!(\n message\n .session_reject_reason\n .expect(\"SessionRejectReason must be provided\"),\n SessionRejectReason::TagSpecifiedOutOfRequiredOrder\n );\n assert_eq!(message.text, b\"SenderCompID must be the 4th tag\".to_vec());\n\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::SenderCompIDNotFourthTag = parse_error { true } else { false });\n });\n }\n\n //FIX.4.0: Make sure SenderCompID does not have to be the fourth field.\n {\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon_with_ver(\n FIXVersion::FIX_4_0,\n MessageVersion::FIX40,\n build_dictionary(),\n );\n\n //Accept when SenderCompID is the fourth tag.\n let target_comp_id_fifth_tag_message = b\"8=FIX.4.0\\x019=48\\x0135=9999\\x0149=TX\\x0156=TEST\\x0134=2\\x0152=20170105-01:01:01\\x0110=154\\x01\";\n let bytes_written = test_server\n .stream\n .write(target_comp_id_fifth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len());\n\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.sender_comp_id, SERVER_SENDER_COMP_ID);\n\n //Accept when SenderCompID is the fifth tag.\n let sender_comp_id_fifth_tag_message = 
b\"8=FIX.4.0\\x019=48\\x0135=9999\\x0156=TEST\\x0149=TX\\x0134=3\\x0152=20170105-01:01:01\\x0110=155\\x01\";\n let bytes_written = test_server\n .stream\n .write(sender_comp_id_fifth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, sender_comp_id_fifth_tag_message.len());\n\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.msg_seq_num, 3);\n assert_eq!(message.sender_comp_id, SERVER_SENDER_COMP_ID);\n\n //Reject when SenderCompID is missing.\n let missing_sender_comp_id_tag_message = b\"8=FIX.4.0\\x019=42\\x0135=9999\\x0156=TEST\\x0134=4\\x0152=20170105-01:01:01\\x0110=063\\x01\";\n let bytes_written = test_server\n .stream\n .write(missing_sender_comp_id_tag_message)\n .unwrap();\n assert_eq!(bytes_written, missing_sender_comp_id_tag_message.len());\n\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.text, b\"Required tag missing\".to_vec());\n\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::MissingRequiredTag(ref tag,_) = parse_error { *tag == SenderCompID::tag() } else { false });\n });\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_target_comp_id() {\n define_fixt_message!(TestMessage: b\"9999\" => {\n NOT_REQUIRED, text: Text [FIX50..],\n });\n\n define_dictionary!(Logon, Reject, TestMessage,);\n\n //FIXT.1.1: Make sure TargetCompID has to be the fifth field.\n {\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon_with_ver(\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50,\n build_dictionary(),\n );\n\n //Accept when TargetCompID is the fifth tag.\n let target_comp_id_fifth_tag_message = b\"8=FIXT.1.1\\x019=48\\x0135=9999\\x0149=TX\\x0156=TEST\\x0134=2\\x0152=20170105-01:01:01\\x0110=236\\x01\";\n let bytes_written = test_server\n .stream\n 
.write(target_comp_id_fifth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len());\n\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.target_comp_id, SERVER_TARGET_COMP_ID);\n\n //Reject when TargetCompID is the sixth tag.\n let target_comp_id_sixth_tag_message = b\"8=FIXT.1.1\\x019=48\\x0135=9999\\x0149=TX\\x0134=3\\x0156=TEST\\x0152=20170105-01:01:01\\x0110=237\\x01\";\n let bytes_written = test_server\n .stream\n .write(target_comp_id_sixth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, target_comp_id_sixth_tag_message.len());\n\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(\n message\n .session_reject_reason\n .expect(\"SessionRejectReason must be provided\"),\n SessionRejectReason::TagSpecifiedOutOfRequiredOrder\n );\n assert_eq!(message.text, b\"TargetCompID must be the 5th tag\".to_vec());\n\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::TargetCompIDNotFifthTag = parse_error { true } else { false });\n });\n\n //Reject when TargetCompID is missing.\n let missing_target_comp_id_tag_message =\n b\"8=FIXT.1.1\\x019=59\\x0135=9999\\x0149=TX\\x0134=3\\x0152=20170105-01:01:01\\x0110=086\\x01\";\n let bytes_written = test_server\n .stream\n .write(missing_target_comp_id_tag_message)\n .unwrap();\n assert_eq!(bytes_written, missing_target_comp_id_tag_message.len());\n\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 3);\n assert_eq!(\n message\n .session_reject_reason\n .expect(\"SessionRejectReason must be provided\"),\n SessionRejectReason::TagSpecifiedOutOfRequiredOrder\n );\n assert_eq!(message.text, b\"TargetCompID must be the 5th tag\".to_vec());\n\n 
engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::TargetCompIDNotFifthTag = parse_error { true } else { false });\n });\n }\n\n //FIX.4.0: Make sure TargetCompID does not have to be the fifth field.\n {\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon_with_ver(\n FIXVersion::FIX_4_0,\n MessageVersion::FIX40,\n build_dictionary(),\n );\n\n //Accept when TargetCompID is the fifth tag.\n let target_comp_id_fifth_tag_message = b\"8=FIX.4.0\\x019=48\\x0135=9999\\x0149=TX\\x0156=TEST\\x0134=2\\x0152=20170105-01:01:01\\x0110=154\\x01\";\n let bytes_written = test_server\n .stream\n .write(target_comp_id_fifth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, target_comp_id_fifth_tag_message.len());\n\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.target_comp_id, SERVER_TARGET_COMP_ID);\n\n //Accept when TargetCompID is the sixth tag.\n let target_comp_id_sixth_tag_message = b\"8=FIX.4.0\\x019=48\\x0135=9999\\x0149=TX\\x0134=3\\x0156=TEST\\x0152=20170105-01:01:01\\x0110=155\\x01\";\n let bytes_written = test_server\n .stream\n .write(target_comp_id_sixth_tag_message)\n .unwrap();\n assert_eq!(bytes_written, target_comp_id_sixth_tag_message.len());\n\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.msg_seq_num, 3);\n assert_eq!(message.target_comp_id, SERVER_TARGET_COMP_ID);\n\n //Reject when TargetCompID is missing.\n let missing_target_comp_id_tag_message =\n b\"8=FIX.4.0\\x019=40\\x0135=9999\\x0149=TX\\x0134=4\\x0152=20170105-01:01:01\\x0110=171\\x01\";\n let bytes_written = test_server\n .stream\n .write(missing_target_comp_id_tag_message)\n .unwrap();\n assert_eq!(bytes_written, missing_target_comp_id_tag_message.len());\n\n let message = 
test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.text, b\"Required tag missing\".to_vec());\n\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::MissingRequiredTag(ref tag,_) = parse_error { *tag == TargetCompID::tag() } else { false });\n });\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_default_appl_ver_id() {\n define_fixt_message!(TestMessage: b\"9999\" => {\n REQUIRED, text: Text [FIX50..],\n });\n\n define_fixt_message!(TestMessage2: b\"9999\" => {\n REQUIRED, text: Text [FIX40..],\n });\n\n define_dictionary!(Logon, TestMessage,);\n\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon_with_ver(\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX40,\n build_dictionary(),\n );\n\n //Make sure DefaultApplVerID is respected for sent messages.\n {\n //Make client send a TestMessage.\n let mut message = new_fixt_message!(TestMessage);\n message.text = b\"text\".to_vec();\n client.send_message(connection, message);\n\n //Confirm text field was excluded by server due to requiring >= FIX50 but default is FIX40.\n let message = test_server.recv_message::<TestMessage>();\n assert_eq!(message.text.len(), 0);\n }\n\n //Make sure DefaultApplVerID is respected for received messages.\n {\n //Make server send a TestMessage.\n let mut message = new_fixt_message!(TestMessage);\n message.msg_seq_num = 2;\n message.text = b\"text\".to_vec();\n test_server.send_message(message);\n\n //Confirm text field was excluded by client due to requiring >= FIX50 but default is FIX40.\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.text.len(), 0);\n\n //Make sever send a TestMessage again but force the text field to be sent.\n let mut message = new_fixt_message!(TestMessage2);\n message.msg_seq_num = 
3;\n message.text = b\"text\".to_vec();\n test_server.send_message(message);\n\n //Make sure message is considered invalid.\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::UnknownTag(ref tag) = parse_error { *tag == FieldTag(58) } else { false });\n });\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_appl_ver_id() {\n define_fixt_message!(TestMessage: b\"9999\" => {\n REQUIRED, text: Text [FIX50..],\n });\n\n define_dictionary!(Logon, Reject, TestMessage,);\n\n //Make sure when ApplVerID is specified after the sixth field, Engine responds with an\n //appropriate Reject message and notification.\n {\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Send TestMessage with ApplVerID field as the seventh tag.\n let appl_ver_id_seventh_tag_message = b\"8=FIXT.1.1\\x019=44\\x0135=9999\\x0149=SERVER\\x0156=CLIENT\\x0134=2\\x011128=9\\x0110=000\\x01\";\n let bytes_written = test_server\n .stream\n .write(appl_ver_id_seventh_tag_message)\n .unwrap();\n assert_eq!(bytes_written, appl_ver_id_seventh_tag_message.len());\n\n //Make sure Engine responds with an appropriate reject.\n let message = test_server.recv_message::<Reject>();\n assert_eq!(\n message.session_reject_reason.unwrap(),\n SessionRejectReason::TagSpecifiedOutOfRequiredOrder\n );\n assert_eq!(\n message.text,\n b\"ApplVerID must be the 6th tag if specified\".to_vec()\n );\n\n //Make sure Engine indicates that it rejected the message.\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::ApplVerIDNotSixthTag = parse_error { true } else { false });\n });\n }\n\n //Make sure ApplVerID overrides the default message version set in the initial Logon message.\n {\n //Connect 
and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Send TestMessage with ApplVerID < FIX50 and without text field.\n let mut message = new_fixt_message!(TestMessage);\n message.msg_seq_num = 2;\n message.appl_ver_id = Some(MessageVersion::FIX40);\n test_server.send_message_with_ver(\n FIXVersion::FIXT_1_1,\n message.appl_ver_id.unwrap(),\n message,\n );\n\n //Confirm Engine accepted message correctly.\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.appl_ver_id, Some(MessageVersion::FIX40));\n assert_eq!(message.text.len(), 0);\n\n //Send TestMessage with ApplVerID < FIX50 and with text field.\n let mut message = new_fixt_message!(TestMessage);\n message.msg_seq_num = 3;\n message.appl_ver_id = Some(MessageVersion::FIX40);\n message.text = b\"text\".to_vec();\n test_server.send_message_with_ver(FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, message); //Force text field to be included.\n\n //Confirm Engine rejected message because text field is unsupported for this version.\n let message = test_server.recv_message::<Reject>();\n assert_eq!(\n message.session_reject_reason.unwrap(),\n SessionRejectReason::TagNotDefinedForThisMessageType\n );\n assert_eq!(\n message.text,\n b\"Tag not defined for this message type\".to_vec()\n );\n\n engine_poll_event!(client,EngineEvent::MessageReceivedGarbled(msg_connection,parse_error) => {\n assert_eq!(msg_connection,connection);\n assert!(if let ParseError::UnexpectedTag(ref tag) = parse_error { *tag == Text::tag() } else { false });\n });\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_message_type_default_application_version() {\n define_fixt_message!(TestMessage: b\"9999\" => {\n REQUIRED, text: Text [FIX50SP1..],\n });\n\n define_dictionary!(Logon, Reject, TestMessage,);\n\n //Connect.\n let (mut test_server, mut client, connection) =\n 
TestStream::setup_test_server(build_dictionary());\n\n //Logon.\n let mut logon_message = new_logon_message();\n logon_message.default_appl_ver_id = MessageVersion::FIX50;\n client.send_message_box_with_message_version(\n connection,\n MessageVersion::FIX50SP2,\n Box::new(logon_message),\n );\n let message = test_server.recv_message::<Logon>();\n assert_eq!(message.msg_seq_num, 1);\n\n let mut response_message = new_fixt_message!(Logon);\n response_message.encrypt_method = message.encrypt_method;\n response_message.heart_bt_int = message.heart_bt_int;\n response_message.default_appl_ver_id = message.default_appl_ver_id;\n let mut msg_type_grp = MsgTypeGrp::new();\n msg_type_grp.ref_msg_type = TestMessage::msg_type().to_vec();\n msg_type_grp.ref_appl_ver_id = Some(MessageVersion::FIX50SP1);\n msg_type_grp.msg_direction = MsgDirection::Send;\n msg_type_grp.default_ver_indicator = true;\n response_message.no_msg_types.push(Box::new(msg_type_grp));\n test_server.send_message_with_ver(\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50SP2,\n response_message,\n );\n engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {});\n let message = engine_poll_message!(client, connection, Logon);\n assert_eq!(message.msg_seq_num, 1);\n\n //Make sure specifying a message type specific default application version overrides the\n //default message version.\n {\n //Send TestMessage text field.\n let mut message = new_fixt_message!(TestMessage);\n message.msg_seq_num = 2;\n message.text = b\"test\".to_vec();\n test_server.send_message_with_ver(FIXVersion::FIXT_1_1, MessageVersion::FIX50SP1, message);\n\n //Confirm Engine accepted message correctly.\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(\n message.meta.unwrap().message_version,\n MessageVersion::FIX50SP1\n ); //Set by parser what it parsed message as.\n assert_eq!(message.text, b\"test\");\n }\n\n //Make sure ApplVerID overrides the message type specific default application 
version.\n {\n //Send TestMessage with explicit ApplVerID < FIX50 and without text field.\n let mut message = new_fixt_message!(TestMessage);\n message.msg_seq_num = 3;\n message.appl_ver_id = Some(MessageVersion::FIX40);\n test_server.send_message_with_ver(\n FIXVersion::FIXT_1_1,\n message.appl_ver_id.unwrap(),\n message,\n );\n\n //Confirm Engine accepted message correctly.\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.meta.unwrap().message_version, MessageVersion::FIX40);\n assert_eq!(message.text.len(), 0);\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_respond_to_test_request_immediately_after_logon() {\n //Special processing is required to adjust which messages are acceptable after Logon is\n //received. But the IO processing is level based so the event loop might not be notified of\n //remaining data. This test makes sure the remaining data is processed immediately. In\n //practice, the worst case scenario is some type of timeout would trigger a Heartbeat or a\n //TestRequest that would cause the remaining data to be read.\n\n define_dictionary!(Logon, Heartbeat, TestRequest,);\n\n //Connect to server.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n //Have client send Logon.\n client.send_message_box(connection, Box::new(new_logon_message()));\n let message = test_server.recv_message::<Logon>();\n assert_eq!(message.msg_seq_num, 1);\n\n //Respond with Logon and TestRequest (hopefully) merged into a single TCP packet.\n let mut logon_message = new_fixt_message!(Logon);\n logon_message.msg_seq_num = 1;\n logon_message.encrypt_method = message.encrypt_method;\n logon_message.heart_bt_int = message.heart_bt_int;\n logon_message.default_appl_ver_id = message.default_appl_ver_id;\n\n let mut test_request_message = new_fixt_message!(TestRequest);\n test_request_message.msg_seq_num = 2;\n test_request_message.test_req_id = 
b\"test\".to_vec();\n\n let mut bytes = Vec::new();\n serialize_and_append_message(\n &logon_message,\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50SP2,\n &mut bytes,\n );\n serialize_and_append_message(\n &test_request_message,\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50SP2,\n &mut bytes,\n );\n assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU.\n let bytes_written = test_server.stream.write(&bytes).unwrap();\n assert_eq!(bytes_written, bytes.len());\n\n //Make sure client acknowledges both as normal.\n engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {});\n let message = engine_poll_message!(client, connection, Logon);\n assert_eq!(message.msg_seq_num, 1);\n let message = engine_poll_message!(client, connection, TestRequest);\n assert_eq!(message.msg_seq_num, 2);\n\n let message = test_server.recv_message::<Heartbeat>();\n assert_eq!(message.msg_seq_num, 2);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_respect_default_appl_ver_id_in_test_request_immediately_after_logon() {\n //This is very similar to test_respond_to_test_request_immediately_after_logon() above except\n //it makes sure the DefaultApplVerID is used correctly for the message right after Logon.\n\n define_fixt_message!(TestMessage: b\"9999\" => {\n REQUIRED, text: Text [FIX50SP2..],\n });\n\n define_dictionary!(Logon, Logout, Reject, TestMessage,);\n\n //Connect to server.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n //Have client send Logon.\n let mut logon_message = new_logon_message();\n logon_message.default_appl_ver_id = MessageVersion::FIX50SP2;\n client.send_message_box(connection, Box::new(logon_message));\n let message = test_server.recv_message::<Logon>();\n assert_eq!(message.msg_seq_num, 1);\n\n //Respond with Logon and TestMessage (hopefully) merged into a single TCP packet.\n let mut logon_message = new_fixt_message!(Logon);\n 
logon_message.msg_seq_num = 1;\n logon_message.encrypt_method = message.encrypt_method;\n logon_message.heart_bt_int = message.heart_bt_int;\n logon_message.default_appl_ver_id = message.default_appl_ver_id;\n\n let mut test_message = new_fixt_message!(TestMessage);\n test_message.msg_seq_num = 2;\n test_message.text = b\"test\".to_vec();\n\n let mut bytes = Vec::new();\n serialize_and_append_message(\n &logon_message,\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50SP2,\n &mut bytes,\n );\n serialize_and_append_message(\n &test_message,\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50SP2,\n &mut bytes,\n );\n assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU.\n let bytes_written = test_server.stream.write(&bytes).unwrap();\n assert_eq!(bytes_written, bytes.len());\n\n //Make sure client acknowledges Logon as normal.\n engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {});\n let message = engine_poll_message!(client, connection, Logon);\n assert_eq!(message.msg_seq_num, 1);\n\n //Make sure client applies DefaultApplVerID version to TestMessage so that the Text field is\n //parsed.\n let message = engine_poll_message!(client, connection, TestMessage);\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.text, b\"test\".to_vec());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_logout_and_terminate_wrong_versioned_test_request_immediately_after_logon() {\n //This is very similar to test_respond_to_test_request_immediately_after_logon() above except\n //it makes sure using the wrong FIX version follows the typical Logout and disconnect as\n //expected.\n\n define_dictionary!(Logon, Logout, TestRequest,);\n\n //Connect to server.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n //Have client send Logon.\n client.send_message_box(connection, Box::new(new_logon_message()));\n let message = 
test_server.recv_message::<Logon>();\n assert_eq!(message.msg_seq_num, 1);\n\n //Respond with Logon and TestRequest (hopefully) merged into a single TCP packet.\n let mut logon_message = new_fixt_message!(Logon);\n logon_message.msg_seq_num = 1;\n logon_message.encrypt_method = message.encrypt_method;\n logon_message.heart_bt_int = message.heart_bt_int;\n logon_message.default_appl_ver_id = message.default_appl_ver_id;\n\n let mut test_request_message = new_fixt_message!(TestRequest);\n test_request_message.msg_seq_num = 2;\n test_request_message.test_req_id = b\"test\".to_vec();\n\n let mut bytes = Vec::new();\n serialize_and_append_message(\n &logon_message,\n FIXVersion::FIXT_1_1,\n MessageVersion::FIX50SP2,\n &mut bytes,\n );\n serialize_and_append_message(\n &test_request_message,\n FIXVersion::FIX_4_2,\n MessageVersion::FIX42,\n &mut bytes,\n );\n assert!(bytes.len() < 1400); //Make sure the serialized body is reasonably likely to fit within the MTU.\n let bytes_written = test_server.stream.write(&bytes).unwrap();\n assert_eq!(bytes_written, bytes.len());\n\n //Make sure client acknowledges Logon as normal.\n engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {});\n let message = engine_poll_message!(client, connection, Logon);\n assert_eq!(message.msg_seq_num, 1);\n\n //Make sure Engine sends Logout and then disconnects.\n let message = test_server.recv_message::<Logout>();\n assert_eq!(\n message.text,\n b\"BeginStr is wrong, expected 'FIXT.1.1' but received 'FIX.4.2'\".to_vec()\n );\n\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(\n if let ConnectionTerminatedReason::BeginStrWrongError{received,expected} = reason {\n assert_eq!(received,FIXVersion::FIX_4_2);\n assert_eq!(expected,FIXVersion::FIXT_1_1);\n true\n }\n else {\n false\n }\n );\n });\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_max_message_size() {\n 
const MAX_MESSAGE_SIZE: u64 = 4096;\n\n define_fixt_message!(TestMessage: b\"9999\" => {\n REQUIRED, text: Text [FIX40..],\n });\n\n define_dictionary!(Logon, Logout, Reject, TestMessage,);\n\n fn message_length<T: Message>(message: &T) -> u64 {\n let mut buffer = ByteBuffer::new();\n message.read(FIXVersion::FIXT_1_1, MessageVersion::FIX50SP2, &mut buffer) as u64\n }\n\n //Make sure exceeding the MaxMessageSize in messages after Logon results in a Reject message.\n {\n //Connect to server.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n //Have client send Logon.\n let mut message = new_logon_message();\n message.max_message_size = MAX_MESSAGE_SIZE;\n client.send_message_box(connection, Box::new(message));\n let message = test_server.recv_message::<Logon>();\n assert_eq!(message.msg_seq_num, 1);\n assert_eq!(message.max_message_size, MAX_MESSAGE_SIZE);\n\n //Acknowledge Logon.\n let mut response_message = new_fixt_message!(Logon);\n response_message.encrypt_method = message.encrypt_method;\n response_message.heart_bt_int = message.heart_bt_int;\n response_message.default_appl_ver_id = message.default_appl_ver_id;\n test_server.send_message(response_message);\n engine_poll_event!(client,EngineEvent::SessionEstablished(_) => {});\n let message = engine_poll_message!(client, connection, Logon);\n assert_eq!(message.msg_seq_num, 1);\n\n //Try and send Engine a message exceeding MAX_MESSAGE_SIZE.\n let mut message = new_fixt_message!(TestMessage);\n message.msg_seq_num = 2;\n let current_message_len = message_length(&message);\n for _ in 0..(MAX_MESSAGE_SIZE - current_message_len) + 1 {\n message.text.push(b'A');\n }\n test_server.send_message(message);\n\n //Make sure Engine rejected the message.\n let message = test_server.recv_message::<Reject>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.ref_seq_num, 2);\n assert_eq!(\n message.session_reject_reason.unwrap(),\n SessionRejectReason::Other\n 
);\n let mut expected_error_text = b\"Message size exceeds MaxMessageSize=\".to_vec();\n expected_error_text.extend_from_slice(MAX_MESSAGE_SIZE.to_string().as_bytes());\n assert_eq!(message.text, expected_error_text);\n }\n\n //Make sure exceeding the MaxMessageSize in the Logon response results in the Engine just\n //disconnecting.\n {\n //Connect to server.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server(build_dictionary());\n\n //Have client send Logon.\n let mut message = new_logon_message();\n message.max_message_size = MAX_MESSAGE_SIZE;\n client.send_message_box(connection, Box::new(message));\n let message = test_server.recv_message::<Logon>();\n assert_eq!(message.msg_seq_num, 1);\n assert_eq!(message.max_message_size, MAX_MESSAGE_SIZE);\n\n //Respond with Logon message that exceeds MAX_MESSAGE_SIZE.\n let mut response_message = new_fixt_message!(Logon);\n response_message.encrypt_method = message.encrypt_method.clone();\n response_message.heart_bt_int = message.heart_bt_int;\n response_message.default_appl_ver_id = message.default_appl_ver_id;\n while message_length(&response_message) <= MAX_MESSAGE_SIZE {\n let mut msg_type_grp = MsgTypeGrp::new();\n msg_type_grp.ref_msg_type = b\"L\".to_vec();\n msg_type_grp.ref_appl_ver_id = Some(MessageVersion::FIX50SP1);\n msg_type_grp.msg_direction = MsgDirection::Send;\n response_message.no_msg_types.push(Box::new(msg_type_grp));\n }\n test_server.send_message(response_message);\n\n //Make sure Engine just disconnects.\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::LogonParseError(parse_error) = reason {\n if let ParseError::MessageSizeTooBig = parse_error { true } else { false }\n }\n else {\n false\n });\n });\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_block_read_when_write_blocks() {\n 
define_dictionary!(Logon, Heartbeat, Reject, ResendRequest, TestRequest,);\n\n //Send a bunch of messages to Engine without reading the responses. Engine should stop reading\n //until it can write again.\n {\n //Connect and Logon.\n let (mut test_server, client, _) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Run a background thread to drain client events until they stop. The stopping indicates the\n //client has stopped accepting new messages.\n let client = Arc::new(Mutex::new(client)); //Keep client around even after thread ends.\n let client_clone = client.clone(); //Clone to be passed to thread.\n let thread_running = Arc::new(AtomicBool::new(true));\n let thread_running_clone = thread_running.clone();\n let thread_handle = thread::spawn(move || {\n let mut client = client_clone.lock().unwrap();\n while let Some(event) = client.poll(Duration::from_secs(2)) {\n match event {\n EngineEvent::ConnectionTerminated(_, _) => {\n panic!(\"Engine should not have terminated connection yet.\")\n }\n _ => {}\n }\n }\n\n thread_running_clone.store(false, Ordering::Relaxed);\n });\n\n //Flood client with TestRequest messages until thread notifies that messages are being\n //blocked.\n let mut outbound_msg_seq_num = 2;\n let now = Instant::now();\n let mut stop_writing = false;\n loop {\n if !thread_running.load(Ordering::Relaxed) {\n thread_handle.join().expect(\"Thread must be stopped.\");\n break;\n } else if now.elapsed() > Duration::from_secs(15) {\n panic!(\"Engine never blocked receiving of new messages.\");\n }\n\n if !stop_writing {\n let mut message = new_fixt_message!(TestRequest);\n message.msg_seq_num = outbound_msg_seq_num;\n message.test_req_id = b\"test\".to_vec();\n if let Err(bytes_not_written) =\n test_server.send_message_with_timeout(message, Duration::from_millis(10))\n {\n //Stop writing new messages because TCP indicated that the other side is\n //congested.\n stop_writing = true;\n\n if bytes_not_written > 0 {\n continue;\n 
}\n }\n\n outbound_msg_seq_num += 1;\n }\n }\n\n //Drain server's read buffer.\n loop {\n let message = test_server.recv_message::<Heartbeat>();\n if message.msg_seq_num == outbound_msg_seq_num - 1 {\n break;\n }\n }\n\n //Send gibberish that will force an incomplete message to be discarded.\n let _ = test_server.stream.write(b\"\\x0110=000\\x01=000\");\n\n //Make sure messages continue to flow again.\n let mut message = new_fixt_message!(TestRequest);\n message.msg_seq_num = outbound_msg_seq_num + 1;\n message.test_req_id = b\"final\".to_vec();\n test_server.send_message(message);\n\n let message = test_server.recv_fixt_message();\n let message = match message_to_enum(message) {\n MessageEnum::Heartbeat(message) => message,\n _ => Box::from(test_server.recv_message::<Heartbeat>()),\n };\n assert_eq!(message.test_req_id, b\"final\");\n }\n\n //Same as above but never drain the server's read buffer so the connection must eventually be\n //dropped.\n {\n //Connect and Logon.\n let (mut test_server, mut client, _) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Flood client with TestRequest messages until Engine drops the connection.\n let mut outbound_msg_seq_num = 2;\n let now = Instant::now();\n let mut stop_writing = false;\n loop {\n if now.elapsed() > Duration::from_secs(30) {\n panic!(\"Engine never disconnected.\");\n }\n\n if let Some(EngineEvent::ConnectionTerminated(_, reason)) =\n client.poll(Duration::from_millis(0))\n {\n assert!(\n if let ConnectionTerminatedReason::SocketNotWritableTimeoutError = reason {\n true\n } else {\n false\n }\n );\n assert!(test_server.is_stream_closed(Duration::from_secs(3)));\n\n //Success! 
Engine disconnected.\n break;\n }\n\n if !stop_writing {\n let mut message = new_fixt_message!(TestRequest);\n message.msg_seq_num = outbound_msg_seq_num;\n message.test_req_id = b\"test\".to_vec();\n if let Err(_) =\n test_server.send_message_with_timeout(message, Duration::from_millis(10))\n {\n stop_writing = true;\n }\n\n outbound_msg_seq_num += 1;\n }\n }\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_inbound_resend_loop_detection() {\n define_dictionary!(\n Logon,\n Logout,\n Heartbeat,\n ResendRequest,\n SequenceReset,\n TestRequest,\n );\n\n //Connect and logon.\n let (mut test_server, mut client, connection) =\n TestStream::setup_test_server_and_logon(build_dictionary());\n\n //Have server send TestRequest so Engine responds with a Heartbeat.\n let mut message = new_fixt_message!(TestRequest);\n message.msg_seq_num = 2;\n message.test_req_id = b\"test\".to_vec();\n test_server.send_message(message);\n engine_poll_message!(client, connection, TestRequest);\n let message = test_server.recv_message::<Heartbeat>();\n assert_eq!(message.msg_seq_num, 2);\n assert_eq!(message.test_req_id, b\"test\");\n\n //Have server ignore the Heartbeat response by sending ResendRequest a few times. 
The client\n //should eventually logout and disconnect.\n const BASE_MSG_SEQ_NUM: u64 = 3;\n for x in 0..AUTO_DISCONNECT_AFTER_INBOUND_RESEND_REQUEST_LOOP_COUNT {\n let mut message = new_fixt_message!(ResendRequest);\n message.msg_seq_num = BASE_MSG_SEQ_NUM + x;\n message.begin_seq_no = 2;\n message.end_seq_no = 0;\n test_server.send_message(message);\n\n engine_gap_fill_resend_request!(client, connection, 2..3);\n let _ = engine_poll_message!(client, connection, ResendRequest);\n\n let message = test_server.recv_message::<SequenceReset>();\n assert_eq!(message.gap_fill_flag, true);\n assert_eq!(message.new_seq_no, 3);\n assert_eq!(message.msg_seq_num, 2);\n }\n\n let mut message = new_fixt_message!(ResendRequest);\n message.msg_seq_num =\n BASE_MSG_SEQ_NUM + AUTO_DISCONNECT_AFTER_INBOUND_RESEND_REQUEST_LOOP_COUNT;\n message.begin_seq_no = 2;\n message.end_seq_no = 0;\n test_server.send_message(message);\n\n let message = test_server.recv_message::<Logout>();\n assert_eq!(\n message.text,\n b\"Detected ResendRequest loop for BeginSeqNo 2\".to_vec()\n );\n\n engine_poll_event!(client,EngineEvent::ConnectionTerminated(terminated_connection,reason) => {\n assert_eq!(terminated_connection,connection);\n assert!(if let ConnectionTerminatedReason::InboundResendRequestLoopError = reason { true } else { false });\n });\n assert!(test_server.is_stream_closed(Duration::from_secs(3)));\n}\n}" ]
f703b5e959ca4ddfd38c560d802b75e29f041540
3,520
rs
Rust
compiler/crates/schema/tests/build_schema_test.rs
erictaylor/relay
fa1c75d263b8257cfbea541e8bcfa1bc69e75367
[ "MIT" ]
null
null
null
compiler/crates/schema/tests/build_schema_test.rs
erictaylor/relay
fa1c75d263b8257cfbea541e8bcfa1bc69e75367
[ "MIT" ]
null
null
null
compiler/crates/schema/tests/build_schema_test.rs
erictaylor/relay
fa1c75d263b8257cfbea541e8bcfa1bc69e75367
[ "MIT" ]
null
null
null
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @generated SignedSource<<d7c16c29bd10615430a1b09fe261a111>> */ mod build_schema; use build_schema::transform_fixture; use fixture_tests::test_fixture; #[test] fn directives_for_external_types() { let input = include_str!("build_schema/fixtures/directives-for-external-types.graphql"); let expected = include_str!("build_schema/fixtures/directives-for-external-types.expected"); test_fixture(transform_fixture, "directives-for-external-types.graphql", "build_schema/fixtures/directives-for-external-types.expected", input, expected); } #[test] fn interface_implements_interface() { let input = include_str!("build_schema/fixtures/interface-implements-interface.graphql"); let expected = include_str!("build_schema/fixtures/interface-implements-interface.expected"); test_fixture(transform_fixture, "interface-implements-interface.graphql", "build_schema/fixtures/interface-implements-interface.expected", input, expected); } #[test] fn invalid_implements_non_interface() { let input = include_str!("build_schema/fixtures/invalid-implements-non-interface.graphql"); let expected = include_str!("build_schema/fixtures/invalid-implements-non-interface.expected"); test_fixture(transform_fixture, "invalid-implements-non-interface.graphql", "build_schema/fixtures/invalid-implements-non-interface.expected", input, expected); } #[test] fn invalid_object_extension_duplicated_server_field() { let input = include_str!("build_schema/fixtures/invalid-object-extension-duplicated-server-field.graphql"); let expected = include_str!("build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected"); test_fixture(transform_fixture, "invalid-object-extension-duplicated-server-field.graphql", "build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected", input, expected); } #[test] fn 
invalid_object_extension_local_duplicated_fields() { let input = include_str!("build_schema/fixtures/invalid-object-extension-local-duplicated-fields.graphql"); let expected = include_str!("build_schema/fixtures/invalid-object-extension-local-duplicated-fields.expected"); test_fixture(transform_fixture, "invalid-object-extension-local-duplicated-fields.graphql", "build_schema/fixtures/invalid-object-extension-local-duplicated-fields.expected", input, expected); } #[test] fn invalid_sdl() { let input = include_str!("build_schema/fixtures/invalid-sdl.graphql"); let expected = include_str!("build_schema/fixtures/invalid-sdl.expected"); test_fixture(transform_fixture, "invalid-sdl.graphql", "build_schema/fixtures/invalid-sdl.expected", input, expected); } #[test] fn invalid_type_reference() { let input = include_str!("build_schema/fixtures/invalid-type-reference.graphql"); let expected = include_str!("build_schema/fixtures/invalid-type-reference.expected"); test_fixture(transform_fixture, "invalid-type-reference.graphql", "build_schema/fixtures/invalid-type-reference.expected", input, expected); } #[test] fn kitchen_sink() { let input = include_str!("build_schema/fixtures/kitchen-sink.graphql"); let expected = include_str!("build_schema/fixtures/kitchen-sink.expected"); test_fixture(transform_fixture, "kitchen-sink.graphql", "build_schema/fixtures/kitchen-sink.expected", input, expected); }
50.285714
196
0.785795
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn directives_for_external_types() {\n let input = include_str!(\"build_schema/fixtures/directives-for-external-types.graphql\");\n let expected = include_str!(\"build_schema/fixtures/directives-for-external-types.expected\");\n test_fixture(transform_fixture, \"directives-for-external-types.graphql\", \"build_schema/fixtures/directives-for-external-types.expected\", input, expected);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn interface_implements_interface() {\n let input = include_str!(\"build_schema/fixtures/interface-implements-interface.graphql\");\n let expected = include_str!(\"build_schema/fixtures/interface-implements-interface.expected\");\n test_fixture(transform_fixture, \"interface-implements-interface.graphql\", \"build_schema/fixtures/interface-implements-interface.expected\", input, expected);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_implements_non_interface() {\n let input = include_str!(\"build_schema/fixtures/invalid-implements-non-interface.graphql\");\n let expected = include_str!(\"build_schema/fixtures/invalid-implements-non-interface.expected\");\n test_fixture(transform_fixture, \"invalid-implements-non-interface.graphql\", \"build_schema/fixtures/invalid-implements-non-interface.expected\", input, expected);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_object_extension_duplicated_server_field() {\n let input = include_str!(\"build_schema/fixtures/invalid-object-extension-duplicated-server-field.graphql\");\n let expected = include_str!(\"build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected\");\n test_fixture(transform_fixture, \"invalid-object-extension-duplicated-server-field.graphql\", \"build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected\", input, expected);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn 
invalid_object_extension_local_duplicated_fields() {\n let input = include_str!(\"build_schema/fixtures/invalid-object-extension-local-duplicated-fields.graphql\");\n let expected = include_str!(\"build_schema/fixtures/invalid-object-extension-local-duplicated-fields.expected\");\n test_fixture(transform_fixture, \"invalid-object-extension-local-duplicated-fields.graphql\", \"build_schema/fixtures/invalid-object-extension-local-duplicated-fields.expected\", input, expected);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_sdl() {\n let input = include_str!(\"build_schema/fixtures/invalid-sdl.graphql\");\n let expected = include_str!(\"build_schema/fixtures/invalid-sdl.expected\");\n test_fixture(transform_fixture, \"invalid-sdl.graphql\", \"build_schema/fixtures/invalid-sdl.expected\", input, expected);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_type_reference() {\n let input = include_str!(\"build_schema/fixtures/invalid-type-reference.graphql\");\n let expected = include_str!(\"build_schema/fixtures/invalid-type-reference.expected\");\n test_fixture(transform_fixture, \"invalid-type-reference.graphql\", \"build_schema/fixtures/invalid-type-reference.expected\", input, expected);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn kitchen_sink() {\n let input = include_str!(\"build_schema/fixtures/kitchen-sink.graphql\");\n let expected = include_str!(\"build_schema/fixtures/kitchen-sink.expected\");\n test_fixture(transform_fixture, \"kitchen-sink.graphql\", \"build_schema/fixtures/kitchen-sink.expected\", input, expected);\n}\n}" ]
f703ec840c718d32d516a93b9d896a40e16baf40
3,470
rs
Rust
src/endpoints/dns/edit.rs
muchobien/porkbun-rs
a5773ffedf1a761da22907bb3c7ccb1cccb08bcb
[ "MIT" ]
2
2021-06-04T15:00:03.000Z
2021-07-17T10:51:05.000Z
src/endpoints/dns/edit.rs
muchobien/porkbun-rs
a5773ffedf1a761da22907bb3c7ccb1cccb08bcb
[ "MIT" ]
1
2021-06-04T19:36:09.000Z
2021-06-04T19:36:09.000Z
src/endpoints/dns/edit.rs
muchobien/porkbun-rs
a5773ffedf1a761da22907bb3c7ccb1cccb08bcb
[ "MIT" ]
null
null
null
use super::{fill_body_with_record, DnsContent}; use crate::api::Endpoint; use derive_builder::Builder; use http::Method; use serde_json::{Map, Value}; use std::borrow::Cow; #[derive(Debug, Builder)] #[builder(setter(strip_option))] pub struct EditDns<'a> { #[builder(setter(into))] record: DnsContent, #[builder(setter(into))] domain: Cow<'a, str>, #[builder(setter(into))] id: Cow<'a, str>, #[builder(default)] ttl: Option<u32>, #[builder(setter(into), default)] name: Option<Cow<'a, str>>, } impl<'a> EditDns<'a> { pub fn builder() -> EditDnsBuilder<'a> { EditDnsBuilder::default() } } impl<'a> Endpoint for EditDns<'a> { fn method(&self) -> Method { Method::POST } fn endpoint(&self) -> Cow<'static, str> { format!("dns/edit/{}/{}", self.domain, self.id).into() } fn body(&self) -> Map<String, Value> { let mut body = Map::default(); if let Some(name) = &self.name { body.insert("name".into(), name.to_string().into()); } if let Some(ttl) = self.ttl { body.insert("ttl".into(), ttl.to_string().into()); } fill_body_with_record(&mut body, &self.record); body } } #[cfg(test)] mod tests { use http::Method; use serde_json::json; use crate::{ api::{self, Query}, endpoints::{DnsContent, EditDns}, test::client::{ExpectedUrl, SingleTestClient}, }; #[test] fn record_is_necessary() { let err = EditDns::builder().build().unwrap_err(); assert_eq!("`record` must be initialized", err.to_string()) } #[test] fn domain_is_necessary() { let err = EditDns::builder() .record(DnsContent::Cname { content: "".to_string(), }) .build() .unwrap_err(); assert_eq!("`domain` must be initialized", err.to_string()) } #[test] fn id_is_necessary() { let err = EditDns::builder() .domain("") .record(DnsContent::Cname { content: "".to_string(), }) .build() .unwrap_err(); assert_eq!("`id` must be initialized", err.to_string()) } #[test] fn domain_record_and_id_are_sufficient() { EditDns::builder() .id("1234") .domain("example.com") .record(DnsContent::Cname { content: "".to_string(), }) .build() .unwrap(); } 
#[test] fn endpoint() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("dns/edit/example.com/1234") .content_type("application/json") .body_json(&json!({ "name": "*", "ttl": "600", "prio": "600", "type": "MX", "content": "cnameCname", })) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = EditDns::builder() .id("1234") .domain("example.com") .name("*") .record(DnsContent::Mx { priority: 600, content: "cnameCname".to_string(), }) .ttl(600) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } }
25.144928
67
0.497406
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn record_is_necessary() {\n let err = EditDns::builder().build().unwrap_err();\n assert_eq!(\"`record` must be initialized\", err.to_string())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn domain_is_necessary() {\n let err = EditDns::builder()\n .record(DnsContent::Cname {\n content: \"\".to_string(),\n })\n .build()\n .unwrap_err();\n assert_eq!(\"`domain` must be initialized\", err.to_string())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn id_is_necessary() {\n let err = EditDns::builder()\n .domain(\"\")\n .record(DnsContent::Cname {\n content: \"\".to_string(),\n })\n .build()\n .unwrap_err();\n assert_eq!(\"`id` must be initialized\", err.to_string())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn domain_record_and_id_are_sufficient() {\n EditDns::builder()\n .id(\"1234\")\n .domain(\"example.com\")\n .record(DnsContent::Cname {\n content: \"\".to_string(),\n })\n .build()\n .unwrap();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn endpoint() {\n let endpoint = ExpectedUrl::builder()\n .method(Method::POST)\n .endpoint(\"dns/edit/example.com/1234\")\n .content_type(\"application/json\")\n .body_json(&json!({\n \"name\": \"*\",\n \"ttl\": \"600\",\n \"prio\": \"600\",\n \"type\": \"MX\",\n \"content\": \"cnameCname\",\n }))\n .build()\n .unwrap();\n let client = SingleTestClient::new_raw(endpoint, \"\");\n\n let endpoint = EditDns::builder()\n .id(\"1234\")\n .domain(\"example.com\")\n .name(\"*\")\n .record(DnsContent::Mx {\n priority: 600,\n content: \"cnameCname\".to_string(),\n })\n .ttl(600)\n .build()\n .unwrap();\n\n api::ignore(endpoint).query(&client).unwrap();\n }\n}" ]
f703fe03c06d740e5e4f8540ea77fb01065cea1e
4,721
rs
Rust
src/modifiers.rs
spaceapi-community/spaceapi-server-rs
6ac36d895866291f96c9f7d83a87768de4e46b67
[ "Apache-2.0", "MIT" ]
1
2019-07-08T20:08:16.000Z
2019-07-08T20:08:16.000Z
src/modifiers.rs
spaceapi-community/spaceapi-server-rs
6ac36d895866291f96c9f7d83a87768de4e46b67
[ "Apache-2.0", "MIT" ]
29
2018-01-21T23:37:59.000Z
2021-08-29T14:52:29.000Z
src/modifiers.rs
spaceapi-community/spaceapi-server-rs
6ac36d895866291f96c9f7d83a87768de4e46b67
[ "Apache-2.0", "MIT" ]
4
2018-10-11T09:38:56.000Z
2019-11-15T14:49:03.000Z
//! Modifiers which can be injected by the application logic to change the //! state dynamically per request. use crate::api; /// `StatusModifier`s are used to modify the status pub trait StatusModifier: Send + Sync { /// Called after all registered sensors are read fn modify(&self, status: &mut api::Status); } /// This modifier updates the opening state based on the /// first people now present sensor (if present). pub struct StateFromPeopleNowPresent; impl StatusModifier for StateFromPeopleNowPresent { fn modify(&self, status: &mut api::Status) { // Update state depending on number of people present let people_now_present: Option<u64> = status .sensors .as_ref() .and_then(|sensors: &api::Sensors| sensors.people_now_present.first()) .map(|sensor: &api::PeopleNowPresentSensor| sensor.value); if let Some(count) = people_now_present { let mut state = status.state.clone().unwrap_or_default(); state.open = Some(count > 0); // comparison chain is actually cleaner here IMO #[allow(clippy::comparison_chain)] if count == 1 { state.message = Some(format!("{} person here right now", count)); } else if count > 1 { state.message = Some(format!("{} people here right now", count)); } status.state = Some(state); } } } #[cfg(test)] mod tests { use super::*; mod state_from_people_now_present { use super::*; #[test] fn no_sensors() { let mut status = api::Status { sensors: None, ..api::Status::default() }; assert_eq!(status.state, None); StateFromPeopleNowPresent.modify(&mut status); assert_eq!(status.sensors, None); assert_eq!(status.state, None); } #[test] fn no_people_present_sensor() { let mut status = api::Status { sensors: Some(api::Sensors { people_now_present: vec![], temperature: vec![], }), ..api::Status::default() }; assert_eq!(status.state, None); StateFromPeopleNowPresent.modify(&mut status); assert_eq!(status.state, None); } fn make_pnp_sensor(value: u64) -> api::PeopleNowPresentSensor { api::PeopleNowPresentSensor { location: None, name: None, names: None, 
description: None, value, } } #[test] fn zero_people_present() { let mut status = api::Status { sensors: Some(api::Sensors { people_now_present: vec![make_pnp_sensor(0)], temperature: vec![], }), state: Some(api::State::default()), ..api::Status::default() }; status.state.as_mut().unwrap().message = Some("This will remain unchanged.".to_string()); assert_eq!( status.state.as_ref().unwrap().message, Some("This will remain unchanged.".to_string()) ); StateFromPeopleNowPresent.modify(&mut status); assert_eq!( status.state.unwrap().message, Some("This will remain unchanged.".to_string()) ); } #[test] fn one_person_present() { let mut status = api::Status { sensors: Some(api::Sensors { people_now_present: vec![make_pnp_sensor(1)], temperature: vec![], }), ..api::Status::default() }; assert_eq!(status.state, None); StateFromPeopleNowPresent.modify(&mut status); assert_eq!( status.state.unwrap().message, Some("1 person here right now".to_string()) ); } #[test] fn two_people_present() { let mut status = api::Status { sensors: Some(api::Sensors { people_now_present: vec![make_pnp_sensor(2)], temperature: vec![], }), ..api::Status::default() }; assert_eq!(status.state, None); StateFromPeopleNowPresent.modify(&mut status); assert_eq!( status.state.as_ref().unwrap().message, Some("2 people here right now".to_string()) ); } } }
33.964029
101
0.512603
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn no_sensors() {\n let mut status = api::Status {\n sensors: None,\n ..api::Status::default()\n };\n assert_eq!(status.state, None);\n StateFromPeopleNowPresent.modify(&mut status);\n assert_eq!(status.sensors, None);\n assert_eq!(status.state, None);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn no_people_present_sensor() {\n let mut status = api::Status {\n sensors: Some(api::Sensors {\n people_now_present: vec![],\n temperature: vec![],\n }),\n ..api::Status::default()\n };\n assert_eq!(status.state, None);\n StateFromPeopleNowPresent.modify(&mut status);\n assert_eq!(status.state, None);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn zero_people_present() {\n let mut status = api::Status {\n sensors: Some(api::Sensors {\n people_now_present: vec![make_pnp_sensor(0)],\n temperature: vec![],\n }),\n state: Some(api::State::default()),\n ..api::Status::default()\n };\n status.state.as_mut().unwrap().message = Some(\"This will remain unchanged.\".to_string());\n assert_eq!(\n status.state.as_ref().unwrap().message,\n Some(\"This will remain unchanged.\".to_string())\n );\n StateFromPeopleNowPresent.modify(&mut status);\n assert_eq!(\n status.state.unwrap().message,\n Some(\"This will remain unchanged.\".to_string())\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn one_person_present() {\n let mut status = api::Status {\n sensors: Some(api::Sensors {\n people_now_present: vec![make_pnp_sensor(1)],\n temperature: vec![],\n }),\n ..api::Status::default()\n };\n assert_eq!(status.state, None);\n StateFromPeopleNowPresent.modify(&mut status);\n assert_eq!(\n status.state.unwrap().message,\n Some(\"1 person here right now\".to_string())\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn two_people_present() {\n let mut status = api::Status {\n sensors: Some(api::Sensors {\n people_now_present: vec![make_pnp_sensor(2)],\n 
temperature: vec![],\n }),\n ..api::Status::default()\n };\n assert_eq!(status.state, None);\n StateFromPeopleNowPresent.modify(&mut status);\n assert_eq!(\n status.state.as_ref().unwrap().message,\n Some(\"2 people here right now\".to_string())\n );\n }\n}" ]
f703fe3c7ad8b0a87c38e289b2eef085f76ea509
13,942
rs
Rust
src/mmap_unix.rs
gc-plp/vm-memory
a481693670c2954b81e65b8762fa40f61404976c
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
src/mmap_unix.rs
gc-plp/vm-memory
a481693670c2954b81e65b8762fa40f61404976c
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
src/mmap_unix.rs
gc-plp/vm-memory
a481693670c2954b81e65b8762fa40f61404976c
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
// Copyright (C) 2019 Alibaba Cloud Computing. All rights reserved. // // Portions Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. // // Portions Copyright 2017 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE-BSD-3-Clause file. // // SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause //! Helper structure for working with mmaped memory regions in Unix. use std::error; use std::fmt; use std::io; use std::os::unix::io::AsRawFd; use std::ptr::null_mut; use std::result; use libc; use crate::guest_memory::FileOffset; use crate::mmap::{check_file_offset, AsSlice}; use crate::volatile_memory::{self, compute_offset, VolatileMemory, VolatileSlice}; /// Error conditions that may arise when creating a new `MmapRegion` object. #[derive(Debug)] pub enum Error { /// The specified file offset and length cause overflow when added. InvalidOffsetLength, /// The forbidden `MAP_FIXED` flag was specified. MapFixed, /// Mappings using the same fd overlap in terms of file offset and length. MappingOverlap, /// A mapping with offset + length > EOF was attempted. MappingPastEof, /// The `mmap` call returned an error. Mmap(io::Error), } impl fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> fmt::Result { match self { Error::InvalidOffsetLength => write!( f, "The specified file offset and length cause overflow when added" ), Error::MapFixed => write!(f, "The forbidden `MAP_FIXED` flag was specified"), Error::MappingOverlap => write!( f, "Mappings using the same fd overlap in terms of file offset and length" ), Error::MappingPastEof => write!( f, "The specified file offset and length is greater then file length" ), Error::Mmap(error) => write!(f, "{}", error), } } } impl error::Error for Error {} pub type Result<T> = result::Result<T, Error>; /// Helper structure for working with mmaped memory regions in Unix. 
/// /// The structure is used for accessing the guest's physical memory by mmapping it into /// the current process. /// /// # Limitations /// When running a 64-bit virtual machine on a 32-bit hypervisor, only part of the guest's /// physical memory may be mapped into the current process due to the limited virtual address /// space size of the process. #[derive(Debug)] pub struct MmapRegion { addr: *mut u8, size: usize, file_offset: Option<FileOffset>, prot: i32, flags: i32, } // Send and Sync aren't automatically inherited for the raw address pointer. // Accessing that pointer is only done through the stateless interface which // allows the object to be shared by multiple threads without a decrease in // safety. unsafe impl Send for MmapRegion {} unsafe impl Sync for MmapRegion {} impl MmapRegion { /// Creates a shared anonymous mapping of `size` bytes. /// /// # Arguments /// * `size` - The size of the memory region in bytes. pub fn new(size: usize) -> Result<Self> { Self::build( None, size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_ANONYMOUS | libc::MAP_NORESERVE | libc::MAP_PRIVATE | libc::MAP_HUGETLB, ) } /// Creates a shared file mapping of `size` bytes. /// /// # Arguments /// * `file_offset` - The mapping will be created at offset `file_offset.start` in the file /// referred to by `file_offset.file`. /// * `size` - The size of the memory region in bytes. pub fn from_file(file_offset: FileOffset, size: usize) -> Result<Self> { Self::build( Some(file_offset), size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_NORESERVE | libc::MAP_SHARED, ) } /// Creates a mapping based on the provided arguments. /// /// # Arguments /// * `file_offset` - if provided, the method will create a file mapping at offset /// `file_offset.start` in the file referred to by `file_offset.file`. /// * `size` - The size of the memory region in bytes. /// * `prot` - The desired memory protection of the mapping. 
/// * `flags` - This argument determines whether updates to the mapping are visible to other /// processes mapping the same region, and whether updates are carried through to /// the underlying file. pub fn build( file_offset: Option<FileOffset>, size: usize, prot: i32, flags: i32, ) -> Result<Self> { // Forbid MAP_FIXED, as it doesn't make sense in this context, and is pretty dangerous // in general. if flags & libc::MAP_FIXED != 0 { return Err(Error::MapFixed); } let (fd, offset) = if let Some(ref f_off) = file_offset { check_file_offset(f_off, size)?; (f_off.file().as_raw_fd(), f_off.start()) } else { (-1, 0) }; // This is safe because we're not allowing MAP_FIXED, and invalid parameters cannot break // Rust safety guarantees (things may change if we're mapping /dev/mem or some wacky file). let addr = unsafe { libc::mmap(null_mut(), size, prot, flags, fd, offset as libc::off_t) }; if addr == libc::MAP_FAILED { return Err(Error::Mmap(io::Error::last_os_error())); } Ok(Self { addr: addr as *mut u8, size, file_offset, prot, flags, }) } /// Returns a pointer to the beginning of the memory region. /// /// Should only be used for passing this region to ioctls for setting guest memory. pub fn as_ptr(&self) -> *mut u8 { self.addr } /// Returns the size of this region. pub fn size(&self) -> usize { self.size } /// Returns information regarding the offset into the file backing this region (if any). pub fn file_offset(&self) -> Option<&FileOffset> { self.file_offset.as_ref() } /// Returns the value of the `prot` parameter passed to `mmap` when mapping this region. pub fn prot(&self) -> i32 { self.prot } /// Returns the value of the `flags` parameter passed to `mmap` when mapping this region. pub fn flags(&self) -> i32 { self.flags } /// Checks whether this region and `other` are backed by overlapping /// [`FileOffset`](struct.FileOffset.html) objects. /// /// This is mostly a sanity check available for convenience, as different file descriptors /// can alias the same file. 
pub fn fds_overlap(&self, other: &MmapRegion) -> bool { if let Some(f_off1) = self.file_offset() { if let Some(f_off2) = other.file_offset() { if f_off1.file().as_raw_fd() == f_off2.file().as_raw_fd() { let s1 = f_off1.start(); let s2 = f_off2.start(); let l1 = self.len() as u64; let l2 = other.len() as u64; if s1 < s2 { return s1 + l1 > s2; } else { return s2 + l2 > s1; } } } } false } } impl AsSlice for MmapRegion { unsafe fn as_slice(&self) -> &[u8] { // This is safe because we mapped the area at addr ourselves, so this slice will not // overflow. However, it is possible to alias. std::slice::from_raw_parts(self.addr, self.size) } #[allow(clippy::mut_from_ref)] unsafe fn as_mut_slice(&self) -> &mut [u8] { // This is safe because we mapped the area at addr ourselves, so this slice will not // overflow. However, it is possible to alias. std::slice::from_raw_parts_mut(self.addr, self.size) } } impl VolatileMemory for MmapRegion { fn len(&self) -> usize { self.size } fn get_slice(&self, offset: usize, count: usize) -> volatile_memory::Result<VolatileSlice> { let end = compute_offset(offset, count)?; if end > self.size { return Err(volatile_memory::Error::OutOfBounds { addr: end }); } // Safe because we checked that offset + count was within our range and we only ever hand // out volatile accessors. Ok(unsafe { VolatileSlice::new((self.addr as usize + offset) as *mut _, count) }) } } impl Drop for MmapRegion { fn drop(&mut self) { // This is safe because we mmap the area at addr ourselves, and nobody // else is holding a reference to it. unsafe { libc::munmap(self.addr as *mut libc::c_void, self.size); } } } #[cfg(test)] mod tests { use super::*; use std::io::Write; use std::slice; use std::sync::Arc; use vmm_sys_util::tempfile::TempFile; // Adding a helper method to extract the errno within an Error::Mmap(e), or return a // distinctive value when the error is represented by another variant. 
impl Error { pub fn raw_os_error(&self) -> i32 { match self { Error::Mmap(e) => e.raw_os_error().unwrap(), _ => std::i32::MIN, } } } #[test] fn test_mmap_region_new() { assert!(MmapRegion::new(0).is_err()); let size = 4096; let r = MmapRegion::new(4096).unwrap(); assert_eq!(r.size(), size); assert!(r.file_offset().is_none()); assert_eq!(r.prot(), libc::PROT_READ | libc::PROT_WRITE); assert_eq!( r.flags(), libc::MAP_ANONYMOUS | libc::MAP_NORESERVE | libc::MAP_PRIVATE ); } #[test] fn test_mmap_region_from_file() { let mut f = TempFile::new().unwrap().into_file(); let offset: usize = 0; let buf1 = [1u8, 2, 3, 4, 5]; f.write_all(buf1.as_ref()).unwrap(); let r = MmapRegion::from_file(FileOffset::new(f, offset as u64), buf1.len()).unwrap(); assert_eq!(r.size(), buf1.len() - offset); assert_eq!(r.file_offset().unwrap().start(), offset as u64); assert_eq!(r.prot(), libc::PROT_READ | libc::PROT_WRITE); assert_eq!(r.flags(), libc::MAP_NORESERVE | libc::MAP_SHARED); let buf2 = unsafe { slice::from_raw_parts(r.as_ptr(), buf1.len() - offset) }; assert_eq!(&buf1[offset..], buf2); } #[test] fn test_mmap_region_build() { let a = Arc::new(TempFile::new().unwrap().into_file()); let prot = libc::PROT_READ | libc::PROT_WRITE; let flags = libc::MAP_NORESERVE | libc::MAP_PRIVATE; let offset = 4096; let size = 1000; // Offset + size will overflow. let r = MmapRegion::build( Some(FileOffset::from_arc(a.clone(), std::u64::MAX)), size, prot, flags, ); assert_eq!(format!("{:?}", r.unwrap_err()), "InvalidOffsetLength"); // Offset + size is greater than the size of the file (which is 0 at this point). let r = MmapRegion::build( Some(FileOffset::from_arc(a.clone(), offset)), size, prot, flags, ); assert_eq!(format!("{:?}", r.unwrap_err()), "MappingPastEof"); // MAP_FIXED was specified among the flags. let r = MmapRegion::build( Some(FileOffset::from_arc(a.clone(), offset)), size, prot, flags | libc::MAP_FIXED, ); assert_eq!(format!("{:?}", r.unwrap_err()), "MapFixed"); // Let's resize the file. 
assert_eq!(unsafe { libc::ftruncate(a.as_raw_fd(), 1024 * 10) }, 0); // The offset is not properly aligned. let r = MmapRegion::build( Some(FileOffset::from_arc(a.clone(), offset - 1)), size, prot, flags, ); assert_eq!(r.unwrap_err().raw_os_error(), libc::EINVAL); // The build should be successful now. let r = MmapRegion::build( Some(FileOffset::from_arc(a.clone(), offset)), size, prot, flags, ) .unwrap(); assert_eq!(r.size(), size); assert_eq!(r.file_offset().unwrap().start(), offset as u64); assert_eq!(r.prot(), libc::PROT_READ | libc::PROT_WRITE); assert_eq!(r.flags(), libc::MAP_NORESERVE | libc::MAP_PRIVATE); } #[test] fn test_mmap_region_fds_overlap() { let a = Arc::new(TempFile::new().unwrap().into_file()); assert_eq!(unsafe { libc::ftruncate(a.as_raw_fd(), 1024 * 10) }, 0); let r1 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 0), 4096).unwrap(); let r2 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 4096), 4096).unwrap(); assert!(!r1.fds_overlap(&r2)); let r1 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 0), 5000).unwrap(); assert!(r1.fds_overlap(&r2)); let r2 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 0), 1000).unwrap(); assert!(r1.fds_overlap(&r2)); // Different files, so there's not overlap. let new_file = TempFile::new().unwrap().into_file(); // Resize before mapping. assert_eq!( unsafe { libc::ftruncate(new_file.as_raw_fd(), 1024 * 10) }, 0 ); let r2 = MmapRegion::from_file(FileOffset::new(new_file, 0), 5000).unwrap(); assert!(!r1.fds_overlap(&r2)); // R2 is not file backed, so no overlap. let r2 = MmapRegion::new(5000).unwrap(); assert!(!r1.fds_overlap(&r2)); } }
34.171569
99
0.581265
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_mmap_region_new() {\n assert!(MmapRegion::new(0).is_err());\n\n let size = 4096;\n\n let r = MmapRegion::new(4096).unwrap();\n assert_eq!(r.size(), size);\n assert!(r.file_offset().is_none());\n assert_eq!(r.prot(), libc::PROT_READ | libc::PROT_WRITE);\n assert_eq!(\n r.flags(),\n libc::MAP_ANONYMOUS | libc::MAP_NORESERVE | libc::MAP_PRIVATE\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_mmap_region_from_file() {\n let mut f = TempFile::new().unwrap().into_file();\n let offset: usize = 0;\n let buf1 = [1u8, 2, 3, 4, 5];\n\n f.write_all(buf1.as_ref()).unwrap();\n let r = MmapRegion::from_file(FileOffset::new(f, offset as u64), buf1.len()).unwrap();\n\n assert_eq!(r.size(), buf1.len() - offset);\n assert_eq!(r.file_offset().unwrap().start(), offset as u64);\n assert_eq!(r.prot(), libc::PROT_READ | libc::PROT_WRITE);\n assert_eq!(r.flags(), libc::MAP_NORESERVE | libc::MAP_SHARED);\n\n let buf2 = unsafe { slice::from_raw_parts(r.as_ptr(), buf1.len() - offset) };\n assert_eq!(&buf1[offset..], buf2);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_mmap_region_build() {\n let a = Arc::new(TempFile::new().unwrap().into_file());\n\n let prot = libc::PROT_READ | libc::PROT_WRITE;\n let flags = libc::MAP_NORESERVE | libc::MAP_PRIVATE;\n let offset = 4096;\n let size = 1000;\n\n // Offset + size will overflow.\n let r = MmapRegion::build(\n Some(FileOffset::from_arc(a.clone(), std::u64::MAX)),\n size,\n prot,\n flags,\n );\n assert_eq!(format!(\"{:?}\", r.unwrap_err()), \"InvalidOffsetLength\");\n\n // Offset + size is greater than the size of the file (which is 0 at this point).\n let r = MmapRegion::build(\n Some(FileOffset::from_arc(a.clone(), offset)),\n size,\n prot,\n flags,\n );\n assert_eq!(format!(\"{:?}\", r.unwrap_err()), \"MappingPastEof\");\n\n // MAP_FIXED was specified among the flags.\n let r = MmapRegion::build(\n 
Some(FileOffset::from_arc(a.clone(), offset)),\n size,\n prot,\n flags | libc::MAP_FIXED,\n );\n assert_eq!(format!(\"{:?}\", r.unwrap_err()), \"MapFixed\");\n\n // Let's resize the file.\n assert_eq!(unsafe { libc::ftruncate(a.as_raw_fd(), 1024 * 10) }, 0);\n\n // The offset is not properly aligned.\n let r = MmapRegion::build(\n Some(FileOffset::from_arc(a.clone(), offset - 1)),\n size,\n prot,\n flags,\n );\n assert_eq!(r.unwrap_err().raw_os_error(), libc::EINVAL);\n\n // The build should be successful now.\n let r = MmapRegion::build(\n Some(FileOffset::from_arc(a.clone(), offset)),\n size,\n prot,\n flags,\n )\n .unwrap();\n\n assert_eq!(r.size(), size);\n assert_eq!(r.file_offset().unwrap().start(), offset as u64);\n assert_eq!(r.prot(), libc::PROT_READ | libc::PROT_WRITE);\n assert_eq!(r.flags(), libc::MAP_NORESERVE | libc::MAP_PRIVATE);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_mmap_region_fds_overlap() {\n let a = Arc::new(TempFile::new().unwrap().into_file());\n assert_eq!(unsafe { libc::ftruncate(a.as_raw_fd(), 1024 * 10) }, 0);\n\n let r1 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 0), 4096).unwrap();\n let r2 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 4096), 4096).unwrap();\n assert!(!r1.fds_overlap(&r2));\n\n let r1 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 0), 5000).unwrap();\n assert!(r1.fds_overlap(&r2));\n\n let r2 = MmapRegion::from_file(FileOffset::from_arc(a.clone(), 0), 1000).unwrap();\n assert!(r1.fds_overlap(&r2));\n\n // Different files, so there's not overlap.\n let new_file = TempFile::new().unwrap().into_file();\n // Resize before mapping.\n assert_eq!(\n unsafe { libc::ftruncate(new_file.as_raw_fd(), 1024 * 10) },\n 0\n );\n let r2 = MmapRegion::from_file(FileOffset::new(new_file, 0), 5000).unwrap();\n assert!(!r1.fds_overlap(&r2));\n\n // R2 is not file backed, so no overlap.\n let r2 = MmapRegion::new(5000).unwrap();\n assert!(!r1.fds_overlap(&r2));\n }\n}" ]
f703ff99660c6ae4691119ad4d830d8b530f0300
44,774
rs
Rust
src/libcollections/string.rs
kmcguire3413/rust
27bdf5ccbfe6f613b00e4c3b023140576f394172
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/libcollections/string.rs
kmcguire3413/rust
27bdf5ccbfe6f613b00e4c3b023140576f394172
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/libcollections/string.rs
kmcguire3413/rust
27bdf5ccbfe6f613b00e4c3b023140576f394172
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // // ignore-lexer-test FIXME #15679 //! An owned, growable string that enforces that its contents are valid UTF-8. #![stable] use core::prelude::*; use core::borrow::{Cow, IntoCow}; use core::default::Default; use core::fmt; use core::hash; use core::mem; use core::ptr; use core::ops; use core::raw::Slice as RawSlice; use unicode::str as unicode_str; use unicode::str::Utf16Item; use slice::CloneSliceExt; use str::{mod, CharRange, FromStr, Utf8Error}; use vec::{DerefVec, Vec, as_vec}; /// A growable string stored as a UTF-8 encoded buffer. #[deriving(Clone, PartialOrd, Eq, Ord)] #[stable] pub struct String { vec: Vec<u8>, } /// A possible error value from the `String::from_utf8` function. #[stable] pub struct FromUtf8Error { bytes: Vec<u8>, error: Utf8Error, } /// A possible error value from the `String::from_utf16` function. #[stable] #[allow(missing_copy_implementations)] pub struct FromUtf16Error(()); impl String { /// Creates a new string buffer initialized with the empty string. /// /// # Examples /// /// ``` /// let mut s = String::new(); /// ``` #[inline] #[stable] pub fn new() -> String { String { vec: Vec::new(), } } /// Creates a new string buffer with the given capacity. /// The string will be able to hold exactly `capacity` bytes without /// reallocating. If `capacity` is 0, the string will not allocate. 
/// /// # Examples /// /// ``` /// let mut s = String::with_capacity(10); /// ``` #[inline] #[stable] pub fn with_capacity(capacity: uint) -> String { String { vec: Vec::with_capacity(capacity), } } /// Creates a new string buffer from the given string. /// /// # Examples /// /// ``` /// let s = String::from_str("hello"); /// assert_eq!(s.as_slice(), "hello"); /// ``` #[inline] #[experimental = "needs investigation to see if to_string() can match perf"] pub fn from_str(string: &str) -> String { String { vec: string.as_bytes().to_vec() } } /// Returns the vector as a string buffer, if possible, taking care not to /// copy it. /// /// # Failure /// /// If the given vector is not valid UTF-8, then the original vector and the /// corresponding error is returned. /// /// # Examples /// /// ```rust /// # #![allow(deprecated)] /// use std::str::Utf8Error; /// /// let hello_vec = vec![104, 101, 108, 108, 111]; /// let s = String::from_utf8(hello_vec).unwrap(); /// assert_eq!(s, "hello"); /// /// let invalid_vec = vec![240, 144, 128]; /// let s = String::from_utf8(invalid_vec).err().unwrap(); /// assert_eq!(s.utf8_error(), Utf8Error::TooShort); /// assert_eq!(s.into_bytes(), vec![240, 144, 128]); /// ``` #[inline] #[stable] pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> { match str::from_utf8(vec.as_slice()) { Ok(..) => Ok(String { vec: vec }), Err(e) => Err(FromUtf8Error { bytes: vec, error: e }) } } /// Converts a vector of bytes to a new UTF-8 string. /// Any invalid UTF-8 sequences are replaced with U+FFFD REPLACEMENT CHARACTER. /// /// # Examples /// /// ```rust /// let input = b"Hello \xF0\x90\x80World"; /// let output = String::from_utf8_lossy(input); /// assert_eq!(output.as_slice(), "Hello \u{FFFD}World"); /// ``` #[stable] pub fn from_utf8_lossy<'a>(v: &'a [u8]) -> CowString<'a> { match str::from_utf8(v) { Ok(s) => return Cow::Borrowed(s), Err(..) 
=> {} } static TAG_CONT_U8: u8 = 128u8; static REPLACEMENT: &'static [u8] = b"\xEF\xBF\xBD"; // U+FFFD in UTF-8 let mut i = 0; let total = v.len(); fn unsafe_get(xs: &[u8], i: uint) -> u8 { unsafe { *xs.get_unchecked(i) } } fn safe_get(xs: &[u8], i: uint, total: uint) -> u8 { if i >= total { 0 } else { unsafe_get(xs, i) } } let mut res = String::with_capacity(total); if i > 0 { unsafe { res.as_mut_vec().push_all(v[..i]) }; } // subseqidx is the index of the first byte of the subsequence we're looking at. // It's used to copy a bunch of contiguous good codepoints at once instead of copying // them one by one. let mut subseqidx = 0; while i < total { let i_ = i; let byte = unsafe_get(v, i); i += 1; macro_rules! error(() => ({ unsafe { if subseqidx != i_ { res.as_mut_vec().push_all(v[subseqidx..i_]); } subseqidx = i; res.as_mut_vec().push_all(REPLACEMENT); } })); if byte < 128u8 { // subseqidx handles this } else { let w = unicode_str::utf8_char_width(byte); match w { 2 => { if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; } 3 => { match (byte, safe_get(v, i, total)) { (0xE0 , 0xA0 ... 0xBF) => (), (0xE1 ... 0xEC, 0x80 ... 0xBF) => (), (0xED , 0x80 ... 0x9F) => (), (0xEE ... 0xEF, 0x80 ... 0xBF) => (), _ => { error!(); continue; } } i += 1; if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; } 4 => { match (byte, safe_get(v, i, total)) { (0xF0 , 0x90 ... 0xBF) => (), (0xF1 ... 0xF3, 0x80 ... 0xBF) => (), (0xF4 , 0x80 ... 0x8F) => (), _ => { error!(); continue; } } i += 1; if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; } _ => { error!(); continue; } } } } if subseqidx < total { unsafe { res.as_mut_vec().push_all(v[subseqidx..total]) }; } Cow::Owned(res) } /// Decode a UTF-16 encoded vector `v` into a `String`, returning `None` /// if `v` contains any invalid data. 
    ///
    /// # Examples
    ///
    /// ```rust
    /// // 𝄞music
    /// let mut v = &mut [0xD834, 0xDD1E, 0x006d, 0x0075,
    ///                   0x0073, 0x0069, 0x0063];
    /// assert_eq!(String::from_utf16(v).unwrap(),
    ///            "𝄞music".to_string());
    ///
    /// // 𝄞mu<invalid>ic
    /// v[4] = 0xD800;
    /// assert!(String::from_utf16(v).is_err());
    /// ```
    #[stable]
    pub fn from_utf16(v: &[u16]) -> Result<String, FromUtf16Error> {
        let mut s = String::with_capacity(v.len());
        for c in unicode_str::utf16_items(v) {
            match c {
                Utf16Item::ScalarValue(c) => s.push(c),
                // A single unpaired surrogate makes the whole input invalid.
                Utf16Item::LoneSurrogate(_) => return Err(FromUtf16Error(())),
            }
        }
        Ok(s)
    }

    /// Decode a UTF-16 encoded vector `v` into a string, replacing
    /// invalid data with the replacement character (U+FFFD).
    ///
    /// # Examples
    ///
    /// ```rust
    /// // 𝄞mus<invalid>ic<invalid>
    /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
    ///           0x0073, 0xDD1E, 0x0069, 0x0063,
    ///           0xD834];
    ///
    /// assert_eq!(String::from_utf16_lossy(v),
    ///            "𝄞mus\u{FFFD}ic\u{FFFD}".to_string());
    /// ```
    #[stable]
    pub fn from_utf16_lossy(v: &[u16]) -> String {
        unicode_str::utf16_items(v).map(|c| c.to_char_lossy()).collect()
    }

    /// Convert a vector of `char`s to a `String`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # #![allow(deprecated)]
    /// let chars = &['h', 'e', 'l', 'l', 'o'];
    /// let s = String::from_chars(chars);
    /// assert_eq!(s.as_slice(), "hello");
    /// ```
    #[inline]
    #[deprecated = "use .collect() instead"]
    pub fn from_chars(chs: &[char]) -> String {
        chs.iter().map(|c| *c).collect()
    }

    /// Creates a new `String` from a length, capacity, and pointer.
    ///
    /// This is unsafe because:
    /// * We call `Vec::from_raw_parts` to get a `Vec<u8>`;
    /// * We assume that the `Vec` contains valid UTF-8.
    #[inline]
    #[stable]
    pub unsafe fn from_raw_parts(buf: *mut u8, length: uint, capacity: uint) -> String {
        String {
            vec: Vec::from_raw_parts(buf, length, capacity),
        }
    }

    /// Creates a `String` from a null-terminated `*const u8` buffer.
    ///
    /// This function is unsafe because we dereference memory until we find the
    /// NUL character, which is not guaranteed to be present. Additionally, the
    /// slice is not checked to see whether it contains valid UTF-8
    #[unstable = "just renamed from `mod raw`"]
    pub unsafe fn from_raw_buf(buf: *const u8) -> String {
        // NOTE: the intermediate `&str` is copied into a fresh allocation by
        // `from_str`, so the returned String does not alias `buf`.
        String::from_str(str::from_c_str(buf as *const i8))
    }

    /// Creates a `String` from a `*const u8` buffer of the given length.
    ///
    /// This function is unsafe because it blindly assumes the validity of the
    /// pointer `buf` for `len` bytes of memory. This function will copy the
    /// memory from `buf` into a new allocation (owned by the returned
    /// `String`).
    ///
    /// This function is also unsafe because it does not validate that the
    /// buffer is valid UTF-8 encoded data.
    #[unstable = "just renamed from `mod raw`"]
    pub unsafe fn from_raw_buf_len(buf: *const u8, len: uint) -> String {
        String::from_utf8_unchecked(Vec::from_raw_buf(buf, len))
    }

    /// Converts a vector of bytes to a new `String` without checking if
    /// it contains valid UTF-8. This is unsafe because it assumes that
    /// the UTF-8-ness of the vector has already been validated.
    #[inline]
    #[stable]
    pub unsafe fn from_utf8_unchecked(bytes: Vec<u8>) -> String {
        String { vec: bytes }
    }

    /// Return the underlying byte buffer, encoded as UTF-8.
    ///
    /// # Examples
    ///
    /// ```
    /// let s = String::from_str("hello");
    /// let bytes = s.into_bytes();
    /// assert_eq!(bytes, vec![104, 101, 108, 108, 111]);
    /// ```
    #[inline]
    #[stable]
    pub fn into_bytes(self) -> Vec<u8> {
        self.vec
    }

    /// Creates a string buffer by repeating a character `length` times.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(deprecated)]
    /// let s = String::from_char(5, 'a');
    /// assert_eq!(s.as_slice(), "aaaaa");
    /// ```
    #[inline]
    #[deprecated = "use repeat(ch).take(length).collect() instead"]
    pub fn from_char(length: uint, ch: char) -> String {
        if length == 0 {
            return String::new()
        }

        let mut buf = String::new();
        buf.push(ch);
        // One copy of `ch` is already in `buf`; reserve room for the
        // remaining `length - 1` copies so the loop never reallocates.
        let size = buf.len() * (length - 1);
        buf.reserve_exact(size);
        for _ in range(1, length) {
            buf.push(ch)
        }
        buf
    }

    /// Pushes the given string onto this string buffer.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from_str("foo");
    /// s.push_str("bar");
    /// assert_eq!(s.as_slice(), "foobar");
    /// ```
    #[inline]
    #[stable]
    pub fn push_str(&mut self, string: &str) {
        self.vec.push_all(string.as_bytes())
    }

    /// Pushes `ch` onto the given string `count` times.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(deprecated)]
    /// let mut s = String::from_str("foo");
    /// s.grow(5, 'Z');
    /// assert_eq!(s.as_slice(), "fooZZZZZ");
    /// ```
    #[inline]
    #[deprecated = "deprecated in favor of .extend(repeat(ch).take(count))"]
    pub fn grow(&mut self, count: uint, ch: char) {
        for _ in range(0, count) {
            self.push(ch)
        }
    }

    /// Returns the number of bytes that this string buffer can hold without
    /// reallocating.
    ///
    /// # Examples
    ///
    /// ```
    /// let s = String::with_capacity(10);
    /// assert!(s.capacity() >= 10);
    /// ```
    #[inline]
    #[stable]
    pub fn capacity(&self) -> uint {
        self.vec.capacity()
    }

    /// Deprecated: Renamed to `reserve`.
    #[deprecated = "Renamed to `reserve`"]
    pub fn reserve_additional(&mut self, extra: uint) {
        self.vec.reserve(extra)
    }

    /// Reserves capacity for at least `additional` more bytes to be inserted
    /// in the given `String`. The collection may reserve more space to avoid
    /// frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `uint`.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::new();
    /// s.reserve(10);
    /// assert!(s.capacity() >= 10);
    /// ```
    #[inline]
    #[stable]
    pub fn reserve(&mut self, additional: uint) {
        self.vec.reserve(additional)
    }

    /// Reserves the minimum capacity for exactly `additional` more bytes to be
    /// inserted in the given `String`. Does nothing if the capacity is already
    /// sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore capacity can not be relied upon to be precisely
    /// minimal. Prefer `reserve` if future insertions are expected.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `uint`.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::new();
    /// s.reserve(10);
    /// assert!(s.capacity() >= 10);
    /// ```
    #[inline]
    #[stable]
    pub fn reserve_exact(&mut self, additional: uint) {
        self.vec.reserve_exact(additional)
    }

    /// Shrinks the capacity of this string buffer to match its length.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from_str("foo");
    /// s.reserve(100);
    /// assert!(s.capacity() >= 100);
    /// s.shrink_to_fit();
    /// assert_eq!(s.capacity(), 3);
    /// ```
    #[inline]
    #[stable]
    pub fn shrink_to_fit(&mut self) {
        self.vec.shrink_to_fit()
    }

    /// Adds the given character to the end of the string.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from_str("abc");
    /// s.push('1');
    /// s.push('2');
    /// s.push('3');
    /// assert_eq!(s.as_slice(), "abc123");
    /// ```
    #[inline]
    #[stable]
    pub fn push(&mut self, ch: char) {
        // ASCII fast path: a single byte, no UTF-8 encoding step needed.
        if (ch as u32) < 0x80 {
            self.vec.push(ch as u8);
            return;
        }

        let cur_len = self.len();
        // This may use up to 4 bytes.
        self.vec.reserve(4);

        unsafe {
            // Attempt to not use an intermediate buffer by just pushing bytes
            // directly onto this string.
            let slice = RawSlice {
                data: self.vec.as_ptr().offset(cur_len as int),
                len: 4,
            };
            let used = ch.encode_utf8(mem::transmute(slice)).unwrap_or(0);
            self.vec.set_len(cur_len + used);
        }
    }

    /// Works with the underlying buffer as a byte slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let s = String::from_str("hello");
    /// let b: &[_] = &[104, 101, 108, 108, 111];
    /// assert_eq!(s.as_bytes(), b);
    /// ```
    #[inline]
    #[stable]
    pub fn as_bytes<'a>(&'a self) -> &'a [u8] {
        self.vec.as_slice()
    }

    /// Shortens a string to the specified length.
    ///
    /// # Panics
    ///
    /// Panics if `new_len` > current length,
    /// or if `new_len` is not a character boundary.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from_str("hello");
    /// s.truncate(2);
    /// assert_eq!(s.as_slice(), "he");
    /// ```
    #[inline]
    #[stable]
    pub fn truncate(&mut self, new_len: uint) {
        // Refuse to cut a multi-byte character in half; this keeps the
        // buffer valid UTF-8.
        assert!(self.is_char_boundary(new_len));
        self.vec.truncate(new_len)
    }

    /// Removes the last character from the string buffer and returns it.
    /// Returns `None` if this string buffer is empty.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from_str("foo");
    /// assert_eq!(s.pop(), Some('o'));
    /// assert_eq!(s.pop(), Some('o'));
    /// assert_eq!(s.pop(), Some('f'));
    /// assert_eq!(s.pop(), None);
    /// ```
    #[inline]
    #[stable]
    pub fn pop(&mut self) -> Option<char> {
        let len = self.len();
        if len == 0 {
            return None
        }

        // Decode the final character, then drop its bytes by shrinking the
        // length back to the character's start offset.
        let CharRange {ch, next} = self.char_range_at_reverse(len);
        unsafe {
            self.vec.set_len(next);
        }
        Some(ch)
    }

    /// Removes the character from the string buffer at byte position `idx` and
    /// returns it.
    ///
    /// # Warning
    ///
    /// This is an O(n) operation as it requires copying every element in the
    /// buffer.
    ///
    /// # Panics
    ///
    /// If `idx` does not lie on a character boundary, or if it is out of
    /// bounds, then this function will panic.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from_str("foo");
    /// assert_eq!(s.remove(0), 'f');
    /// assert_eq!(s.remove(1), 'o');
    /// assert_eq!(s.remove(0), 'o');
    /// ```
    #[stable]
    pub fn remove(&mut self, idx: uint) -> char {
        let len = self.len();
        assert!(idx <= len);

        let CharRange { ch, next } = self.char_range_at(idx);
        unsafe {
            // Shift the tail of the buffer left over the removed character's
            // bytes, then shrink the length accordingly.
            ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as int),
                             self.vec.as_ptr().offset(next as int),
                             len - next);
            self.vec.set_len(len - (next - idx));
        }
        ch
    }

    /// Insert a character into the string buffer at byte position `idx`.
    ///
    /// # Warning
    ///
    /// This is an O(n) operation as it requires copying every element in the
    /// buffer.
    ///
    /// # Panics
    ///
    /// If `idx` does not lie on a character boundary or is out of bounds, then
    /// this function will panic.
    #[stable]
    pub fn insert(&mut self, idx: uint, ch: char) {
        let len = self.len();
        assert!(idx <= len);
        assert!(self.is_char_boundary(idx));
        self.vec.reserve(4);
        // Encode the character into a small stack buffer first; `amt` is the
        // number of bytes the encoding actually used (1 to 4).
        let mut bits = [0, ..4];
        let amt = ch.encode_utf8(&mut bits).unwrap();

        unsafe {
            // Shift the tail right to open a gap of `amt` bytes, then copy
            // the encoded character into the gap.
            ptr::copy_memory(self.vec.as_mut_ptr().offset((idx + amt) as int),
                             self.vec.as_ptr().offset(idx as int),
                             len - idx);
            ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as int),
                             bits.as_ptr(),
                             amt);
            self.vec.set_len(len + amt);
        }
    }

    /// Views the string buffer as a mutable sequence of bytes.
    ///
    /// This is unsafe because it does not check
    /// to ensure that the resulting string will be valid UTF-8.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = String::from_str("hello");
    /// unsafe {
    ///     let vec = s.as_mut_vec();
    ///     assert!(vec == &mut vec![104, 101, 108, 108, 111]);
    ///     vec.reverse();
    /// }
    /// assert_eq!(s.as_slice(), "olleh");
    /// ```
    #[stable]
    pub unsafe fn as_mut_vec<'a>(&'a mut self) -> &'a mut Vec<u8> {
        &mut self.vec
    }

    /// Return the number of bytes in this string.
    ///
    /// # Examples
    ///
    /// ```
    /// let a = "foo".to_string();
    /// assert_eq!(a.len(), 3);
    /// ```
    #[inline]
    #[stable]
    pub fn len(&self) -> uint { self.vec.len() }

    /// Returns true if the string contains no bytes
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = String::new();
    /// assert!(v.is_empty());
    /// v.push('a');
    /// assert!(!v.is_empty());
    /// ```
    #[stable]
    pub fn is_empty(&self) -> bool { self.len() == 0 }

    /// Truncates the string, returning it to 0 length.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = "foo".to_string();
    /// s.clear();
    /// assert!(s.is_empty());
    /// ```
    #[inline]
    #[stable]
    pub fn clear(&mut self) {
        self.vec.clear()
    }
}

impl FromUtf8Error {
    /// Consume this error, returning the bytes that were attempted to make a
    /// `String` with.
    #[stable]
    pub fn into_bytes(self) -> Vec<u8> { self.bytes }

    /// Access the underlying UTF8-error that was the cause of this error.
    #[stable]
    pub fn utf8_error(&self) -> Utf8Error { self.error }
}

impl fmt::Show for FromUtf8Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.error.fmt(f)
    }
}

impl fmt::Show for FromUtf16Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        "invalid utf-16: lone surrogate found".fmt(f)
    }
}

#[experimental = "waiting on FromIterator stabilization"]
impl FromIterator<char> for String {
    fn from_iter<I:Iterator<char>>(iterator: I) -> String {
        let mut buf = String::new();
        buf.extend(iterator);
        buf
    }
}

#[experimental = "waiting on FromIterator stabilization"]
impl<'a> FromIterator<&'a str> for String {
    fn from_iter<I:Iterator<&'a str>>(iterator: I) -> String {
        let mut buf = String::new();
        buf.extend(iterator);
        buf
    }
}

#[experimental = "waiting on Extend stabilization"]
impl Extend<char> for String {
    fn extend<I:Iterator<char>>(&mut self, mut iterator: I) {
        // Each char needs at least one byte, so the lower bound is a safe
        // pre-reservation.
        let (lower_bound, _) = iterator.size_hint();
        self.reserve(lower_bound);
        for ch in iterator {
            self.push(ch)
        }
    }
}

#[experimental = "waiting on Extend stabilization"]
impl<'a> Extend<&'a str> for String {
    fn extend<I: Iterator<&'a str>>(&mut self, mut iterator: I) {
        // A guess that at least one byte per iterator element will be needed.
        let (lower_bound, _) = iterator.size_hint();
        self.reserve(lower_bound);

        for s in iterator {
            self.push_str(s)
        }
    }
}

#[stable]
impl PartialEq for String {
    #[inline]
    fn eq(&self, other: &String) -> bool { PartialEq::eq(&**self, &**other) }
    #[inline]
    fn ne(&self, other: &String) -> bool { PartialEq::ne(&**self, &**other) }
}

// Generates the symmetric pair of `PartialEq` impls between two string-like
// types by delegating to `str`'s comparison via auto-deref.
macro_rules! impl_eq {
    ($lhs:ty, $rhs: ty) => {
        #[stable]
        impl<'a> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
            #[inline]
            fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
        }

        #[stable]
        impl<'a> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
            #[inline]
            fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&**self, &**other) }
        }

    }
}

impl_eq! { String, &'a str }
impl_eq! { CowString<'a>, String }

#[stable]
impl<'a, 'b> PartialEq<&'b str> for CowString<'a> {
    #[inline]
    fn eq(&self, other: &&'b str) -> bool { PartialEq::eq(&**self, &**other) }
    #[inline]
    fn ne(&self, other: &&'b str) -> bool { PartialEq::ne(&**self, &**other) }
}

#[stable]
impl<'a, 'b> PartialEq<CowString<'a>> for &'b str {
    #[inline]
    fn eq(&self, other: &CowString<'a>) -> bool { PartialEq::eq(&**self, &**other) }
    #[inline]
    fn ne(&self, other: &CowString<'a>) -> bool { PartialEq::ne(&**self, &**other) }
}

#[experimental = "waiting on Str stabilization"]
#[allow(deprecated)]
impl Str for String {
    #[inline]
    #[stable]
    fn as_slice<'a>(&'a self) -> &'a str {
        // The buffer is valid UTF-8 by invariant, so reinterpreting the byte
        // slice as `&str` is sound.
        unsafe {
            mem::transmute(self.vec.as_slice())
        }
    }
}

#[stable]
impl Default for String {
    #[stable]
    fn default() -> String {
        String::new()
    }
}

#[experimental = "waiting on Show stabilization"]
impl fmt::Show for String {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        (**self).fmt(f)
    }
}

#[experimental = "waiting on Hash stabilization"]
impl<H: hash::Writer> hash::Hash<H> for String {
    #[inline]
    fn hash(&self, hasher: &mut H) {
        // Hash as `str` so that a String and an equal &str hash identically.
        (**self).hash(hasher)
    }
}

#[allow(deprecated)]
#[deprecated = "Use overloaded `core::cmp::PartialEq`"]
impl<'a, S: Str> Equiv<S> for String {
    #[inline]
    fn equiv(&self, other: &S) -> bool {
        self.as_slice() == other.as_slice()
    }
}

#[experimental = "waiting on Add stabilization"]
impl<'a> Add<&'a str, String> for String {
    fn add(mut self, other: &str) -> String {
        self.push_str(other);
        self
    }
}

impl ops::Slice<uint, str> for String {
    #[inline]
    fn as_slice_<'a>(&'a self) -> &'a str {
        unsafe { mem::transmute(self.vec.as_slice()) }
    }

    #[inline]
    fn slice_from_or_fail<'a>(&'a self, from: &uint) -> &'a str {
        self[][*from..]
    }

    #[inline]
    fn slice_to_or_fail<'a>(&'a self, to: &uint) -> &'a str {
        self[][..*to]
    }

    #[inline]
    fn slice_or_fail<'a>(&'a self, from: &uint, to: &uint) -> &'a str {
        self[][*from..*to]
    }
}

#[experimental = "waiting on Deref stabilization"]
impl ops::Deref<str> for String {
    fn deref<'a>(&'a self) -> &'a str {
        unsafe { mem::transmute(self.vec[]) }
    }
}

/// Wrapper type providing a `&String` reference via `Deref`.
#[experimental]
pub struct DerefString<'a> {
    x: DerefVec<'a, u8>
}

impl<'a> Deref<String> for DerefString<'a> {
    fn deref<'b>(&'b self) -> &'b String {
        unsafe { mem::transmute(&*self.x) }
    }
}

/// Convert a string slice to a wrapper type providing a `&String` reference.
///
/// # Examples
///
/// ```
/// use std::string::as_string;
///
/// fn string_consumer(s: String) {
///     assert_eq!(s, "foo".to_string());
/// }
///
/// let string = as_string("foo").clone();
/// string_consumer(string);
/// ```
#[experimental]
pub fn as_string<'a>(x: &'a str) -> DerefString<'a> {
    DerefString { x: as_vec(x.as_bytes()) }
}

impl FromStr for String {
    #[inline]
    fn from_str(s: &str) -> Option<String> {
        Some(String::from_str(s))
    }
}

/// Trait for converting a type to a string, consuming it in the process.
#[deprecated = "trait will be removed"]
pub trait IntoString {
    /// Consume and convert to a string.
    fn into_string(self) -> String;
}

/// A generic trait for converting a value to a string
pub trait ToString {
    /// Converts the value of `self` to an owned string
    fn to_string(&self) -> String;
}

impl<T: fmt::Show> ToString for T {
    fn to_string(&self) -> String {
        // Render via the `Show` formatter into a byte buffer, then re-check
        // it as UTF-8; `Show` output is expected to be valid UTF-8.
        let mut buf = Vec::<u8>::new();
        let _ = fmt::write(&mut buf, format_args!("{}", *self));
        String::from_utf8(buf).unwrap()
    }
}

impl IntoCow<'static, String, str> for String {
    fn into_cow(self) -> CowString<'static> {
        Cow::Owned(self)
    }
}

impl<'a> IntoCow<'a, String, str> for &'a str {
    fn into_cow(self) -> CowString<'a> {
        Cow::Borrowed(self)
    }
}

/// Unsafe operations
#[deprecated]
pub mod raw {
    use super::String;
    use vec::Vec;

    /// Creates a new `String` from a length, capacity, and pointer.
    ///
    /// This is unsafe because:
    /// * We call `Vec::from_raw_parts` to get a `Vec<u8>`;
    /// * We assume that the `Vec` contains valid UTF-8.
    #[inline]
    #[deprecated = "renamed to String::from_raw_parts"]
    pub unsafe fn from_parts(buf: *mut u8, length: uint, capacity: uint) -> String {
        String::from_raw_parts(buf, length, capacity)
    }

    /// Creates a `String` from a `*const u8` buffer of the given length.
    ///
    /// This function is unsafe because of two reasons:
    ///
    /// * A raw pointer is dereferenced and transmuted to `&[u8]`;
    /// * The slice is not checked to see whether it contains valid UTF-8.
    #[deprecated = "renamed to String::from_raw_buf_len"]
    pub unsafe fn from_buf_len(buf: *const u8, len: uint) -> String {
        String::from_raw_buf_len(buf, len)
    }

    /// Creates a `String` from a null-terminated `*const u8` buffer.
    ///
    /// This function is unsafe because we dereference memory until we find the NUL character,
    /// which is not guaranteed to be present.
Additionally, the slice is not checked to see /// whether it contains valid UTF-8 #[deprecated = "renamed to String::from_raw_buf"] pub unsafe fn from_buf(buf: *const u8) -> String { String::from_raw_buf(buf) } /// Converts a vector of bytes to a new `String` without checking if /// it contains valid UTF-8. This is unsafe because it assumes that /// the UTF-8-ness of the vector has already been validated. #[inline] #[deprecated = "renamed to String::from_utf8_unchecked"] pub unsafe fn from_utf8(bytes: Vec<u8>) -> String { String::from_utf8_unchecked(bytes) } } /// A clone-on-write string #[stable] pub type CowString<'a> = Cow<'a, String, str>; #[allow(deprecated)] impl<'a> Str for CowString<'a> { #[inline] fn as_slice<'b>(&'b self) -> &'b str { (**self).as_slice() } } #[cfg(test)] mod tests { use prelude::*; use test::Bencher; use str::Utf8Error; use str; use super::as_string; #[test] fn test_as_string() { let x = "foo"; assert_eq!(x, as_string(x).as_slice()); } #[test] fn test_from_str() { let owned: Option<::std::string::String> = from_str("string"); assert_eq!(owned.as_ref().map(|s| s.as_slice()), Some("string")); } #[test] fn test_from_utf8() { let xs = b"hello".to_vec(); assert_eq!(String::from_utf8(xs).unwrap(), String::from_str("hello")); let xs = "ศไทย中华Việt Nam".as_bytes().to_vec(); assert_eq!(String::from_utf8(xs).unwrap(), String::from_str("ศไทย中华Việt Nam")); let xs = b"hello\xFF".to_vec(); let err = String::from_utf8(xs).err().unwrap(); assert_eq!(err.utf8_error(), Utf8Error::TooShort); assert_eq!(err.into_bytes(), b"hello\xff".to_vec()); } #[test] fn test_from_utf8_lossy() { let xs = b"hello"; let ys: str::CowString = "hello".into_cow(); assert_eq!(String::from_utf8_lossy(xs), ys); let xs = "ศไทย中华Việt Nam".as_bytes(); let ys: str::CowString = "ศไทย中华Việt Nam".into_cow(); assert_eq!(String::from_utf8_lossy(xs), ys); let xs = b"Hello\xC2 There\xFF Goodbye"; assert_eq!(String::from_utf8_lossy(xs), String::from_str("Hello\u{FFFD} There\u{FFFD} 
Goodbye").into_cow()); let xs = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; assert_eq!(String::from_utf8_lossy(xs), String::from_str("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye").into_cow()); let xs = b"\xF5foo\xF5\x80bar"; assert_eq!(String::from_utf8_lossy(xs), String::from_str("\u{FFFD}foo\u{FFFD}\u{FFFD}bar").into_cow()); let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz"; assert_eq!(String::from_utf8_lossy(xs), String::from_str("\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz").into_cow()); let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz"; assert_eq!(String::from_utf8_lossy(xs), String::from_str("\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz").into_cow()); let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar"; assert_eq!(String::from_utf8_lossy(xs), String::from_str("\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}\ foo\u{10000}bar").into_cow()); // surrogates let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar"; assert_eq!(String::from_utf8_lossy(xs), String::from_str("\u{FFFD}\u{FFFD}\u{FFFD}foo\ \u{FFFD}\u{FFFD}\u{FFFD}bar").into_cow()); } #[test] fn test_from_utf16() { let pairs = [(String::from_str("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"), vec![0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16, 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16, 0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf30_u16, 0x000a_u16]), (String::from_str("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"), vec![0xd801_u16, 0xdc12_u16, 0xd801_u16, 0xdc49_u16, 0xd801_u16, 0xdc2e_u16, 0xd801_u16, 0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4b_u16, 0x0020_u16, 0xd801_u16, 0xdc0f_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16, 0x000a_u16]), (String::from_str("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"), vec![0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf11_u16, 0xd800_u16, 0xdf09_u16, 0x00b7_u16, 0xd800_u16, 0xdf0c_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf15_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]), (String::from_str("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"), vec![0xd801_u16, 
0xdc8b_u16, 0xd801_u16, 0xdc98_u16, 0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc91_u16, 0xd801_u16, 0xdc9b_u16, 0xd801_u16, 0xdc92_u16, 0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc93_u16, 0x0020_u16, 0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc9a_u16, 0xd801_u16, 0xdc8d_u16, 0x0020_u16, 0xd801_u16, 0xdc8f_u16, 0xd801_u16, 0xdc9c_u16, 0xd801_u16, 0xdc92_u16, 0xd801_u16, 0xdc96_u16, 0xd801_u16, 0xdc86_u16, 0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16, 0x000a_u16 ]), // Issue #12318, even-numbered non-BMP planes (String::from_str("\u{20000}"), vec![0xD840, 0xDC00])]; for p in pairs.iter() { let (s, u) = (*p).clone(); let s_as_utf16 = s.utf16_units().collect::<Vec<u16>>(); let u_as_string = String::from_utf16(u.as_slice()).unwrap(); assert!(::unicode::str::is_utf16(u.as_slice())); assert_eq!(s_as_utf16, u); assert_eq!(u_as_string, s); assert_eq!(String::from_utf16_lossy(u.as_slice()), s); assert_eq!(String::from_utf16(s_as_utf16.as_slice()).unwrap(), s); assert_eq!(u_as_string.utf16_units().collect::<Vec<u16>>(), u); } } #[test] fn test_utf16_invalid() { // completely positive cases tested above. // lead + eof assert!(String::from_utf16(&[0xD800]).is_err()); // lead + lead assert!(String::from_utf16(&[0xD800, 0xD800]).is_err()); // isolated trail assert!(String::from_utf16(&[0x0061, 0xDC00]).is_err()); // general assert!(String::from_utf16(&[0xD800, 0xd801, 0xdc8b, 0xD800]).is_err()); } #[test] fn test_from_utf16_lossy() { // completely positive cases tested above. 
// lead + eof assert_eq!(String::from_utf16_lossy(&[0xD800]), String::from_str("\u{FFFD}")); // lead + lead assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]), String::from_str("\u{FFFD}\u{FFFD}")); // isolated trail assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]), String::from_str("a\u{FFFD}")); // general assert_eq!(String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]), String::from_str("\u{FFFD}𐒋\u{FFFD}")); } #[test] fn test_from_buf_len() { unsafe { let a = vec![65u8, 65, 65, 65, 65, 65, 65, 0]; assert_eq!(super::raw::from_buf_len(a.as_ptr(), 3), String::from_str("AAA")); } } #[test] fn test_from_buf() { unsafe { let a = vec![65, 65, 65, 65, 65, 65, 65, 0]; let b = a.as_ptr(); let c = super::raw::from_buf(b); assert_eq!(c, String::from_str("AAAAAAA")); } } #[test] fn test_push_bytes() { let mut s = String::from_str("ABC"); unsafe { let mv = s.as_mut_vec(); mv.push_all(&[b'D']); } assert_eq!(s, "ABCD"); } #[test] fn test_push_str() { let mut s = String::new(); s.push_str(""); assert_eq!(s.slice_from(0), ""); s.push_str("abc"); assert_eq!(s.slice_from(0), "abc"); s.push_str("ประเทศไทย中华Việt Nam"); assert_eq!(s.slice_from(0), "abcประเทศไทย中华Việt Nam"); } #[test] fn test_push() { let mut data = String::from_str("ประเทศไทย中"); data.push('华'); data.push('b'); // 1 byte data.push('¢'); // 2 byte data.push('€'); // 3 byte data.push('𤭢'); // 4 byte assert_eq!(data, "ประเทศไทย中华b¢€𤭢"); } #[test] fn test_pop() { let mut data = String::from_str("ประเทศไทย中华b¢€𤭢"); assert_eq!(data.pop().unwrap(), '𤭢'); // 4 bytes assert_eq!(data.pop().unwrap(), '€'); // 3 bytes assert_eq!(data.pop().unwrap(), '¢'); // 2 bytes assert_eq!(data.pop().unwrap(), 'b'); // 1 bytes assert_eq!(data.pop().unwrap(), '华'); assert_eq!(data, "ประเทศไทย中"); } #[test] fn test_str_truncate() { let mut s = String::from_str("12345"); s.truncate(5); assert_eq!(s, "12345"); s.truncate(3); assert_eq!(s, "123"); s.truncate(0); assert_eq!(s, ""); let mut s = String::from_str("12345"); let p = 
s.as_ptr(); s.truncate(3); s.push_str("6"); let p_ = s.as_ptr(); assert_eq!(p_, p); } #[test] #[should_fail] fn test_str_truncate_invalid_len() { let mut s = String::from_str("12345"); s.truncate(6); } #[test] #[should_fail] fn test_str_truncate_split_codepoint() { let mut s = String::from_str("\u{FC}"); // ü s.truncate(1); } #[test] fn test_str_clear() { let mut s = String::from_str("12345"); s.clear(); assert_eq!(s.len(), 0); assert_eq!(s, ""); } #[test] fn test_str_add() { let a = String::from_str("12345"); let b = a + "2"; let b = b + "2"; assert_eq!(b.len(), 7); assert_eq!(b, "1234522"); } #[test] fn remove() { let mut s = "ศไทย中华Việt Nam; foobar".to_string();; assert_eq!(s.remove(0), 'ศ'); assert_eq!(s.len(), 33); assert_eq!(s, "ไทย中华Việt Nam; foobar"); assert_eq!(s.remove(17), 'ệ'); assert_eq!(s, "ไทย中华Vit Nam; foobar"); } #[test] #[should_fail] fn remove_bad() { "ศ".to_string().remove(1); } #[test] fn insert() { let mut s = "foobar".to_string(); s.insert(0, 'ệ'); assert_eq!(s, "ệfoobar"); s.insert(6, 'ย'); assert_eq!(s, "ệfooยbar"); } #[test] #[should_fail] fn insert_bad1() { "".to_string().insert(1, 't'); } #[test] #[should_fail] fn insert_bad2() { "ệ".to_string().insert(1, 't'); } #[test] fn test_slicing() { let s = "foobar".to_string(); assert_eq!("foobar", s[]); assert_eq!("foo", s[..3]); assert_eq!("bar", s[3..]); assert_eq!("oob", s[1..4]); } #[test] fn test_simple_types() { assert_eq!(1i.to_string(), "1"); assert_eq!((-1i).to_string(), "-1"); assert_eq!(200u.to_string(), "200"); assert_eq!(2u8.to_string(), "2"); assert_eq!(true.to_string(), "true"); assert_eq!(false.to_string(), "false"); assert_eq!(().to_string(), "()"); assert_eq!(("hi".to_string()).to_string(), "hi"); } #[test] fn test_vectors() { let x: Vec<int> = vec![]; assert_eq!(x.to_string(), "[]"); assert_eq!((vec![1i]).to_string(), "[1]"); assert_eq!((vec![1i, 2, 3]).to_string(), "[1, 2, 3]"); assert!((vec![vec![], vec![1i], vec![1i, 1]]).to_string() == "[[], [1], [1, 1]]"); } #[test] fn 
test_from_iterator() { let s = "ศไทย中华Việt Nam".to_string(); let t = "ศไทย中华"; let u = "Việt Nam"; let a: String = s.chars().collect(); assert_eq!(s, a); let mut b = t.to_string(); b.extend(u.chars()); assert_eq!(s, b); let c: String = vec![t, u].into_iter().collect(); assert_eq!(s, c); let mut d = t.to_string(); d.extend(vec![u].into_iter()); assert_eq!(s, d); } #[bench] fn bench_with_capacity(b: &mut Bencher) { b.iter(|| { String::with_capacity(100) }); } #[bench] fn bench_push_str(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| { let mut r = String::new(); r.push_str(s); }); } const REPETITIONS: u64 = 10_000; #[bench] fn bench_push_str_one_byte(b: &mut Bencher) { b.bytes = REPETITIONS; b.iter(|| { let mut r = String::new(); for _ in range(0, REPETITIONS) { r.push_str("a") } }); } #[bench] fn bench_push_char_one_byte(b: &mut Bencher) { b.bytes = REPETITIONS; b.iter(|| { let mut r = String::new(); for _ in range(0, REPETITIONS) { r.push('a') } }); } #[bench] fn bench_push_char_two_bytes(b: &mut Bencher) { b.bytes = REPETITIONS * 2; b.iter(|| { let mut r = String::new(); for _ in range(0, REPETITIONS) { r.push('â') } }); } #[bench] fn from_utf8_lossy_100_ascii(b: &mut Bencher) { let s = b"Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; assert_eq!(100, s.len()); b.iter(|| { let _ = String::from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_100_multibyte(b: &mut Bencher) { let s = "𐌀𐌖𐌋𐌄𐌑𐌉ปรدولة الكويتทศไทย中华𐍅𐌿𐌻𐍆𐌹𐌻𐌰".as_bytes(); assert_eq!(100, s.len()); b.iter(|| { let _ = String::from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_invalid(b: &mut Bencher) { let s = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; b.iter(|| { let _ = String::from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_100_invalid(b: &mut Bencher) { let s = Vec::from_elem(100, 0xF5u8); b.iter(|| { let _ = String::from_utf8_lossy(s.as_slice()); }); } }
29.475971
99
0.51617
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_as_string() {\n let x = \"foo\";\n assert_eq!(x, as_string(x).as_slice());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_str() {\n let owned: Option<::std::string::String> = from_str(\"string\");\n assert_eq!(owned.as_ref().map(|s| s.as_slice()), Some(\"string\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_utf8() {\n let xs = b\"hello\".to_vec();\n assert_eq!(String::from_utf8(xs).unwrap(),\n String::from_str(\"hello\"));\n\n let xs = \"ศไทย中华Việt Nam\".as_bytes().to_vec();\n assert_eq!(String::from_utf8(xs).unwrap(),\n String::from_str(\"ศไทย中华Việt Nam\"));\n\n let xs = b\"hello\\xFF\".to_vec();\n let err = String::from_utf8(xs).err().unwrap();\n assert_eq!(err.utf8_error(), Utf8Error::TooShort);\n assert_eq!(err.into_bytes(), b\"hello\\xff\".to_vec());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_utf8_lossy() {\n let xs = b\"hello\";\n let ys: str::CowString = \"hello\".into_cow();\n assert_eq!(String::from_utf8_lossy(xs), ys);\n\n let xs = \"ศไทย中华Việt Nam\".as_bytes();\n let ys: str::CowString = \"ศไทย中华Việt Nam\".into_cow();\n assert_eq!(String::from_utf8_lossy(xs), ys);\n\n let xs = b\"Hello\\xC2 There\\xFF Goodbye\";\n assert_eq!(String::from_utf8_lossy(xs),\n String::from_str(\"Hello\\u{FFFD} There\\u{FFFD} Goodbye\").into_cow());\n\n let xs = b\"Hello\\xC0\\x80 There\\xE6\\x83 Goodbye\";\n assert_eq!(String::from_utf8_lossy(xs),\n String::from_str(\"Hello\\u{FFFD}\\u{FFFD} There\\u{FFFD} Goodbye\").into_cow());\n\n let xs = b\"\\xF5foo\\xF5\\x80bar\";\n assert_eq!(String::from_utf8_lossy(xs),\n String::from_str(\"\\u{FFFD}foo\\u{FFFD}\\u{FFFD}bar\").into_cow());\n\n let xs = b\"\\xF1foo\\xF1\\x80bar\\xF1\\x80\\x80baz\";\n assert_eq!(String::from_utf8_lossy(xs),\n String::from_str(\"\\u{FFFD}foo\\u{FFFD}bar\\u{FFFD}baz\").into_cow());\n\n let xs = b\"\\xF4foo\\xF4\\x80bar\\xF4\\xBFbaz\";\n 
assert_eq!(String::from_utf8_lossy(xs),\n String::from_str(\"\\u{FFFD}foo\\u{FFFD}bar\\u{FFFD}\\u{FFFD}baz\").into_cow());\n\n let xs = b\"\\xF0\\x80\\x80\\x80foo\\xF0\\x90\\x80\\x80bar\";\n assert_eq!(String::from_utf8_lossy(xs), String::from_str(\"\\u{FFFD}\\u{FFFD}\\u{FFFD}\\u{FFFD}\\\n foo\\u{10000}bar\").into_cow());\n\n // surrogates\n let xs = b\"\\xED\\xA0\\x80foo\\xED\\xBF\\xBFbar\";\n assert_eq!(String::from_utf8_lossy(xs), String::from_str(\"\\u{FFFD}\\u{FFFD}\\u{FFFD}foo\\\n \\u{FFFD}\\u{FFFD}\\u{FFFD}bar\").into_cow());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_utf16() {\n let pairs =\n [(String::from_str(\"𐍅𐌿𐌻𐍆𐌹𐌻𐌰\\n\"),\n vec![0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16,\n 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16,\n 0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16,\n 0xd800_u16, 0xdf30_u16, 0x000a_u16]),\n\n (String::from_str(\"𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\\n\"),\n vec![0xd801_u16, 0xdc12_u16, 0xd801_u16,\n 0xdc49_u16, 0xd801_u16, 0xdc2e_u16, 0xd801_u16,\n 0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16,\n 0xdc4b_u16, 0x0020_u16, 0xd801_u16, 0xdc0f_u16,\n 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16,\n 0x000a_u16]),\n\n (String::from_str(\"𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\\n\"),\n vec![0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16,\n 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16,\n 0xd800_u16, 0xdf11_u16, 0xd800_u16, 0xdf09_u16,\n 0x00b7_u16, 0xd800_u16, 0xdf0c_u16, 0xd800_u16,\n 0xdf04_u16, 0xd800_u16, 0xdf15_u16, 0xd800_u16,\n 0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16,\n 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]),\n\n (String::from_str(\"𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\\n\"),\n vec![0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16,\n 0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc91_u16,\n 0xd801_u16, 0xdc9b_u16, 0xd801_u16, 0xdc92_u16,\n 0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16,\n 0xdc93_u16, 0x0020_u16, 0xd801_u16, 0xdc88_u16,\n 0xd801_u16, 0xdc9a_u16, 0xd801_u16, 0xdc8d_u16,\n 0x0020_u16, 0xd801_u16, 0xdc8f_u16, 0xd801_u16,\n 
0xdc9c_u16, 0xd801_u16, 0xdc92_u16, 0xd801_u16,\n 0xdc96_u16, 0xd801_u16, 0xdc86_u16, 0x0020_u16,\n 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16,\n 0x000a_u16 ]),\n // Issue #12318, even-numbered non-BMP planes\n (String::from_str(\"\\u{20000}\"),\n vec![0xD840, 0xDC00])];\n\n for p in pairs.iter() {\n let (s, u) = (*p).clone();\n let s_as_utf16 = s.utf16_units().collect::<Vec<u16>>();\n let u_as_string = String::from_utf16(u.as_slice()).unwrap();\n\n assert!(::unicode::str::is_utf16(u.as_slice()));\n assert_eq!(s_as_utf16, u);\n\n assert_eq!(u_as_string, s);\n assert_eq!(String::from_utf16_lossy(u.as_slice()), s);\n\n assert_eq!(String::from_utf16(s_as_utf16.as_slice()).unwrap(), s);\n assert_eq!(u_as_string.utf16_units().collect::<Vec<u16>>(), u);\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_utf16_invalid() {\n // completely positive cases tested above.\n // lead + eof\n assert!(String::from_utf16(&[0xD800]).is_err());\n // lead + lead\n assert!(String::from_utf16(&[0xD800, 0xD800]).is_err());\n\n // isolated trail\n assert!(String::from_utf16(&[0x0061, 0xDC00]).is_err());\n\n // general\n assert!(String::from_utf16(&[0xD800, 0xd801, 0xdc8b, 0xD800]).is_err());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_utf16_lossy() {\n // completely positive cases tested above.\n // lead + eof\n assert_eq!(String::from_utf16_lossy(&[0xD800]), String::from_str(\"\\u{FFFD}\"));\n // lead + lead\n assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]),\n String::from_str(\"\\u{FFFD}\\u{FFFD}\"));\n\n // isolated trail\n assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]), String::from_str(\"a\\u{FFFD}\"));\n\n // general\n assert_eq!(String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]),\n String::from_str(\"\\u{FFFD}𐒋\\u{FFFD}\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_buf_len() {\n unsafe {\n let a = vec![65u8, 65, 65, 65, 65, 65, 65, 0];\n 
assert_eq!(super::raw::from_buf_len(a.as_ptr(), 3), String::from_str(\"AAA\"));\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_buf() {\n unsafe {\n let a = vec![65, 65, 65, 65, 65, 65, 65, 0];\n let b = a.as_ptr();\n let c = super::raw::from_buf(b);\n assert_eq!(c, String::from_str(\"AAAAAAA\"));\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_push_bytes() {\n let mut s = String::from_str(\"ABC\");\n unsafe {\n let mv = s.as_mut_vec();\n mv.push_all(&[b'D']);\n }\n assert_eq!(s, \"ABCD\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_push_str() {\n let mut s = String::new();\n s.push_str(\"\");\n assert_eq!(s.slice_from(0), \"\");\n s.push_str(\"abc\");\n assert_eq!(s.slice_from(0), \"abc\");\n s.push_str(\"ประเทศไทย中华Việt Nam\");\n assert_eq!(s.slice_from(0), \"abcประเทศไทย中华Việt Nam\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_push() {\n let mut data = String::from_str(\"ประเทศไทย中\");\n data.push('华');\n data.push('b'); // 1 byte\n data.push('¢'); // 2 byte\n data.push('€'); // 3 byte\n data.push('𤭢'); // 4 byte\n assert_eq!(data, \"ประเทศไทย中华b¢€𤭢\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_pop() {\n let mut data = String::from_str(\"ประเทศไทย中华b¢€𤭢\");\n assert_eq!(data.pop().unwrap(), '𤭢'); // 4 bytes\n assert_eq!(data.pop().unwrap(), '€'); // 3 bytes\n assert_eq!(data.pop().unwrap(), '¢'); // 2 bytes\n assert_eq!(data.pop().unwrap(), 'b'); // 1 bytes\n assert_eq!(data.pop().unwrap(), '华');\n assert_eq!(data, \"ประเทศไทย中\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_str_truncate() {\n let mut s = String::from_str(\"12345\");\n s.truncate(5);\n assert_eq!(s, \"12345\");\n s.truncate(3);\n assert_eq!(s, \"123\");\n s.truncate(0);\n assert_eq!(s, \"\");\n\n let mut s = String::from_str(\"12345\");\n let p = s.as_ptr();\n s.truncate(3);\n s.push_str(\"6\");\n let p_ = s.as_ptr();\n 
assert_eq!(p_, p);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_str_truncate_invalid_len() {\n let mut s = String::from_str(\"12345\");\n s.truncate(6);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_str_truncate_split_codepoint() {\n let mut s = String::from_str(\"\\u{FC}\"); // ü\n s.truncate(1);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_str_clear() {\n let mut s = String::from_str(\"12345\");\n s.clear();\n assert_eq!(s.len(), 0);\n assert_eq!(s, \"\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_str_add() {\n let a = String::from_str(\"12345\");\n let b = a + \"2\";\n let b = b + \"2\";\n assert_eq!(b.len(), 7);\n assert_eq!(b, \"1234522\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn remove() {\n let mut s = \"ศไทย中华Việt Nam; foobar\".to_string();;\n assert_eq!(s.remove(0), 'ศ');\n assert_eq!(s.len(), 33);\n assert_eq!(s, \"ไทย中华Việt Nam; foobar\");\n assert_eq!(s.remove(17), 'ệ');\n assert_eq!(s, \"ไทย中华Vit Nam; foobar\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn remove_bad() {\n \"ศ\".to_string().remove(1);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn insert() {\n let mut s = \"foobar\".to_string();\n s.insert(0, 'ệ');\n assert_eq!(s, \"ệfoobar\");\n s.insert(6, 'ย');\n assert_eq!(s, \"ệfooยbar\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn insert_bad1() { \"\".to_string().insert(1, 't'); }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn insert_bad2() { \"ệ\".to_string().insert(1, 't'); }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_slicing() {\n let s = \"foobar\".to_string();\n assert_eq!(\"foobar\", s[]);\n assert_eq!(\"foo\", s[..3]);\n assert_eq!(\"bar\", s[3..]);\n assert_eq!(\"oob\", s[1..4]);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_simple_types() {\n assert_eq!(1i.to_string(), 
\"1\");\n assert_eq!((-1i).to_string(), \"-1\");\n assert_eq!(200u.to_string(), \"200\");\n assert_eq!(2u8.to_string(), \"2\");\n assert_eq!(true.to_string(), \"true\");\n assert_eq!(false.to_string(), \"false\");\n assert_eq!(().to_string(), \"()\");\n assert_eq!((\"hi\".to_string()).to_string(), \"hi\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_vectors() {\n let x: Vec<int> = vec![];\n assert_eq!(x.to_string(), \"[]\");\n assert_eq!((vec![1i]).to_string(), \"[1]\");\n assert_eq!((vec![1i, 2, 3]).to_string(), \"[1, 2, 3]\");\n assert!((vec![vec![], vec![1i], vec![1i, 1]]).to_string() ==\n \"[[], [1], [1, 1]]\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_iterator() {\n let s = \"ศไทย中华Việt Nam\".to_string();\n let t = \"ศไทย中华\";\n let u = \"Việt Nam\";\n\n let a: String = s.chars().collect();\n assert_eq!(s, a);\n\n let mut b = t.to_string();\n b.extend(u.chars());\n assert_eq!(s, b);\n\n let c: String = vec![t, u].into_iter().collect();\n assert_eq!(s, c);\n\n let mut d = t.to_string();\n d.extend(vec![u].into_iter());\n assert_eq!(s, d);\n }\n}" ]
f70405ca51d91637498909e1a34332c669071d30
773
rs
Rust
tests/spec/non_conformant/basic/t59_if_expression.rs
becmer/rsass
7ef8b2ce6c283e5c3546640e57e62f306ed96c32
[ "Apache-2.0" ]
329
2017-02-18T12:39:56.000Z
2022-03-31T06:52:18.000Z
tests/spec/non_conformant/basic/t59_if_expression.rs
becmer/rsass
7ef8b2ce6c283e5c3546640e57e62f306ed96c32
[ "Apache-2.0" ]
73
2017-04-28T19:26:26.000Z
2022-03-05T15:51:09.000Z
tests/spec/non_conformant/basic/t59_if_expression.rs
becmer/rsass
7ef8b2ce6c283e5c3546640e57e62f306ed96c32
[ "Apache-2.0" ]
27
2017-06-05T23:02:14.000Z
2022-03-28T00:42:52.000Z
//! Tests auto-converted from "sass-spec/spec/non_conformant/basic/59_if_expression.hrx" #[allow(unused)] fn runner() -> crate::TestRunner { super::runner() } #[test] fn test() { assert_eq!( runner().ok( "$x: 0;\ \n$if-false: whatever;\n\ \ndiv {\ \n foo: if($if-true: hey, $if-false: ho, $condition: true);\ \n foo: if($if-true: hey, $if-false: ho, $condition: false);\ \n foo: if($x != 0, if($x, true, false), unquote(\"x is zero\"));\ \n foo: if(false, 1/0, $if-false: $if-false);\ \n}" ), "div {\ \n foo: hey;\ \n foo: ho;\ \n foo: x is zero;\ \n foo: whatever;\ \n}\n" ); }
26.655172
88
0.446313
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test() {\n assert_eq!(\n runner().ok(\n \"$x: 0;\\\n \\n$if-false: whatever;\\n\\\n \\ndiv {\\\n \\n foo: if($if-true: hey, $if-false: ho, $condition: true);\\\n \\n foo: if($if-true: hey, $if-false: ho, $condition: false);\\\n \\n foo: if($x != 0, if($x, true, false), unquote(\\\"x is zero\\\"));\\\n \\n foo: if(false, 1/0, $if-false: $if-false);\\\n \\n}\"\n ),\n \"div {\\\n \\n foo: hey;\\\n \\n foo: ho;\\\n \\n foo: x is zero;\\\n \\n foo: whatever;\\\n \\n}\\n\"\n );\n}\n}" ]
f7040a46bf0373e5f9ccb8a5dfb7dc41f73282f8
8,791
rs
Rust
src/lib.rs
umaYnit/shadow-rs
5f964033fcf5ae9d4e12004ed93ed283fc295ddb
[ "MIT" ]
null
null
null
src/lib.rs
umaYnit/shadow-rs
5f964033fcf5ae9d4e12004ed93ed283fc295ddb
[ "MIT" ]
null
null
null
src/lib.rs
umaYnit/shadow-rs
5f964033fcf5ae9d4e12004ed93ed283fc295ddb
[ "MIT" ]
null
null
null
//! `shadow-rs` is a build script write by Rust //! //! It's can record compiled project much information. //! Like version info,dependence info.Like shadow,if compiled,never change.forever follow your project. //! //! Generated rust const by exec:`cargo build` //! //! # Example //! //! ``` //! pub const RUST_VERSION :&str = "rustc 1.45.0 (5c1f21c3b 2020-07-13)"; //! pub const BUILD_RUST_CHANNEL :&str = "debug"; //! pub const COMMIT_AUTHOR :&str = "baoyachi"; //! pub const BUILD_TIME :&str = "2020-08-16 13:48:52"; //! pub const COMMIT_DATE :&str = "2020-08-16 13:12:52"; //! pub const COMMIT_EMAIL :&str = "xxx@gmail.com"; //! pub const PROJECT_NAME :&str = "shadow-rs"; //! pub const RUST_CHANNEL :&str = "stable-x86_64-apple-darwin (default)"; //! pub const BRANCH :&str = "master"; //! pub const CARGO_LOCK :&str = r#" //! ├── chrono v0.4.19 //! │ ├── libc v0.2.80 //! │ ├── num-integer v0.1.44 //! │ │ └── num-traits v0.2.14 //! │ │ [build-dependencies] //! │ │ └── autocfg v1.0.1 //! │ ├── num-traits v0.2.14 (*) //! │ └── time v0.1.44 //! │ └── libc v0.2.80 //! └── git2 v0.13.12 //! ├── log v0.4.11 //! │ └── cfg-if v0.1.10 //! └── url v2.2.0 //! ├── form_urlencoded v1.0.0 //! │ └── percent-encoding v2.1.0 //! └── percent-encoding v2.1.0"#; //! pub const CARGO_VERSION :&str = "cargo 1.45.0 (744bd1fbb 2020-06-15)"; //! pub const BUILD_OS :&str = "macos-x86_64"; //! pub const COMMIT_HASH :&str = "386741540d73c194a3028b96b92fdeb53ca2788a"; //! pub const PKG_VERSION :&str = "0.3.13"; //! ``` //! # Quick Start //! //! ## step 1 //! In your `cargo.toml` `packgae` with package add with below config //! //! ```toml //! [package] //! build = "build.rs" //! //! [dependencies] //! shadow-rs = "0.5" //! //! [build-dependencies] //! shadow-rs = "0.5" //! ``` //! //! ## step 2 //! In your project add file `build.rs`,then add with below config //! //! ```ignore //! fn main() -> shadow_rs::SdResult<()> { //! shadow_rs::new() //! } //! ``` //! //! ## step 3 //! 
In your project find `bin` rust file.It's usually `main.rs`, you can find `[bin]` file with `Cargo.toml`,then add with below config //! The `shadow!(build)` with `build` config,add Rust build mod in your project. You can also replace it(build) with other name. //! //! ```ignore //! #[macro_use] //! extern crate shadow_rs; //! //! shadow!(build); //! ``` //! //! ## step 4 //! Then you can use const that's shadow build it(main.rs). //! //! The `build` mod just we use `shadow!(build)` generated. //! //! ```ignore //! fn main(){ //! println!("{}",build::version()); //print version() method //! println!("{}",build::BRANCH); //master //! println!("{}",build::SHORT_COMMIT);//8405e28e //! println!("{}",build::COMMIT_HASH);//8405e28e64080a09525a6cf1b07c22fcaf71a5c5 //! println!("{}",build::COMMIT_DATE);//2020-08-16T06:22:24+00:00 //! println!("{}",build::COMMIT_AUTHOR);//baoyachi //! println!("{}",build::COMMIT_EMAIL);//xxx@gmail.com //! //! println!("{}",build::BUILD_OS);//macos-x86_64 //! println!("{}",build::RUST_VERSION);//rustc 1.45.0 (5c1f21c3b 2020-07-13) //! println!("{}",build::RUST_CHANNEL);//stable-x86_64-apple-darwin (default) //! println!("{}",build::CARGO_VERSION);//cargo 1.45.0 (744bd1fbb 2020-06-15) //! println!("{}",build::PKG_VERSION);//0.3.13 //! println!("{}",build::CARGO_TREE); //like command:cargo tree //! //! println!("{}",build::PROJECT_NAME);//shadow-rs //! println!("{}",build::BUILD_TIME);//2020-08-16 14:50:25 //! println!("{}",build::BUILD_RUST_CHANNEL);//debug //! } //!``` //! //! ## Clap example //! And you can also use const with [clap](https://github.com/baoyachi/shadow-rs/blob/master/example_shadow/src/main.rs#L25_L27). //! //! For the user guide and futher documentation, please read //! [The shadow-rs document](https://github.com/baoyachi/shadow-rs). //! 
mod build; mod channel; mod ci; mod env; mod err; mod git; use build::*; use env::*; use git::*; use crate::ci::CIType; use std::collections::HashMap; use std::env as std_env; use std::fs::File; use std::io::Write; use std::path::Path; pub use channel::BuildRustChannel; use chrono::Local; pub use err::SdResult; const SHADOW_RS: &str = "shadow.rs"; /// Add a mod in project with `$build_mod`. /// /// You can use `shadow!(build_shadow)`. Then shadow-rs can help you add a mod with name `build_shadow`. /// Next, use mod with name `build_shadow`,and also use const like:`build_shadow::BRANCH`. /// /// Normally, you just config `shadow!(build)`.It looks more concise. #[macro_export] macro_rules! shadow { ($build_mod:ident) => { pub mod $build_mod { include!(concat!(env!("OUT_DIR"), "/shadow.rs")); } }; } pub fn new() -> SdResult<()> { change_detection(); let src_path = std::env::var("CARGO_MANIFEST_DIR")?; let out_path = std::env::var("OUT_DIR")?; Shadow::build(src_path, out_path) } fn change_detection() { git_change_detection(); } /// check the git info is changed, trigger rerun. 
fn git_change_detection() { println!("cargo:rerun-if-changed=.git/objects"); } #[derive(Debug)] pub(crate) struct Shadow { f: File, map: HashMap<ShadowConst, ConstVal>, std_env: HashMap<String, String>, } impl Shadow { fn get_env() -> HashMap<String, String> { let mut env_map = HashMap::new(); for (k, v) in std_env::vars() { env_map.insert(k, v); } env_map } /// try get current ci env fn try_ci(&self) -> CIType { if let Some(c) = self.std_env.get("GITLAB_CI") { if c == "true" { return CIType::Gitlab; } } if let Some(c) = self.std_env.get("GITHUB_ACTIONS") { if c == "true" { return CIType::Github; } } //TODO completed [travis,jenkins] env CIType::None } fn build(src_path: String, out_path: String) -> SdResult<()> { let out = { let path = Path::new(out_path.as_str()); if !out_path.ends_with('/') { path.join(format!("{}/{}", out_path, SHADOW_RS)) } else { path.join(SHADOW_RS) } }; let mut shadow = Shadow { f: File::create(out)?, map: Default::default(), std_env: Default::default(), }; shadow.std_env = Self::get_env(); let ci_type = shadow.try_ci(); let src_path = Path::new(src_path.as_str()); let mut map = new_git(&src_path, ci_type, &shadow.std_env); for (k, v) in new_project(&shadow.std_env) { map.insert(k, v); } for (k, v) in new_system_env(&shadow.std_env) { map.insert(k, v); } shadow.map = map; shadow.gen_const()?; //write version method shadow.write_version()?; Ok(()) } fn gen_const(&mut self) -> SdResult<()> { self.write_header()?; for (k, v) in self.map.clone() { self.write_const(k, v)?; } Ok(()) } fn write_header(&self) -> SdResult<()> { let desc = format!( r#"/// Code generated by shadow-rs generator. DO NOT EDIT. 
/// Author by:https://www.github.com/baoyachi /// The build script repository:https://github.com/baoyachi/shadow-rs /// create time by:{}"#, Local::now().format("%Y-%m-%d %H:%M:%S").to_string() ); writeln!(&self.f, "{}\n\n", desc)?; Ok(()) } fn write_const(&mut self, shadow_const: ShadowConst, val: ConstVal) -> SdResult<()> { let desc = format!("/// {}", val.desc); let (t, v) = match val.t { ConstType::OptStr => (ConstType::Str.to_string(), "".into()), ConstType::Str => (ConstType::Str.to_string(), val.v), }; let define = format!( "pub const {} :{} = r#\"{}\"#;", shadow_const.to_ascii_uppercase(), t, v ); writeln!(&self.f, "{}", desc)?; writeln!(&self.f, "{}\n", define)?; Ok(()) } fn write_version(&mut self) -> SdResult<()> { let desc: &str = "/// The common version method. It's so easy to use this method"; const VERSION_FN: &str = r##"#[warn(dead_code)] pub fn version() -> String { format!(r#" branch:{} commit_hash:{} build_time:{} build_env:{},{}"#, BRANCH, SHORT_COMMIT, BUILD_TIME, RUST_VERSION, RUST_CHANNEL ) }"##; writeln!(&self.f, "{}", desc)?; writeln!(&self.f, "{}\n", VERSION_FN)?; Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_build() -> SdResult<()> { Shadow::build("./".into(), "./".into())?; Ok(()) } }
28.542208
135
0.56046
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_build() -> SdResult<()> {\n Shadow::build(\"./\".into(), \"./\".into())?;\n Ok(())\n }\n}" ]
f7041372a09ccffa8748a7f0650cc49ba05c26f2
544
rs
Rust
native_implemented/otp/src/erlang/send_2/test/with_atom_destination.rs
mlwilkerson/lumen
048df6c0840c11496e2d15aa9af2e4a8d07a6e0f
[ "Apache-2.0" ]
2,939
2019-08-29T16:52:20.000Z
2022-03-31T05:42:30.000Z
native_implemented/otp/src/erlang/send_2/test/with_atom_destination.rs
mlwilkerson/lumen
048df6c0840c11496e2d15aa9af2e4a8d07a6e0f
[ "Apache-2.0" ]
235
2019-08-29T23:44:13.000Z
2022-03-17T11:43:25.000Z
native_implemented/otp/src/erlang/send_2/test/with_atom_destination.rs
mlwilkerson/lumen
048df6c0840c11496e2d15aa9af2e4a8d07a6e0f
[ "Apache-2.0" ]
95
2019-08-29T19:11:28.000Z
2022-01-03T05:14:16.000Z
use super::*; mod registered; #[test] fn unregistered_errors_badarg() { run!( |arc_process| { ( Just(arc_process.clone()), strategy::term::atom(), strategy::term(arc_process.clone()), ) }, |(arc_process, destination, message)| { prop_assert_badarg!( result(&arc_process, destination, message), format!("name ({}) not registered", destination) ); Ok(()) }, ); }
21.76
64
0.457721
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn unregistered_errors_badarg() {\n run!(\n |arc_process| {\n (\n Just(arc_process.clone()),\n strategy::term::atom(),\n strategy::term(arc_process.clone()),\n )\n },\n |(arc_process, destination, message)| {\n prop_assert_badarg!(\n result(&arc_process, destination, message),\n format!(\"name ({}) not registered\", destination)\n );\n\n Ok(())\n },\n );\n}\n}" ]
f7044d3ec93d760dcc3d6063096c373aaab80813
29,755
rs
Rust
zeropool-substrate-devnet/pallets/zeropool-substrate/src/lib.rs
w3f-community/zeropool-substrate
474a680cb8bf3f2d18e09a5682fff14d57367594
[ "Apache-2.0", "MIT" ]
null
null
null
zeropool-substrate-devnet/pallets/zeropool-substrate/src/lib.rs
w3f-community/zeropool-substrate
474a680cb8bf3f2d18e09a5682fff14d57367594
[ "Apache-2.0", "MIT" ]
null
null
null
zeropool-substrate-devnet/pallets/zeropool-substrate/src/lib.rs
w3f-community/zeropool-substrate
474a680cb8bf3f2d18e09a5682fff14d57367594
[ "Apache-2.0", "MIT" ]
null
null
null
// Zeropool Substrate Pallet // example to submit to test_groth16verify: {"proof":"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==","input":"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ=="} #![cfg_attr(not(feature = "std"), no_std)] extern crate alloc; use alloc::vec; use alt_serde::{Deserialize, Deserializer}; use borsh::{BorshDeserialize, BorshSerialize}; use ff_uint::{construct_uint, Uint}; use frame_support::{ decl_error, decl_event, decl_module, decl_storage, dispatch, ensure, traits::{Currency, EnsureOrigin, Get, OnUnbalanced, ReservableCurrency}, }; use frame_system::ensure_signed; use sp_std::prelude::*; pub mod alt_bn128; construct_uint! { pub struct U256(4); } pub type G1 = [U256; 2]; pub type G2 = [U256; 4]; pub type VU256 = sp_std::prelude::Vec<U256>; #[derive(Debug, Clone, BorshSerialize, BorshDeserialize, Default)] pub struct VK { alpha: G1, beta: G2, gamma: G2, delta: G2, ic: Vec<G1>, } #[derive(Debug, Clone, BorshSerialize, BorshDeserialize, Default)] pub struct Proof { a: G1, b: G2, c: G1, } #[serde(crate = "alt_serde")] #[derive(Deserialize, Default)] struct Jsonproofinput { #[serde(deserialize_with = "de_string_to_bytes")] proof: sp_std::prelude::Vec<u8>, #[serde(deserialize_with = "de_string_to_bytes")] input: sp_std::prelude::Vec<u8>, } type BalanceOf<T> = <<T as Trait>::Currency as Currency<<T as frame_system::Trait>::AccountId>>::Balance; type NegativeImbalanceOf<T> = <<T as Trait>::Currency as Currency<<T as frame_system::Trait>::AccountId>>::NegativeImbalance; pub trait Trait: frame_system::Trait { /// The overarching event type. 
type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>; /// The currency trait. type Currency: ReservableCurrency<Self::AccountId>; /// Reservation fee. type ReservationFee: Get<BalanceOf<Self>>; /// What to do with slashed funds. type Slashed: OnUnbalanced<NegativeImbalanceOf<Self>>; /// The origin which may forcibly set or remove a proof. Root can always do this. type ForceOrigin: EnsureOrigin<Self::Origin>; /// The minimum length a proof may be. type MinLength: Get<usize>; /// The maximum length a proof may be. type MaxLength: Get<usize>; } decl_storage! { trait Store for Module<T: Trait> as Zeropool { /// The lookup table for verificationkey. VerificationKey: map hasher(twox_64_concat) T::AccountId => Option<(Vec<u8>, BalanceOf<T>)>; } } decl_event!( pub enum Event<T> where AccountId = <T as frame_system::Trait>::AccountId, { VerificationKeySet(AccountId), VerificationKeyUpdated(AccountId), VerificationSuccessful(AccountId), VerificationFailed(AccountId), } ); decl_error! { /// Error for the Zeropool module. pub enum Error for Module<T: Trait> { TooShort, TooLong, VerificationSuccessful, VerificationFailed, } } decl_module! { /// Zeropool module declaration. pub struct Module<T: Trait> for enum Call where origin: T::Origin { type Error = Error<T>; fn deposit_event() = default; /// Reservation fee. const ReservationFee: BalanceOf<T> = T::ReservationFee::get(); /// The minimum length a proof may be. const MinLength: u32 = T::MinLength::get() as u32; /// The maximum length a proof may be. const MaxLength: u32 = T::MaxLength::get() as u32; /// Store Verificatoion Key - VK /// # <weight> /// - O(1). /// - At most one balance operation. /// - One storage read/write. /// - One event. 
/// # </weight> /// test with: yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh /// data is LE-encoded VK struct in base64 (check groth16verify description) #[weight = 500_000] fn set_vk(origin, vkb: Vec<u8>) -> dispatch::DispatchResult { let sender = ensure_signed(origin)?; //check is signed ensure!(vkb.len() >= 64, Error::<T>::TooShort); //check minimum length ensure!(vkb.len() <= 8192, Error::<T>::TooLong); // check maximum length //deserialize from borsh let vkstorage=vkb.clone(); let vk = base64::decode(vkb).unwrap_or_default(); let _vkd=VK::try_from_slice(&vk).unwrap_or_default(); let deposit = if let Some((_, deposit)) = <VerificationKey<T>>::get(&sender) { Self::deposit_event(RawEvent::VerificationKeyUpdated(sender.clone())); deposit } else { let deposit = T::ReservationFee::get(); T::Currency::reserve(&sender, deposit)?; Self::deposit_event(RawEvent::VerificationKeySet(sender.clone())); deposit }; <VerificationKey<T>>::insert(&sender, (vkstorage, deposit)); // Return a successful DispatchResult Ok(()) } /// Verify groth16 by json including proof and input (verification key is 
loaded from storage) /// # <weight> /// - O(1). /// - At most one balance operation. /// - One storage read/write. /// - One event. /// # </weight> /// /// data is Proof struct and inputs in LE-encoding, base64 and JSON (check groth16verify description) #[weight = 500_000] fn test_groth16verify(origin, jproofinput: Vec<u8>) -> dispatch::DispatchResult { let sender = ensure_signed(origin)?; //check is signed ensure!(jproofinput.len() >= 64, Error::<T>::TooShort); //check minimum length ensure!(jproofinput.len() <= 8192, Error::<T>::TooLong); // check maximum length //deserialize json jproofinput #[allow(unused_assignments)] let mut proofinput: Jsonproofinput= Jsonproofinput::default(); let r = serde_json::from_slice(&jproofinput.as_slice()); match r { Ok(rs) => proofinput=rs, Err(_e) => { Self::deposit_event(RawEvent::VerificationFailed(sender)); return Err(Error::<T>::VerificationFailed.into()) } }; //deserialize from borsh let proof = base64::decode(proofinput.proof).unwrap_or_default(); let proofd=Proof::try_from_slice(&proof).unwrap_or_default(); let input = base64::decode(proofinput.input).unwrap_or_default(); let inputd=VU256::try_from_slice(&input).unwrap_or_default(); // get vk from storage let vks = if let Some((vkstorage, _deposit)) = <VerificationKey<T>>::get(&sender) { vkstorage } else { Self::deposit_event(RawEvent::VerificationFailed(sender)); return Err(Error::<T>::VerificationFailed.into()) }; let vk: Vec<u8> = base64::decode(&vks).unwrap_or_default(); let vkd=VK::try_from_slice(&vk).unwrap_or_default(); //groth16 verification if groth16verify(vkd,proofd,inputd){ // Return a successful DispatchResult Self::deposit_event(RawEvent::VerificationSuccessful(sender)); Ok(()) } else{ // Return a failed verification DispatchResult Self::deposit_event(RawEvent::VerificationFailed(sender)); Err(Error::<T>::VerificationFailed.into()) } } } } /// Computes Groth16 verify on alt_bn128 curve. 
/// # Arguments /// /// * `vkd` - verification key, struct of (alpha: G1, beta:G2, gamma:G2, delta:G2, ic:[G1]) /// * `proofd` - proof, struct of (a:G1, b:G2, c:G1) /// * `inputd` - vector of imputs [Fr] /// /// # Data types /// G2 is Fr-ordered subgroup point (x:Fq2, y:Fq2) on alt_bn128 twist, /// alt_bn128 twist is Y^2 = X^3 + 3/(i+9) curve over Fq2 /// Fq2 is complex field element (re: Fq, im: Fq) /// G1 is point (x:Fq, y:Fq) on alt_bn128, /// alt_bn128 is Y^2 = X^3 + 3 curve over Fq /// Fq is LE-serialized u256 number lesser than 21888242871839275222246405745257275088696311157297823662689037894645226208583 /// Fr is LE-serialized u256 number lesser than 21888242871839275222246405745257275088548364400416034343698204186575808495617 fn groth16verify(vkd: VK, proofd: Proof, inputd: VU256) -> bool { // make verification let neg_a = alt_bn128_g1_neg(proofd.a); let acc_expr = vkd .ic .iter() .zip([U256::ONE].iter().chain(inputd.iter())) .map(|(&base, &exp)| (base, exp)) .collect::<Vec<_>>(); let acc = alt_bn128_g1_multiexp(&acc_expr); let pairing_expr = vec![ (neg_a, proofd.b), (vkd.alpha, vkd.beta), (acc, vkd.gamma), (proofd.c, vkd.delta), ]; alt_bn128_pairing_check(&pairing_expr) } // function to deserialize json field pub fn de_string_to_bytes<'de, D>(de: D) -> Result<sp_std::prelude::Vec<u8>, D::Error> where D: Deserializer<'de>, { let s: &str = Deserialize::deserialize(de)?; Ok(s.as_bytes().to_vec()) } // groth16 verification functions (further calls to functions in alt_b128.rs) pub fn alt_bn128_g1_neg(p: G1) -> G1 { alt_bn128_g1_sum(&[(true, p)]) } pub fn alt_bn128_g1_sum(v: &[(bool, G1)]) -> G1 { let data = v.try_to_vec().unwrap_or_default(); let res = crate::alt_bn128::alt_bn128_g1_sum(&data).unwrap_or_default(); let mut res_ptr = &res[..]; <G1 as BorshDeserialize>::deserialize(&mut res_ptr).unwrap_or_default() } pub fn alt_bn128_g1_multiexp(v: &[(G1, U256)]) -> G1 { let data = v.try_to_vec().unwrap_or_default(); let res = 
crate::alt_bn128::alt_bn128_g1_multiexp(&data).unwrap_or_default(); let mut res_ptr = &res[..]; <G1 as BorshDeserialize>::deserialize(&mut res_ptr).unwrap_or_default() } pub fn alt_bn128_pairing_check(v: &[(G1, G2)]) -> bool { let data = v.try_to_vec().unwrap_or_default(); crate::alt_bn128::alt_bn128_pairing_check(&data).unwrap_or(false) } // Testing unit, execue by: cargo run test #[cfg(test)] mod tests { extern crate pretty_assertions; use super::*; use std::str; #[serde(crate = "alt_serde")] #[derive(Deserialize, Default)] struct Jsonvkproofinput { #[serde(deserialize_with = "de_string_to_bytes")] vk: sp_std::prelude::Vec<u8>, #[serde(deserialize_with = "de_string_to_bytes")] proof: sp_std::prelude::Vec<u8>, #[serde(deserialize_with = "de_string_to_bytes")] input: sp_std::prelude::Vec<u8>, } #[test] fn deserialize_json() { let jvkproofinput: Vec<u8>=br#"{"vk":"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh","proof":"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLi
SA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==","input":"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ=="}"#.to_vec(); let vkc = r#"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh"#; let vkproofinput: Jsonvkproofinput = serde_json::from_slice(&jvkproofinput.as_slice()).unwrap(); let vkcmp = match str::from_utf8(&vkproofinput.vk) { Ok(v) => v, Err(e) => panic!("Invalid UTF-8 sequence: {}", e), }; assert_eq!(vkcmp, vkc); } #[test] fn failed_deserialize_json() { let jvkproofinput: 
Vec<u8>=br#"{"vk":"xO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh","proof":"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==","input":"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ=="}"#.to_vec(); let vkc = 
r#"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh"#; let vkproofinput: Jsonvkproofinput = serde_json::from_slice(&jvkproofinput.as_slice()).unwrap(); let vkcmp = match str::from_utf8(&vkproofinput.vk) { Ok(v) => v, Err(e) => panic!("Invalid UTF-8 sequence: {}", e), }; assert!(!vkcmp.eq(vkc)); } #[test] fn deserialize_borsh() { let jvkproofinput: 
Vec<u8>=br#"{"vk":"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh","proof":"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==","input":"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ=="}"#.to_vec(); let _vkc = 
r#"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh"#; let vkproofinput: Jsonvkproofinput = serde_json::from_slice(&jvkproofinput.as_slice()).unwrap(); let _vkcmp = match str::from_utf8(&vkproofinput.vk) { Ok(v) => v, Err(e) => panic!("Invalid UTF-8 sequence: {}", e), }; let _vkstorage = vkproofinput.vk.clone(); let vk = base64::decode(vkproofinput.vk).unwrap_or(vec![]); let _vkd = VK::try_from_slice(&vk).unwrap_or(VK::default()); let proof = base64::decode(vkproofinput.proof).unwrap_or(vec![]); let _proofd = Proof::try_from_slice(&proof).unwrap_or(Proof::default()); let input = base64::decode(vkproofinput.input).unwrap_or(vec![]); let _inputd = VU256::try_from_slice(&input).unwrap_or(VU256::default()); } #[test] fn failed_deserialize_borsh() { let jvkproofinput: 
Vec<u8>=br#"{"vk":"xO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh","proof":"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==","input":"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ=="}"#.to_vec(); let _vkc = 
r#"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh"#; let vkproofinput: Jsonvkproofinput = serde_json::from_slice(&jvkproofinput.as_slice()).unwrap(); let _vkcmp = match str::from_utf8(&vkproofinput.vk) { Ok(v) => v, Err(e) => panic!("Invalid UTF-8 sequence: {}", e), }; let vkstorage = vkproofinput.vk.clone(); let vk = base64::decode(vkstorage).unwrap(); let vkd = VK::try_from_slice(&vk).unwrap(); let vkstoragec = vkproofinput.vk.clone(); let vkc = base64::decode(vkstoragec).unwrap(); let vkdc = VK::try_from_slice(&vkc).unwrap(); assert!(vkc.eq(&vk)); } #[test] fn groth16_verification() { let jvkproofinput: 
Vec<u8>=br#"{"vk":"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh","proof":"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==","input":"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ=="}"#.to_vec(); let mut _vkproofinput: Jsonvkproofinput = Jsonvkproofinput::default(); let vkproofinput: Jsonvkproofinput = serde_json::from_slice(&jvkproofinput.as_slice()).unwrap(); let _vkstorage = vkproofinput.vk.clone(); let vk = base64::decode(vkproofinput.vk).unwrap_or(vec![]); let vkd = VK::try_from_slice(&vk).unwrap_or(VK::default()); let proof = base64::decode(vkproofinput.proof).unwrap_or(vec![]); let proofd = Proof::try_from_slice(&proof).unwrap_or(Proof::default()); let input = 
base64::decode(vkproofinput.input).unwrap_or(vec![]); let inputd = VU256::try_from_slice(&input).unwrap_or(VU256::default()); let neg_a = alt_bn128_g1_neg(proofd.a); let acc_expr = vkd .ic .iter() .zip([U256::ONE].iter().chain(inputd.iter())) .map(|(&base, &exp)| (base, exp)) .collect::<Vec<_>>(); let acc = alt_bn128_g1_multiexp(&acc_expr); let pairing_expr = vec![ (neg_a, proofd.b), (vkd.alpha, vkd.beta), (acc, vkd.gamma), (proofd.c, vkd.delta), ]; let verification: bool = alt_bn128_pairing_check(&pairing_expr); assert_eq!(verification, true); } #[test] fn failed_groth16_verification() { let jvkproofinput: Vec<u8>=br#"{"vk":"XO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh","proof":"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==","input":"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50
wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ=="}"#.to_vec(); let mut _vkproofinput: Jsonvkproofinput = Jsonvkproofinput::default(); let vkproofinput: Jsonvkproofinput = serde_json::from_slice(&jvkproofinput.as_slice()).unwrap(); let _vkstorage = vkproofinput.vk.clone(); let vk = base64::decode(vkproofinput.vk).unwrap_or(vec![]); let vkd = VK::try_from_slice(&vk).unwrap_or(VK::default()); let proof = base64::decode(vkproofinput.proof).unwrap_or(vec![]); let proofd = Proof::try_from_slice(&proof).unwrap_or(Proof::default()); let input = base64::decode(vkproofinput.input).unwrap_or(vec![]); let inputd = VU256::try_from_slice(&input).unwrap_or(VU256::default()); let neg_a = alt_bn128_g1_neg(proofd.a); let acc_expr = vkd .ic .iter() .zip([U256::ONE].iter().chain(inputd.iter())) .map(|(&base, &exp)| (base, exp)) .collect::<Vec<_>>(); let acc = alt_bn128_g1_multiexp(&acc_expr); let pairing_expr = vec![ (neg_a, proofd.b), (vkd.alpha, vkd.beta), (acc, vkd.gamma), (proofd.c, vkd.delta), ]; let verification: bool = alt_bn128_pairing_check(&pairing_expr); assert!(!verification); } }
72.750611
1,506
0.765922
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn deserialize_json() {\n let jvkproofinput: Vec<u8>=br#\"{\"vk\":\"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\",\"proof\":\"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==\",\"input\":\"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ==\"}\"#.to_vec();\n let vkc = 
r#\"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\"#;\n let vkproofinput: Jsonvkproofinput =\n serde_json::from_slice(&jvkproofinput.as_slice()).unwrap();\n let vkcmp = match str::from_utf8(&vkproofinput.vk) {\n Ok(v) => v,\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e),\n };\n assert_eq!(vkcmp, vkc);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn failed_deserialize_json() {\n let jvkproofinput: 
Vec<u8>=br#\"{\"vk\":\"xO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\",\"proof\":\"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==\",\"input\":\"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ==\"}\"#.to_vec();\n let vkc = 
r#\"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\"#;\n let vkproofinput: Jsonvkproofinput =\n serde_json::from_slice(&jvkproofinput.as_slice()).unwrap();\n let vkcmp = match str::from_utf8(&vkproofinput.vk) {\n Ok(v) => v,\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e),\n };\n assert!(!vkcmp.eq(vkc));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn deserialize_borsh() {\n let jvkproofinput: 
Vec<u8>=br#\"{\"vk\":\"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\",\"proof\":\"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==\",\"input\":\"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ==\"}\"#.to_vec();\n let _vkc = 
r#\"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\"#;\n let vkproofinput: Jsonvkproofinput =\n serde_json::from_slice(&jvkproofinput.as_slice()).unwrap();\n let _vkcmp = match str::from_utf8(&vkproofinput.vk) {\n Ok(v) => v,\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e),\n };\n let _vkstorage = vkproofinput.vk.clone();\n let vk = base64::decode(vkproofinput.vk).unwrap_or(vec![]);\n let _vkd = VK::try_from_slice(&vk).unwrap_or(VK::default());\n let proof = base64::decode(vkproofinput.proof).unwrap_or(vec![]);\n let _proofd = Proof::try_from_slice(&proof).unwrap_or(Proof::default());\n let input = base64::decode(vkproofinput.input).unwrap_or(vec![]);\n let _inputd = VU256::try_from_slice(&input).unwrap_or(VU256::default());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn failed_deserialize_borsh() {\n let jvkproofinput: 
Vec<u8>=br#\"{\"vk\":\"xO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\",\"proof\":\"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==\",\"input\":\"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ==\"}\"#.to_vec();\n let _vkc = 
r#\"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\"#;\n let vkproofinput: Jsonvkproofinput =\n serde_json::from_slice(&jvkproofinput.as_slice()).unwrap();\n let _vkcmp = match str::from_utf8(&vkproofinput.vk) {\n Ok(v) => v,\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e),\n };\n let vkstorage = vkproofinput.vk.clone();\n let vk = base64::decode(vkstorage).unwrap();\n let vkd = VK::try_from_slice(&vk).unwrap();\n let vkstoragec = vkproofinput.vk.clone();\n let vkc = base64::decode(vkstoragec).unwrap();\n let vkdc = VK::try_from_slice(&vkc).unwrap();\n assert!(vkc.eq(&vk));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn groth16_verification() {\n let jvkproofinput: 
Vec<u8>=br#\"{\"vk\":\"yO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\",\"proof\":\"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9tFJDN4KVnnESrKi7fVjH6A3twUaQEw==\",\"input\":\"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ==\"}\"#.to_vec();\n let mut _vkproofinput: Jsonvkproofinput = Jsonvkproofinput::default();\n let vkproofinput: Jsonvkproofinput =\n serde_json::from_slice(&jvkproofinput.as_slice()).unwrap();\n let _vkstorage = vkproofinput.vk.clone();\n let vk = base64::decode(vkproofinput.vk).unwrap_or(vec![]);\n let vkd = VK::try_from_slice(&vk).unwrap_or(VK::default());\n let proof = base64::decode(vkproofinput.proof).unwrap_or(vec![]);\n let proofd = Proof::try_from_slice(&proof).unwrap_or(Proof::default());\n let input = 
base64::decode(vkproofinput.input).unwrap_or(vec![]);\n let inputd = VU256::try_from_slice(&input).unwrap_or(VU256::default());\n let neg_a = alt_bn128_g1_neg(proofd.a);\n let acc_expr = vkd\n .ic\n .iter()\n .zip([U256::ONE].iter().chain(inputd.iter()))\n .map(|(&base, &exp)| (base, exp))\n .collect::<Vec<_>>();\n let acc = alt_bn128_g1_multiexp(&acc_expr);\n let pairing_expr = vec![\n (neg_a, proofd.b),\n (vkd.alpha, vkd.beta),\n (acc, vkd.gamma),\n (proofd.c, vkd.delta),\n ];\n let verification: bool = alt_bn128_pairing_check(&pairing_expr);\n assert_eq!(verification, true);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn failed_groth16_verification() {\n let jvkproofinput: Vec<u8>=br#\"{\"vk\":\"XO5EICtE+JVjTbRYkayI0Y/BoOJtE9lsrqeCTKTDnxD8UjB8B51wrVsQsVrsi6Uk0b2UKGfs3AJUX2Eud5wnET/ze/CsefR74bOn50BmVoExPPDiJGRD2IejItInd/wbtAH7GstyB1Q1j9uROBBAgE2eEj/cwRcHJobe4MP1mQIsiHdC5KXrBZFMlcRQi19O5pRHJ3fra+CNrckf5PHVL1NDT3E4ah/xGoRIbB0DfMSC2AO5NCyuZzJiAMBEKEcLbiazu9JOT65EXSc7HGM9IKlQXgpITR/jikWNxJc/Jyn6KiimTBN/yj4NVjAogElMdLmVoelMa0SAen8Z5ZwkFc6j3IriiWbKQnnkocqd++FqYs4gTh2rFDvcn2YpAhAmnMf35ssgfTFSIOyLZeRQPJ/SzCQMvSq8p1TAkgF85xv+1Vwd0UmrwJXyPVWhevfis0jEd6Cw78ESIMwB7S4dJwNAnVjEBRrKGfOAAzBIiTQRVMSMY2a1nMP/vr57eJwrOYvVboNDUHw8N+u1KoT3vTQkt6+bdeUBw2X/HBbeuyLcmx9AdsbJ0QY1GGF4cgGnSx9kGtcL9UY4qMWVtJ++LAQAAABZB9VFKNzCZgjPMZ9MTfotIL1czmkU9p4L3+6udM/DCAIGsaMeBAN/AhWI+GDLJK3EPzfiVDtw9PWWv+mifJUEQqRUa63wkfB2CouGxTpfsMPlZW93gzGXl5C4lmqMSQnAYpBIHANPM/R/DtA6eMTKKgKBfqgSMjf8YwlmfckmEkbsEZYwsUj2B+ryafp/qj39z80B/33p62Wz+OdwpcIYLSyprNYGC1nyO/jlRIhqRFhx9qkBRjKz/ddvFv7bdAeyPpjCqbT/6zrE22RSdm1I+tceC6xm3OUJE3wX4d5XF5z1EXo17iShXLdYhwVcd//YzyysetRirUxRPeXNAuAh\",\"proof\":\"Qexag8d0jvm1IWZywscRBuvdSEvlGuhvVg5Qj97vhS5VFas06bgj/yXiuZ+yJ/WZWCYDYq8e5HZPITpoaHAvGckDPBplyUtn8zZ3UI4f5E1uLmxlehAkzVK33Fp8/SEZX4v8OLLT3MP/FWhDvS43u2sLvZcCstjVjbarImuLiSA0IW7UmNgG7u8x99JExO0pp0EAGJ3PiBOzyZ/PhxUPBXvOgxhwNzx0nzZzp+aSY8yhsWxFWRl6UWzmS6J/ieUS1q5Tjwq9gs4qcX6+Q9WWRpvYVboY+f4d6smQyryKdB5Hi5E8/jWGPoD9t
FJDN4KVnnESrKi7fVjH6A3twUaQEw==\",\"input\":\"AwAAAMI1CN4U9DnKW3soxArLClszrtTa/MGicksQVWpir/QNW/hp3N50wmjr1CUHvGP6u6WnrdK7oRDtSHgjcjmUVyr8NQtA06gcVk9m3KPdmWele0Bx9AcLpToixRb2FCx/JQ==\"}\"#.to_vec();\n let mut _vkproofinput: Jsonvkproofinput = Jsonvkproofinput::default();\n let vkproofinput: Jsonvkproofinput =\n serde_json::from_slice(&jvkproofinput.as_slice()).unwrap();\n let _vkstorage = vkproofinput.vk.clone();\n let vk = base64::decode(vkproofinput.vk).unwrap_or(vec![]);\n let vkd = VK::try_from_slice(&vk).unwrap_or(VK::default());\n let proof = base64::decode(vkproofinput.proof).unwrap_or(vec![]);\n let proofd = Proof::try_from_slice(&proof).unwrap_or(Proof::default());\n let input = base64::decode(vkproofinput.input).unwrap_or(vec![]);\n let inputd = VU256::try_from_slice(&input).unwrap_or(VU256::default());\n let neg_a = alt_bn128_g1_neg(proofd.a);\n let acc_expr = vkd\n .ic\n .iter()\n .zip([U256::ONE].iter().chain(inputd.iter()))\n .map(|(&base, &exp)| (base, exp))\n .collect::<Vec<_>>();\n let acc = alt_bn128_g1_multiexp(&acc_expr);\n let pairing_expr = vec![\n (neg_a, proofd.b),\n (vkd.alpha, vkd.beta),\n (acc, vkd.gamma),\n (proofd.c, vkd.delta),\n ];\n let verification: bool = alt_bn128_pairing_check(&pairing_expr);\n assert!(!verification);\n }\n}" ]
f704c532907ed09f7d268415b0c6fbb2abf5a86e
3,521
rs
Rust
src/identity.rs
transumption-unstable/zerotier
891b17b292abb2d7f982675c4c778613edfda59f
[ "Apache-2.0" ]
2
2020-11-22T20:50:54.000Z
2021-02-06T13:02:47.000Z
src/identity.rs
transumption-unstable/zerotier
891b17b292abb2d7f982675c4c778613edfda59f
[ "Apache-2.0" ]
2
2020-01-29T13:24:14.000Z
2020-04-22T03:39:13.000Z
src/identity.rs
transumption/zerotier-rust
891b17b292abb2d7f982675c4c778613edfda59f
[ "Apache-2.0" ]
1
2021-07-10T18:34:30.000Z
2021-07-10T18:34:30.000Z
use crate::{Address, InternalError, PublicKey, SecretKey}; use ed25519_dalek::Keypair; use failure::*; use std::convert::{TryFrom, TryInto}; use std::fs; use std::path::Path; /// Combination of [`Address`](struct.Address.html), [`PublicKey`](struct.PublicKey) and optionally /// [`SecretKey`](struct.SecretKey.html). pub struct Identity { pub address: Address, pub public_key: PublicKey, pub secret_key: Option<SecretKey>, } impl Identity { /// Read ZeroTier identity from given location. pub fn read<P: AsRef<Path>>(path: P) -> Fallible<Self> { Identity::try_from(&fs::read_to_string(path)?[..]) } /// Read ZeroTier identity from default location. pub fn read_default() -> Fallible<Self> { Identity::read("/var/lib/zerotier-one/identity.secret") } } impl TryFrom<SecretKey> for Identity { type Error = Error; fn try_from(secret_key: SecretKey) -> Fallible<Self> { let public_key = PublicKey::from(&secret_key); Ok(Self { address: Address::try_from(&public_key)?, public_key: PublicKey::from(&secret_key), secret_key: Some(secret_key) }) } } /// TODO: use IO reader instead impl TryFrom<&str> for Identity { type Error = Error; fn try_from(identity: &str) -> Fallible<Self> { let split_identity: Vec<&str> = identity.split(':').collect(); let (address, public_key, maybe_secret_key) = match &split_identity[..] 
{ [address, "0", public_key] => (address, public_key, None), [address, "0", public_key, secret_key] => (address, public_key, Some(secret_key)), _ => return Err(InternalError::MalformedIdentity.into()) }; Ok(Identity { address: Address::try_from(hex::decode(address)?.as_slice())?, public_key: PublicKey::try_from(hex::decode(public_key)?.as_slice())?, secret_key: match maybe_secret_key { Some(secret_key) => Some(SecretKey::try_from(hex::decode(secret_key)?.as_slice())?), None => None } }) } } impl TryInto<Keypair> for Identity { type Error = Error; fn try_into(self) -> Fallible<Keypair> { Ok(Keypair { public: self.public_key.ed, secret: self.secret_key.unwrap().ed }) } } #[cfg(test)] pub mod tests { use super::*; #[test] fn test_identity() -> Fallible<()> { // nix-shell -p zerotierone --run 'zerotier-idtool generate' let identity_str = "538c34e03c:0:070288330a72d2aa3cb7935dfe6028d9fb83bdb42240aaa05e33529121babd183ff775351742a47487454195c08c0e83c520e7466fcdde3396a0c4cd40557737:f20542ab6955fe140fb3a5be9557666b9c89a3e2b73432de46d827d11736773aca15c3e03b89a1d09436ae45bc02f84b8d5a0a2f6c0d42b3856c2b22f5ab2b27"; let identity = Identity::try_from(identity_str)?; assert_eq!(identity.address, Address::try_from(&identity.public_key)?); let secret_key = identity.secret_key.unwrap(); let public_key = PublicKey::from(&secret_key); assert_eq!(identity.public_key.ed, public_key.ed); assert_eq!(identity.public_key.dh.as_bytes(), public_key.dh.as_bytes()); let keypair = ed25519_dalek::Keypair { public: public_key.ed, secret: secret_key.ed, }; let message = b"7VbLpreCRY738Sw4OGecCw"; let signature = keypair.sign(message); identity.public_key.ed.verify(message, &signature)?; Ok(()) } }
32.302752
300
0.641579
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_identity() -> Fallible<()> {\n // nix-shell -p zerotierone --run 'zerotier-idtool generate'\n let identity_str = \"538c34e03c:0:070288330a72d2aa3cb7935dfe6028d9fb83bdb42240aaa05e33529121babd183ff775351742a47487454195c08c0e83c520e7466fcdde3396a0c4cd40557737:f20542ab6955fe140fb3a5be9557666b9c89a3e2b73432de46d827d11736773aca15c3e03b89a1d09436ae45bc02f84b8d5a0a2f6c0d42b3856c2b22f5ab2b27\";\n let identity = Identity::try_from(identity_str)?;\n\n assert_eq!(identity.address, Address::try_from(&identity.public_key)?);\n\n let secret_key = identity.secret_key.unwrap();\n let public_key = PublicKey::from(&secret_key);\n\n assert_eq!(identity.public_key.ed, public_key.ed);\n assert_eq!(identity.public_key.dh.as_bytes(), public_key.dh.as_bytes());\n\n let keypair = ed25519_dalek::Keypair {\n public: public_key.ed,\n secret: secret_key.ed,\n };\n\n let message = b\"7VbLpreCRY738Sw4OGecCw\";\n let signature = keypair.sign(message);\n\n identity.public_key.ed.verify(message, &signature)?;\n\n Ok(())\n }\n}" ]
f704c6355e42d1c4fe88e674b07b0a95a07be04b
2,118
rs
Rust
kata/roman_numerals/src/tests.rs
erickhagstrom/rust_stuff
94712813ec44ceb17e75f6d407cfb111499bbc76
[ "Unlicense" ]
null
null
null
kata/roman_numerals/src/tests.rs
erickhagstrom/rust_stuff
94712813ec44ceb17e75f6d407cfb111499bbc76
[ "Unlicense" ]
null
null
null
kata/roman_numerals/src/tests.rs
erickhagstrom/rust_stuff
94712813ec44ceb17e75f6d407cfb111499bbc76
[ "Unlicense" ]
null
null
null
use crate::*; fn test_roman(given: usize, expected: &str) { assert_eq!(arabic_to_roman(given).unwrap(), expected); } #[test] fn one() { test_roman(1, "I"); } #[test] fn two() { test_roman(2, "II"); } #[test] fn three() { test_roman(3, "III"); } #[test] fn four() { test_roman(4, "IV"); } #[test] fn five() { test_roman(5, "V"); } #[test] fn eight() { test_roman(8, "VIII"); } #[test] fn nine() { test_roman(9, "IX"); } #[test] fn thirty() { test_roman(30, "XXX"); } #[test] fn thirty_four() { test_roman(34, "XXXIV"); } #[test] fn thirty_eight() { test_roman(38, "XXXVIII"); } #[test] fn thirty_nine() { test_roman(39, "XXXIX"); } #[test] fn forty() { test_roman(40, "XL"); } #[test] fn forty_eight() { test_roman(48, "XLVIII"); } #[test] fn forty_nine() { test_roman(49, "XLIX"); } #[test] fn fifty() { test_roman(50, "L"); } #[test] fn eighty_eight() { test_roman(88, "LXXXVIII"); } #[test] fn eighty_nine() { test_roman(89, "LXXXIX"); } #[test] fn ninety() { test_roman(90, "XC"); } #[test] fn ninety_eight() { test_roman(98, "XCVIII"); } #[test] fn ninety_nine() { test_roman(99, "XCIX"); } #[test] fn one_hundred() { test_roman(100, "C"); } #[test] fn three_hundred_ninety_nine() { test_roman(399, "CCCXCIX"); } #[test] fn four_hundred() { test_roman(400, "CD"); } #[test] fn four_hundred_ninety_nine() { test_roman(499, "CDXCIX"); } #[test] fn five_hundred() { test_roman(500, "D"); } #[test] fn eight_hundred_ninety_nine() { test_roman(899, "DCCCXCIX"); } #[test] fn nine_hundred() { test_roman(900, "CM"); } #[test] fn nine_nundred_ninety_nine() { test_roman(999, "CMXCIX"); } #[test] fn one_thousand() { test_roman(1_000, "M"); } #[test] fn three_thousand_nine_hundred_ninety_nine() { test_roman(3_999, "MMMCMXCIX"); } #[test] fn bounds_zero() { assert!(match arabic_to_roman(0) { Ok(_) => false, Err(e) => "Must be greater than 0: 0" == e, }) } #[test] fn bounds_four_thousand() { assert!(match arabic_to_roman(4_000) { Ok(_) => false, Err(e) => "Must be less than 4,000: 4000" == e, }) }
12.313953
56
0.596317
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn one() {\n test_roman(1, \"I\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn two() {\n test_roman(2, \"II\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn three() {\n test_roman(3, \"III\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn four() {\n test_roman(4, \"IV\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn five() {\n test_roman(5, \"V\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn eight() {\n test_roman(8, \"VIII\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn nine() {\n test_roman(9, \"IX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn thirty() {\n test_roman(30, \"XXX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn thirty_four() {\n test_roman(34, \"XXXIV\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn thirty_eight() {\n test_roman(38, \"XXXVIII\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn thirty_nine() {\n test_roman(39, \"XXXIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forty() {\n test_roman(40, \"XL\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forty_eight() {\n test_roman(48, \"XLVIII\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forty_nine() {\n test_roman(49, \"XLIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fifty() {\n test_roman(50, \"L\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn eighty_eight() {\n test_roman(88, \"LXXXVIII\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn eighty_nine() {\n test_roman(89, \"LXXXIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ninety() {\n test_roman(90, \"XC\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ninety_eight() {\n test_roman(98, 
\"XCVIII\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ninety_nine() {\n test_roman(99, \"XCIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn one_hundred() {\n test_roman(100, \"C\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn three_hundred_ninety_nine() {\n test_roman(399, \"CCCXCIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn four_hundred() {\n test_roman(400, \"CD\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn four_hundred_ninety_nine() {\n test_roman(499, \"CDXCIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn five_hundred() {\n test_roman(500, \"D\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn eight_hundred_ninety_nine() {\n test_roman(899, \"DCCCXCIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn nine_hundred() {\n test_roman(900, \"CM\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn nine_nundred_ninety_nine() {\n test_roman(999, \"CMXCIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn one_thousand() {\n test_roman(1_000, \"M\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn three_thousand_nine_hundred_ninety_nine() {\n test_roman(3_999, \"MMMCMXCIX\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn bounds_zero() {\n assert!(match arabic_to_roman(0) {\n Ok(_) => false,\n Err(e) => \"Must be greater than 0: 0\" == e,\n })\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn bounds_four_thousand() {\n assert!(match arabic_to_roman(4_000) {\n Ok(_) => false,\n Err(e) => \"Must be less than 4,000: 4000\" == e,\n })\n}\n}" ]
f704da39d6b1a2c5c9da1a7e173a168d567ab611
2,168
rs
Rust
src/problem/p0190_reverse_bits.rs
RUAN0007/leetcode-rust
9d8a05c7e8b5bb9112ea4e46a6ff64cc6902d728
[ "Apache-2.0" ]
1
2022-03-11T07:56:11.000Z
2022-03-11T07:56:11.000Z
src/problem/p0190_reverse_bits.rs
RUAN0007/leetcode-rust
9d8a05c7e8b5bb9112ea4e46a6ff64cc6902d728
[ "Apache-2.0" ]
null
null
null
src/problem/p0190_reverse_bits.rs
RUAN0007/leetcode-rust
9d8a05c7e8b5bb9112ea4e46a6ff64cc6902d728
[ "Apache-2.0" ]
null
null
null
/** * [190] Reverse Bits * * Reverse bits of a given 32 bits unsigned integer. * Note: * * Note that in some languages such as Java, there is no unsigned integer type. In this case, both input and output will be given as a signed integer type. They should not affect your implementation, as the integer's internal binary representation is the same, whether it is signed or unsigned. * In Java, the compiler represents the signed integers using <a href="https://en.wikipedia.org/wiki/Two%27s_complement" target="_blank">2's complement notation</a>. Therefore, in Example 2 above, the input represents the signed integer -3 and the output represents the signed integer -1073741825. * * Follow up: * If this function is called many times, how would you optimize it? * * Example 1: * * Input: n = 00000010100101000001111010011100 * Output: 964176192 (00111001011110000010100101000000) * Explanation: The input binary string 00000010100101000001111010011100 represents the unsigned integer 43261596, so return 964176192 which its binary representation is 00111001011110000010100101000000. * * Example 2: * * Input: n = 11111111111111111111111111111101 * Output: 3221225471 (10111111111111111111111111111111) * Explanation: The input binary string 11111111111111111111111111111101 represents the unsigned integer 4294967293, so return 3221225471 which its binary representation is 10111111111111111111111111111111. 
* * * Constraints: * * The input must be a binary string of length 32 * */ pub struct Solution {} // problem: https://leetcode.com/problems/reverse-bits/ // discuss: https://leetcode.com/problems/reverse-bits/discuss/?currentPage=1&orderBy=most_votes&query= // submission codes start here impl Solution { pub fn reverse_bits(x: u32) -> u32 { let mut result : u32 = 0; for i in 0..32 { if x & (1 << i) != 0 { result |= 1 << (32 - 1 - i); } } result } } // submission codes end #[cfg(test)] mod tests { use super::*; #[test] fn test_190() { assert_eq!(Solution::reverse_bits(43261596), 964176192); } }
35.540984
298
0.706181
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_190() {\n assert_eq!(Solution::reverse_bits(43261596), 964176192);\n }\n}" ]
f70504b021fda64d350f0d132fae4030a3629603
4,479
rs
Rust
src/fixes/leading_character.rs
PurpleMyst/dotenv-linter
371c3c3d33b3bffaab8bd7abec326ae8239c700b
[ "MIT" ]
null
null
null
src/fixes/leading_character.rs
PurpleMyst/dotenv-linter
371c3c3d33b3bffaab8bd7abec326ae8239c700b
[ "MIT" ]
8
2020-04-24T20:37:04.000Z
2020-07-08T19:53:31.000Z
src/fixes/leading_character.rs
PurpleMyst/dotenv-linter
371c3c3d33b3bffaab8bd7abec326ae8239c700b
[ "MIT" ]
null
null
null
use super::Fix; use crate::common::*; pub(crate) struct LeadingCharacterFixer<'a> { name: &'a str, } impl Default for LeadingCharacterFixer<'_> { fn default() -> Self { Self { name: "LeadingCharacter", } } } impl Fix for LeadingCharacterFixer<'_> { fn name(&self) -> &str { self.name } fn fix_line(&mut self, line: &mut LineEntry) -> Option<()> { let key = line.get_key()?; let cleaned_key = remove_invalid_leading_chars(&key); line.raw_string = format!("{}={}", cleaned_key, line.get_value()?); Some(()) } } #[cfg(test)] mod tests { use super::*; use crate::common::tests::*; #[test] fn fix_leading_dot() { let mut fixer = LeadingCharacterFixer::default(); let mut leading_period = line_entry(1, 1, ".FOO=BAR"); assert_eq!(Some(()), fixer.fix_line(&mut leading_period)); assert_eq!("FOO=BAR", leading_period.raw_string); } #[test] fn fix_leading_space() { let mut fixer = LeadingCharacterFixer::default(); let mut leading_space = line_entry(1, 1, " FOO=BAR"); assert_eq!(Some(()), fixer.fix_line(&mut leading_space)); assert_eq!("FOO=BAR", leading_space.raw_string); } #[test] fn fix_leading_asterisk() { let mut fixer = LeadingCharacterFixer::default(); let mut leading_asterisk = line_entry(1, 1, "*FOO=BAR"); assert_eq!(Some(()), fixer.fix_line(&mut leading_asterisk)); assert_eq!("FOO=BAR", leading_asterisk.raw_string); } #[test] fn fix_leading_number() { let mut fixer = LeadingCharacterFixer::default(); let mut leading_number = line_entry(1, 1, "1FOO=BAR"); assert_eq!(Some(()), fixer.fix_line(&mut leading_number)); assert_eq!("FOO=BAR", leading_number.raw_string); } #[test] fn fix_many_invalid_leading_chars() { let mut fixer = LeadingCharacterFixer::default(); let mut leading_number = line_entry(1, 1, "-1&*FOO=BAR"); assert_eq!(Some(()), fixer.fix_line(&mut leading_number)); assert_eq!("FOO=BAR", leading_number.raw_string); } #[test] fn leading_underscore_is_unchanged() { let mut fixer = LeadingCharacterFixer::default(); let mut leading_underscore = line_entry(1, 1, "_FOO=BAR"); 
assert_eq!(Some(()), fixer.fix_line(&mut leading_underscore)); assert_eq!("_FOO=BAR", leading_underscore.raw_string); } #[test] fn no_leading_char_is_unchanged() { let mut fixer = LeadingCharacterFixer::default(); let mut normal = line_entry(1, 1, "FOO=BAR"); assert_eq!(Some(()), fixer.fix_line(&mut normal)); assert_eq!("FOO=BAR", normal.raw_string); } #[test] fn fix_warnings_test() { let mut fixer = LeadingCharacterFixer::default(); let mut lines = vec![ line_entry(1, 7, ".FOO=BAR"), line_entry(2, 7, " Z=Y"), line_entry(3, 7, "*BAR=BAZ"), line_entry(4, 7, "1QUX=QUUX"), line_entry(5, 7, "_QUUX=FOOBAR"), line_entry(6, 7, "KEY=VALUE"), blank_line_entry(6, 7), ]; let mut warnings = vec![ Warning::new( lines[0].clone(), "LeadingCharacter", String::from("Invalid leading character detected"), ), Warning::new( lines[1].clone(), "LeadingCharacter", String::from("Invalid leading character detected"), ), Warning::new( lines[2].clone(), "LeadingCharacter", String::from("Invalid leading character detected"), ), Warning::new( lines[3].clone(), "LeadingCharacter", String::from("Invalid leading character detected"), ), ]; assert_eq!( Some(4), fixer.fix_warnings(warnings.iter_mut().collect(), &mut lines) ); assert_eq!("FOO=BAR", lines[0].raw_string); assert_eq!("Z=Y", lines[1].raw_string); assert_eq!("BAR=BAZ", lines[2].raw_string); assert_eq!("QUX=QUUX", lines[3].raw_string); assert_eq!("_QUUX=FOOBAR", lines[4].raw_string); assert_eq!("KEY=VALUE", lines[5].raw_string); assert_eq!("\n", lines[6].raw_string); } }
29.86
75
0.56017
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fix_leading_dot() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut leading_period = line_entry(1, 1, \".FOO=BAR\");\n\n assert_eq!(Some(()), fixer.fix_line(&mut leading_period));\n assert_eq!(\"FOO=BAR\", leading_period.raw_string);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fix_leading_space() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut leading_space = line_entry(1, 1, \" FOO=BAR\");\n\n assert_eq!(Some(()), fixer.fix_line(&mut leading_space));\n assert_eq!(\"FOO=BAR\", leading_space.raw_string);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fix_leading_asterisk() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut leading_asterisk = line_entry(1, 1, \"*FOO=BAR\");\n\n assert_eq!(Some(()), fixer.fix_line(&mut leading_asterisk));\n assert_eq!(\"FOO=BAR\", leading_asterisk.raw_string);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fix_leading_number() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut leading_number = line_entry(1, 1, \"1FOO=BAR\");\n\n assert_eq!(Some(()), fixer.fix_line(&mut leading_number));\n assert_eq!(\"FOO=BAR\", leading_number.raw_string);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fix_many_invalid_leading_chars() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut leading_number = line_entry(1, 1, \"-1&*FOO=BAR\");\n\n assert_eq!(Some(()), fixer.fix_line(&mut leading_number));\n assert_eq!(\"FOO=BAR\", leading_number.raw_string);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn leading_underscore_is_unchanged() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut leading_underscore = line_entry(1, 1, \"_FOO=BAR\");\n\n assert_eq!(Some(()), fixer.fix_line(&mut leading_underscore));\n assert_eq!(\"_FOO=BAR\", leading_underscore.raw_string);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n 
#[test]\n fn no_leading_char_is_unchanged() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut normal = line_entry(1, 1, \"FOO=BAR\");\n\n assert_eq!(Some(()), fixer.fix_line(&mut normal));\n assert_eq!(\"FOO=BAR\", normal.raw_string);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fix_warnings_test() {\n let mut fixer = LeadingCharacterFixer::default();\n let mut lines = vec![\n line_entry(1, 7, \".FOO=BAR\"),\n line_entry(2, 7, \" Z=Y\"),\n line_entry(3, 7, \"*BAR=BAZ\"),\n line_entry(4, 7, \"1QUX=QUUX\"),\n line_entry(5, 7, \"_QUUX=FOOBAR\"),\n line_entry(6, 7, \"KEY=VALUE\"),\n blank_line_entry(6, 7),\n ];\n\n let mut warnings = vec![\n Warning::new(\n lines[0].clone(),\n \"LeadingCharacter\",\n String::from(\"Invalid leading character detected\"),\n ),\n Warning::new(\n lines[1].clone(),\n \"LeadingCharacter\",\n String::from(\"Invalid leading character detected\"),\n ),\n Warning::new(\n lines[2].clone(),\n \"LeadingCharacter\",\n String::from(\"Invalid leading character detected\"),\n ),\n Warning::new(\n lines[3].clone(),\n \"LeadingCharacter\",\n String::from(\"Invalid leading character detected\"),\n ),\n ];\n\n assert_eq!(\n Some(4),\n fixer.fix_warnings(warnings.iter_mut().collect(), &mut lines)\n );\n\n assert_eq!(\"FOO=BAR\", lines[0].raw_string);\n assert_eq!(\"Z=Y\", lines[1].raw_string);\n assert_eq!(\"BAR=BAZ\", lines[2].raw_string);\n assert_eq!(\"QUX=QUUX\", lines[3].raw_string);\n assert_eq!(\"_QUUX=FOOBAR\", lines[4].raw_string);\n assert_eq!(\"KEY=VALUE\", lines[5].raw_string);\n assert_eq!(\"\\n\", lines[6].raw_string);\n }\n}" ]
f7050bff5df18b67f31b6c8d06a77934e753f4a8
6,369
rs
Rust
src/dispatching/update_listeners/polling.rs
berkus/teloxide
c7c9ce93e97f4ca6f565d5c4e248677a07215263
[ "MIT" ]
1,117
2019-12-10T13:53:31.000Z
2022-03-30T04:22:46.000Z
src/dispatching/update_listeners/polling.rs
berkus/teloxide
c7c9ce93e97f4ca6f565d5c4e248677a07215263
[ "MIT" ]
264
2019-11-30T19:43:16.000Z
2022-03-27T09:30:19.000Z
src/dispatching/update_listeners/polling.rs
berkus/teloxide
c7c9ce93e97f4ca6f565d5c4e248677a07215263
[ "MIT" ]
122
2019-12-18T10:34:30.000Z
2022-03-04T13:59:52.000Z
use std::{convert::TryInto, time::Duration}; use futures::{ future::{ready, Either}, stream::{self, Stream, StreamExt}, }; use crate::{ dispatching::{ stop_token::{AsyncStopFlag, AsyncStopToken}, update_listeners::{stateful_listener::StatefulListener, UpdateListener}, }, payloads::GetUpdates, requests::{HasPayload, Request, Requester}, types::{AllowedUpdate, SemiparsedVec, Update}, }; /// Returns a long polling update listener with `timeout` of 10 seconds. /// /// See also: [`polling`](polling). /// /// ## Notes /// /// This function will automatically delete a webhook if it was set up. pub async fn polling_default<R>(requester: R) -> impl UpdateListener<R::Err> where R: Requester + Send + 'static, <R as Requester>::GetUpdatesFaultTolerant: Send, { delete_webhook_if_setup(&requester).await; polling(requester, Some(Duration::from_secs(10)), None, None) } /// Returns a long/short polling update listener with some additional options. /// /// - `bot`: Using this bot, the returned update listener will receive updates. /// - `timeout`: A timeout for polling. /// - `limit`: Limits the number of updates to be retrieved at once. Values /// between 1—100 are accepted. /// - `allowed_updates`: A list the types of updates you want to receive. /// See [`GetUpdates`] for defaults. /// /// See also: [`polling_default`](polling_default). 
/// /// [`GetUpdates`]: crate::payloads::GetUpdates pub fn polling<R>( requester: R, timeout: Option<Duration>, limit: Option<u8>, allowed_updates: Option<Vec<AllowedUpdate>>, ) -> impl UpdateListener<R::Err> where R: Requester + Send + 'static, <R as Requester>::GetUpdatesFaultTolerant: Send, { struct State<B: Requester> { bot: B, timeout: Option<u32>, limit: Option<u8>, allowed_updates: Option<Vec<AllowedUpdate>>, offset: i32, flag: AsyncStopFlag, token: AsyncStopToken, } fn stream<B>(st: &mut State<B>) -> impl Stream<Item = Result<Update, B::Err>> + Send + '_ where B: Requester + Send, <B as Requester>::GetUpdatesFaultTolerant: Send, { stream::unfold(st, move |state| async move { let State { timeout, limit, allowed_updates, bot, offset, flag, .. } = &mut *state; if flag.is_stopped() { let mut req = bot.get_updates_fault_tolerant(); req.payload_mut().0 = GetUpdates { offset: Some(*offset), timeout: Some(0), limit: Some(1), allowed_updates: allowed_updates.take(), }; return match req.send().await { Ok(_) => None, Err(err) => Some((Either::Left(stream::once(ready(Err(err)))), state)), }; } let mut req = bot.get_updates_fault_tolerant(); req.payload_mut().0 = GetUpdates { offset: Some(*offset), timeout: *timeout, limit: *limit, allowed_updates: allowed_updates.take(), }; let updates = match req.send().await { Err(err) => return Some((Either::Left(stream::once(ready(Err(err)))), state)), Ok(SemiparsedVec(updates)) => { // Set offset to the last update's id + 1 if let Some(upd) = updates.last() { let id: i32 = match upd { Ok(ok) => ok.id, Err((value, _)) => value["update_id"] .as_i64() .expect("The 'update_id' field must always exist in Update") .try_into() .expect("update_id must be i32"), }; *offset = id + 1; } for update in &updates { if let Err((value, e)) = update { log::error!( "Cannot parse an update.\nError: {:?}\nValue: {}\n\ This is a bug in teloxide-core, please open an issue here: \ https://github.com/teloxide/teloxide-core/issues.", e, value ); } } 
updates.into_iter().filter_map(Result::ok).map(Ok) } }; Some((Either::Right(stream::iter(updates)), state)) }) .flatten() } let (token, flag) = AsyncStopToken::new_pair(); let state = State { bot: requester, timeout: timeout.map(|t| t.as_secs().try_into().expect("timeout is too big")), limit, allowed_updates, offset: 0, flag, token, }; let stop_token = |st: &mut State<_>| st.token.clone(); let hint_allowed_updates = Some(|state: &mut State<_>, allowed: &mut dyn Iterator<Item = AllowedUpdate>| { // TODO: we should probably warn if there already were different allowed updates // before state.allowed_updates = Some(allowed.collect()); }); let timeout_hint = Some(move |_: &State<_>| timeout); StatefulListener::new_with_hints(state, stream, stop_token, hint_allowed_updates, timeout_hint) } async fn delete_webhook_if_setup<R>(requester: &R) where R: Requester, { let webhook_info = match requester.get_webhook_info().send().await { Ok(ok) => ok, Err(e) => { log::error!("Failed to get webhook info: {:?}", e); return; } }; let is_webhook_setup = !webhook_info.url.is_empty(); if is_webhook_setup { if let Err(e) = requester.delete_webhook().send().await { log::error!("Failed to delete a webhook: {:?}", e); } } } #[test] fn polling_is_send() { use crate::dispatching::update_listeners::AsUpdateStream; let bot = crate::Bot::new("TOKEN"); let mut polling = polling(bot, None, None, None); assert_send(&polling); assert_send(&polling.as_stream()); fn assert_send(_: &impl Send) {} }
32.829897
99
0.534621
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn polling_is_send() {\n use crate::dispatching::update_listeners::AsUpdateStream;\n\n let bot = crate::Bot::new(\"TOKEN\");\n let mut polling = polling(bot, None, None, None);\n\n assert_send(&polling);\n assert_send(&polling.as_stream());\n\n fn assert_send(_: &impl Send) {}\n}\n}" ]
f7054d0c6b3cda6589cdd350c08e2815fe871087
3,800
rs
Rust
tests/clones.rs
azriel91/rayon
067a294cbd967940723a49ea5c9b1312a5b1b5ac
[ "Apache-2.0", "MIT" ]
null
null
null
tests/clones.rs
azriel91/rayon
067a294cbd967940723a49ea5c9b1312a5b1b5ac
[ "Apache-2.0", "MIT" ]
null
null
null
tests/clones.rs
azriel91/rayon
067a294cbd967940723a49ea5c9b1312a5b1b5ac
[ "Apache-2.0", "MIT" ]
null
null
null
extern crate rayon; use rayon::prelude::*; fn check<I>(iter: I) where I: ParallelIterator + Clone, I::Item: std::fmt::Debug + PartialEq { let a: Vec<_> = iter.clone().collect(); let b: Vec<_> = iter.collect(); assert_eq!(a, b); } #[test] fn clone_binary_heap() { use std::collections::BinaryHeap; let heap: BinaryHeap<_> = (0..1000).collect(); check(heap.par_iter()); check(heap.into_par_iter()); } #[test] fn clone_btree_map() { use std::collections::BTreeMap; let map: BTreeMap<_,_> = (0..1000).enumerate().collect(); check(map.par_iter()); } #[test] fn clone_btree_set() { use std::collections::BTreeSet; let set: BTreeSet<_> = (0..1000).collect(); check(set.par_iter()); } #[test] fn clone_hash_map() { use std::collections::HashMap; let map: HashMap<_,_> = (0..1000).enumerate().collect(); check(map.par_iter()); } #[test] fn clone_hash_set() { use std::collections::HashSet; let set: HashSet<_> = (0..1000).collect(); check(set.par_iter()); } #[test] fn clone_linked_list() { use std::collections::LinkedList; let list: LinkedList<_> = (0..1000).collect(); check(list.par_iter()); check(list.into_par_iter()); } #[test] fn clone_vec_deque() { use std::collections::VecDeque; let deque: VecDeque<_> = (0..1000).collect(); check(deque.par_iter()); check(deque.into_par_iter()); } #[test] fn clone_option() { let option = Some(0); check(option.par_iter()); check(option.into_par_iter()); } #[test] fn clone_result() { let result = Ok::<_, ()>(0); check(result.par_iter()); check(result.into_par_iter()); } #[test] fn clone_range() { check((0..1000).into_par_iter()); } #[test] fn clone_str() { let s = include_str!("clones.rs"); check(s.par_chars()); check(s.par_lines()); check(s.par_split('\n')); check(s.par_split_terminator('\n')); check(s.par_split_whitespace()); } #[test] fn clone_vec() { let v: Vec<_> = (0..1000).collect(); check(v.par_iter()); check(v.par_chunks(42)); check(v.par_windows(42)); check(v.par_split(|x| x % 3 == 0)); check(v.into_par_iter()); } #[test] fn clone_adaptors() 
{ let v: Vec<_> = (0..1000).map(Some).collect(); check(v.par_iter().chain(&v)); check(v.par_iter().cloned()); check(v.par_iter().enumerate()); check(v.par_iter().filter(|_| true)); check(v.par_iter().filter_map(|x| *x)); check(v.par_iter().flat_map(|x| *x)); check(v.par_iter().flatten()); check(v.par_iter().with_max_len(1).fold(|| 0, |x, _| x)); check(v.par_iter().with_max_len(1).fold_with(0, |x, _| x)); check(v.par_iter().with_max_len(1).try_fold(|| 0, |_, &x| x)); check(v.par_iter().with_max_len(1).try_fold_with(0, |_, &x| x)); check(v.par_iter().inspect(|_| ())); check(v.par_iter().update(|_| ())); check(v.par_iter().interleave(&v)); check(v.par_iter().interleave_shortest(&v)); check(v.par_iter().intersperse(&None)); check(v.par_iter().chunks(3)); check(v.par_iter().map(|x| x)); check(v.par_iter().map_with(0, |_, x| x)); check(v.par_iter().rev()); check(v.par_iter().skip(1)); check(v.par_iter().take(1)); check(v.par_iter().cloned().while_some()); check(v.par_iter().with_max_len(1)); check(v.par_iter().with_min_len(1)); check(v.par_iter().zip(&v)); check(v.par_iter().zip_eq(&v)); } #[test] fn clone_empty() { check(rayon::iter::empty::<i32>()); } #[test] fn clone_once() { check(rayon::iter::once(10)); } #[test] fn clone_repeat() { let x: Option<i32> = None; check(rayon::iter::repeat(x).while_some()); check(rayon::iter::repeatn(x, 1000)); } #[test] fn clone_splitter() { check(rayon::iter::split(0..1000, |x| (x, None))); }
23.899371
68
0.6
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_binary_heap() {\n use std::collections::BinaryHeap;\n let heap: BinaryHeap<_> = (0..1000).collect();\n check(heap.par_iter());\n check(heap.into_par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_btree_map() {\n use std::collections::BTreeMap;\n let map: BTreeMap<_,_> = (0..1000).enumerate().collect();\n check(map.par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_btree_set() {\n use std::collections::BTreeSet;\n let set: BTreeSet<_> = (0..1000).collect();\n check(set.par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_hash_map() {\n use std::collections::HashMap;\n let map: HashMap<_,_> = (0..1000).enumerate().collect();\n check(map.par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_hash_set() {\n use std::collections::HashSet;\n let set: HashSet<_> = (0..1000).collect();\n check(set.par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_linked_list() {\n use std::collections::LinkedList;\n let list: LinkedList<_> = (0..1000).collect();\n check(list.par_iter());\n check(list.into_par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_vec_deque() {\n use std::collections::VecDeque;\n let deque: VecDeque<_> = (0..1000).collect();\n check(deque.par_iter());\n check(deque.into_par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_option() {\n let option = Some(0);\n check(option.par_iter());\n check(option.into_par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_result() {\n let result = Ok::<_, ()>(0);\n check(result.par_iter());\n check(result.into_par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_range() {\n check((0..1000).into_par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn 
clone_str() {\n let s = include_str!(\"clones.rs\");\n check(s.par_chars());\n check(s.par_lines());\n check(s.par_split('\\n'));\n check(s.par_split_terminator('\\n'));\n check(s.par_split_whitespace());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_vec() {\n let v: Vec<_> = (0..1000).collect();\n check(v.par_iter());\n check(v.par_chunks(42));\n check(v.par_windows(42));\n check(v.par_split(|x| x % 3 == 0));\n check(v.into_par_iter());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_adaptors() {\n let v: Vec<_> = (0..1000).map(Some).collect();\n check(v.par_iter().chain(&v));\n check(v.par_iter().cloned());\n check(v.par_iter().enumerate());\n check(v.par_iter().filter(|_| true));\n check(v.par_iter().filter_map(|x| *x));\n check(v.par_iter().flat_map(|x| *x));\n check(v.par_iter().flatten());\n check(v.par_iter().with_max_len(1).fold(|| 0, |x, _| x));\n check(v.par_iter().with_max_len(1).fold_with(0, |x, _| x));\n check(v.par_iter().with_max_len(1).try_fold(|| 0, |_, &x| x));\n check(v.par_iter().with_max_len(1).try_fold_with(0, |_, &x| x));\n check(v.par_iter().inspect(|_| ()));\n check(v.par_iter().update(|_| ()));\n check(v.par_iter().interleave(&v));\n check(v.par_iter().interleave_shortest(&v));\n check(v.par_iter().intersperse(&None));\n check(v.par_iter().chunks(3));\n check(v.par_iter().map(|x| x));\n check(v.par_iter().map_with(0, |_, x| x));\n check(v.par_iter().rev());\n check(v.par_iter().skip(1));\n check(v.par_iter().take(1));\n check(v.par_iter().cloned().while_some());\n check(v.par_iter().with_max_len(1));\n check(v.par_iter().with_min_len(1));\n check(v.par_iter().zip(&v));\n check(v.par_iter().zip_eq(&v));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_empty() {\n check(rayon::iter::empty::<i32>());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_once() {\n check(rayon::iter::once(10));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n 
#[test]\n fn clone_repeat() {\n let x: Option<i32> = None;\n check(rayon::iter::repeat(x).while_some());\n check(rayon::iter::repeatn(x, 1000));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn clone_splitter() {\n check(rayon::iter::split(0..1000, |x| (x, None)));\n}\n}" ]
f70565cd4da6e94b5486c1c7becc13dc3a605169
9,667
rs
Rust
proto/src/text.rs
first-rust-competition/nt4-mvp
10c850a46419e575927b32866fd2bf505dc52211
[ "Apache-2.0" ]
null
null
null
proto/src/text.rs
first-rust-competition/nt4-mvp
10c850a46419e575927b32866fd2bf505dc52211
[ "Apache-2.0" ]
null
null
null
proto/src/text.rs
first-rust-competition/nt4-mvp
10c850a46419e575927b32866fd2bf505dc52211
[ "Apache-2.0" ]
null
null
null
use crate::bin::NTValue; use crate::text::directory::*; use crate::text::publish::*; use crate::text::subscription::*; use serde::{Deserialize, Serialize}; use serde_json::Value; macro_rules! impl_message { ($($name:ident),+) => { $( impl MessageBody for $name { fn into_message(self) -> $crate::text::NTTextMessage { $crate::text::NTTextMessage { _type: $crate::text::MessageType::$name, data: serde_json::to_value(self).unwrap() } } } )+ } } pub mod directory; pub mod publish; pub mod subscription; pub trait MessageBody { fn into_message(self) -> NTTextMessage; } /// The type of the message that is being sent or received #[derive(Serialize, Deserialize, Debug, PartialEq)] #[serde(rename_all = "lowercase")] pub enum MessageType { /// Publish Request Message /// Direction: Client to Server /// Response: Publish Acknowledge /// /// Sent from a client to the server to indicate the client wants to start publishing values at the given NetworkTables key. /// The server will respond with a “puback” message. /// Once the client receives the “puback” message it can start publishing data value updates via binary CBOR messages. #[serde(rename = "publish")] PublishReq, /// Publish Release Message /// Direction: Client to Server /// /// Sent from a client to the server to indicate the client wants to stop publishing values at the given NetworkTables key. /// The client may also request the key be deleted. /// The client **must** stop publishing data value updates via binary CBOR messages prior to sending this message. #[serde(rename = "pubrel")] PublishRel, /// Set Flags Message /// Direction: Client to Server /// /// Sent from a client to the server to set or clear flags for a given topic. /// The server will respond with an updated “announce” message. SetFlags, /// Key Announcement Message /// Direction: Server to Client /// /// Sent from the server to a client with an announcement listener covering the key. 
/// The server shall send this message either initially after receiving Start Announcements from a client, /// or when new keys are created with the prefix specified. Announce, /// Key Removed Message /// Direction: Server to Client /// /// Sent from the server to a client with an announcement listener covering the key. /// The server shall send this message when a previously announced (via an “announce” message) key is deleted. Unannounce, /// Get Values Message /// Direction: Client to Server /// Response: Values over CBOR /// /// Sent from a client to the server to indicate the client wants to get the current values for the specified keys (identifiers). /// The server shall send CBOR messages containing the current values immediately upon receipt. /// While this message could theoretically be used to poll for value updates, it is much better to use the “subscribe” message to request periodic push updates. GetValues, /// Subscribe Message /// Direction: Client to Server /// Response: Values over CBOR /// /// Sent from a client to the server to indicate the client wants to subscribe to value changes for the specified keys (identifiers). /// The server shall send CBOR messages containing the current values upon receipt, and continue sending CBOR messages for future value changes. /// Subscriptions may overlap; only one CBOR message is sent per value change regardless of the number of subscriptions. /// Sending a “subscribe” message with the same subscription UID as a previous “subscribe” message results in updating the subscription (replacing the array of identifiers and updating any specified options). Subscribe, /// Unsubscribe Message /// Direction: Client to Server /// /// Sent from a client to the server to indicate the client wants to stop subscribing to value changes for the given subscription. 
Unsubscribe, } /// An enum containing the structs representing each text message, the explanation of each message can be found in the documentation for [`MessageType`] /// /// [`MessageType`]: ./enum.MessageType.html #[derive(Debug, PartialEq)] pub enum MessageValue { PublishReq(PublishReq), PublishRel(PublishRel), SetFlags(SetFlags), Announce(Announce), Unannounce(Unannounce), GetValues(GetValues), Subscribe(Subscribe), Unsubscribe(Unsubscribe), } /// An enum representation of the acceptable data types in NTv4 #[derive(Serialize, Deserialize, Debug, PartialEq, Copy, Clone)] #[serde(rename_all = "lowercase")] pub enum DataType { /// Represents a boolean, true or false Boolean, /// Represents a sequence of raw bytes Raw, /// Represents a Remote Procedure Call declaration RPC, /// Represents a sequence of bytes representing a String String, /// Represents a signed 64-bit integer Int, /// Represents an IEEE754 single-precision floating-point number Float, /// Represents an IEEE754 double-precision floating-point number Double, /// Represents an array of Booleans #[serde(rename = "boolean[]")] BooleanArray, /// Represents an array of Strings #[serde(rename = "string[]")] StringArray, /// Represents an array of Integers #[serde(rename = "int[]")] IntArray, /// Represents an array of Floats #[serde(rename = "float[]")] FloatArray, /// Represents an array of Doubles #[serde(rename = "double[]")] DoubleArray, } impl DataType { pub fn default_value(&self) -> NTValue { match self { DataType::Int => NTValue::Int(0), DataType::Boolean => NTValue::Boolean(false), DataType::Raw => NTValue::Raw(vec![]), DataType::RPC => NTValue::RPC(vec![]), DataType::String => NTValue::String(String::new()), DataType::Float => NTValue::Float(0f32), DataType::Double => NTValue::Double(0.0), DataType::BooleanArray => NTValue::BooleanArray(vec![]), DataType::StringArray => NTValue::StringArray(vec![]), DataType::IntArray => NTValue::IntArray(vec![]), DataType::FloatArray => 
NTValue::FloatArray(vec![]), DataType::DoubleArray => NTValue::DoubleArray(vec![]), } } } impl Into<u8> for DataType { fn into(self) -> u8 { match self { DataType::Boolean => 0, DataType::Double => 1, DataType::Int => 2, DataType::Float => 3, DataType::String => 4, DataType::Raw => 5, DataType::RPC => 6, DataType::BooleanArray => 16, DataType::DoubleArray => 17, DataType::IntArray => 18, DataType::FloatArray => 19, DataType::StringArray => 20, } } } /// The most generic struct representing a textual message transmitted in NT4 /// /// This struct should probably not be used directly, and instead can be constructed from the implementors of [`MessageBody`], found in submodules /// These implementors are strongly typed equivalents to the `data` field on this type, and contain more information about how they should be used. /// /// [`MessageBody`]: ./trait.MessageBody.html #[derive(Serialize, Deserialize, PartialEq, Debug)] pub struct NTTextMessage { #[serde(rename = "type")] _type: MessageType, data: Value, } macro_rules! to_data_body { ($self:ident, $($ty:ident),+) => { match $self._type { $( MessageType::$ty => match serde_json::from_value::<$ty>($self.data) { Ok(value) => Ok(MessageValue::$ty(value)), Err(e) => Err(e), } )+ } } } impl NTTextMessage { /// Decodes the `Value` stored in `self` as a strongly typed struct depending on the value of `self._type` /// /// Returns the value wrapped inside the [`MessageValue`] enum. 
/// /// [`MessageValue`]: ./enum.MessageValue.html pub fn data(self) -> serde_json::Result<MessageValue> { use self::directory::*; use self::publish::*; use self::subscription::*; to_data_body!( self, PublishReq, PublishRel, SetFlags, Announce, Unannounce, GetValues, Subscribe, Unsubscribe ) } } #[cfg(test)] mod tests { use crate::text::publish::{PublishReq, SetFlags}; use crate::text::{DataType, MessageBody, MessageType, MessageValue, NTTextMessage}; #[test] fn test_de() { let msg = r#"{"type":"publish", "data": {"name": "/foo", "type": "integer"}}"#; let msg = serde_json::from_str::<NTTextMessage>(msg).unwrap(); assert_eq!(msg._type, MessageType::PublishReq); assert_eq!( msg.data(), MessageValue::PublishReq(PublishReq { name: "/foo".to_string(), _type: DataType::Int, }) ); } #[test] fn test_ser() { let msg = SetFlags { name: "/foo".to_string(), add: vec!["persistent".to_string()], remove: vec!["bolb".to_string()], }; assert_eq!( serde_json::to_string(&msg.into_message()).unwrap(), r#"{"type":"setflags","data":{"add":["persistent"],"name":"/foo","remove":["bolb"]}}"# ) } }
36.342105
212
0.623668
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_de() {\n let msg = r#\"{\"type\":\"publish\", \"data\": {\"name\": \"/foo\", \"type\": \"integer\"}}\"#;\n let msg = serde_json::from_str::<NTTextMessage>(msg).unwrap();\n assert_eq!(msg._type, MessageType::PublishReq);\n assert_eq!(\n msg.data(),\n MessageValue::PublishReq(PublishReq {\n name: \"/foo\".to_string(),\n _type: DataType::Int,\n })\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ser() {\n let msg = SetFlags {\n name: \"/foo\".to_string(),\n add: vec![\"persistent\".to_string()],\n remove: vec![\"bolb\".to_string()],\n };\n\n assert_eq!(\n serde_json::to_string(&msg.into_message()).unwrap(),\n r#\"{\"type\":\"setflags\",\"data\":{\"add\":[\"persistent\"],\"name\":\"/foo\",\"remove\":[\"bolb\"]}}\"#\n )\n }\n}" ]
f70569895328584dbaf556e70c4971555600f5a1
9,483
rs
Rust
fj-app/src/kernel/algorithms/approximation.rs
mxdamien/Fornjot
750a668e72d1630b1ac0d71f8872f260fde5155f
[ "0BSD" ]
null
null
null
fj-app/src/kernel/algorithms/approximation.rs
mxdamien/Fornjot
750a668e72d1630b1ac0d71f8872f260fde5155f
[ "0BSD" ]
17
2022-01-31T11:26:08.000Z
2022-03-14T18:59:25.000Z
fj-app/src/kernel/algorithms/approximation.rs
mxdamien/Fornjot
750a668e72d1630b1ac0d71f8872f260fde5155f
[ "0BSD" ]
null
null
null
use std::collections::HashSet; use crate::{ kernel::topology::{ edges::{Cycle, Edge}, faces::Face, vertices::Vertex, }, math::{Point, Scalar, Segment}, }; /// An approximation of an edge, multiple edges, or a face #[derive(Debug, PartialEq)] pub struct Approximation { /// All points that make up the approximation /// /// These could be actual vertices from the model, points that approximate /// an edge, or points that approximate a face. pub points: HashSet<Point<3>>, /// Segments that approximate edges /// /// Every approximation will involve edges, typically, and these are /// approximated by these segments. /// /// All the points of these segments will also be available in the `points` /// field of this struct. pub segments: HashSet<Segment<3>>, } impl Approximation { /// Compute an approximate for an edge /// /// `tolerance` defines how far the approximation is allowed to deviate from /// the actual edge. pub fn for_edge(edge: &Edge, tolerance: Scalar) -> Self { let mut points = Vec::new(); edge.curve().approx(tolerance, &mut points); approximate_edge(points, edge.vertices()) } /// Compute an approximation for a cycle /// /// `tolerance` defines how far the approximation is allowed to deviate from /// the actual cycle. pub fn for_cycle(cycle: &Cycle, tolerance: Scalar) -> Self { let mut points = HashSet::new(); let mut segments = HashSet::new(); for edge in cycle.edges() { let approx = Self::for_edge(&edge, tolerance); points.extend(approx.points); segments.extend(approx.segments); } Self { points, segments } } /// Compute an approximation for a face /// /// `tolerance` defines how far the approximation is allowed to deviate from /// the actual edges. pub fn for_face(face: &Face, tolerance: Scalar) -> Self { // Curved faces whose curvature is not fully defined by their edges // are not supported yet. For that reason, we can fully ignore `face`'s // `surface` field and just pass the edges to `Self::for_edges`. 
// // An example of a curved face that is supported, is the cylinder. Its // curvature is fully defined be the edges (circles) that border it. The // circle approximations are sufficient to triangulate the surface. // // An example of a curved face that is currently not supported, and thus // doesn't need to be handled here, is a sphere. A spherical face would // would need to provide its own approximation, as the edges that bound // it have nothing to do with its curvature. let mut points = HashSet::new(); let mut segments = HashSet::new(); for cycle in face.cycles() { let approx = Self::for_cycle(&cycle, tolerance); points.extend(approx.points); segments.extend(approx.segments); } Self { points, segments } } } fn approximate_edge( mut points: Vec<Point<3>>, vertices: Option<[Vertex; 2]>, ) -> Approximation { // Insert the exact vertices of this edge into the approximation. This means // we don't rely on the curve approximation to deliver accurate // representations of these vertices, which they might not be able to do. // // If we used inaccurate representations of those vertices here, then that // would lead to bugs in the approximation, as points that should refer to // the same vertex would be understood to refer to very close, but distinct // vertices. if let Some([a, b]) = &vertices { points.insert(0, a.point()); points.push(b.point()); } let mut segment_points = points.clone(); if vertices.is_none() { // The edge has no vertices, which means it connects to itself. We need // to reflect that in the approximation. 
if let Some(&point) = points.first() { segment_points.push(point); } } let mut segments = HashSet::new(); for segment in segment_points.windows(2) { let p0 = segment[0]; let p1 = segment[1]; segments.insert(Segment::from([p0, p1])); } Approximation { points: points.into_iter().collect(), segments, } } #[cfg(test)] mod tests { use map_macro::set; use crate::{ kernel::{ geometry::Surface, shape::Shape, topology::{edges::Cycle, faces::Face, vertices::Vertex}, }, math::{Point, Scalar, Segment}, }; use super::{approximate_edge, Approximation}; #[test] fn for_edge() { // Doesn't test `Approximation::for_edge` directly, but that method only // contains a bit of additional glue code that is not critical. let mut shape = Shape::new(); let a = Point::from([1., 2., 3.]); let b = Point::from([2., 3., 5.]); let c = Point::from([3., 5., 8.]); let d = Point::from([5., 8., 13.]); let v1 = shape.geometry().add_point(a); let v2 = shape.geometry().add_point(d); let v1 = shape.topology().add_vertex(Vertex { point: v1 }).unwrap(); let v2 = shape.topology().add_vertex(Vertex { point: v2 }).unwrap(); let points = vec![b, c]; // Regular edge assert_eq!( approximate_edge( points.clone(), Some([v1.get().clone(), v2.get().clone()]) ), Approximation { points: set![a, b, c, d], segments: set![ Segment::from([a, b]), Segment::from([b, c]), Segment::from([c, d]), ], } ); // Continuous edge assert_eq!( approximate_edge(points, None), Approximation { points: set![b, c], segments: set![Segment::from([b, c]), Segment::from([c, b])], } ); } #[test] fn for_cycle() { let tolerance = Scalar::ONE; let mut shape = Shape::new(); let a = Point::from([1., 2., 3.]); let b = Point::from([2., 3., 5.]); let c = Point::from([3., 5., 8.]); let v1 = shape.geometry().add_point(a); let v2 = shape.geometry().add_point(b); let v3 = shape.geometry().add_point(c); let v1 = shape.topology().add_vertex(Vertex { point: v1 }).unwrap(); let v2 = shape.topology().add_vertex(Vertex { point: v2 }).unwrap(); let v3 = 
shape.topology().add_vertex(Vertex { point: v3 }).unwrap(); let ab = shape .topology() .add_line_segment([v1.clone(), v2.clone()]) .unwrap(); let bc = shape.topology().add_line_segment([v2, v3.clone()]).unwrap(); let ca = shape.topology().add_line_segment([v3, v1]).unwrap(); let cycle = Cycle { edges: vec![ab, bc, ca], }; assert_eq!( Approximation::for_cycle(&cycle, tolerance), Approximation { points: set![a, b, c], segments: set![ Segment::from([a, b]), Segment::from([b, c]), Segment::from([c, a]), ], } ); } #[test] fn for_face_closed() { // Test a closed face, i.e. one that is completely encircled by edges. let tolerance = Scalar::ONE; let mut shape = Shape::new(); let a = Point::from([1., 2., 3.]); let b = Point::from([2., 3., 5.]); let c = Point::from([3., 5., 8.]); let d = Point::from([5., 8., 13.]); let v1 = shape.geometry().add_point(a); let v2 = shape.geometry().add_point(b); let v3 = shape.geometry().add_point(c); let v4 = shape.geometry().add_point(d); let v1 = shape.topology().add_vertex(Vertex { point: v1 }).unwrap(); let v2 = shape.topology().add_vertex(Vertex { point: v2 }).unwrap(); let v3 = shape.topology().add_vertex(Vertex { point: v3 }).unwrap(); let v4 = shape.topology().add_vertex(Vertex { point: v4 }).unwrap(); let ab = shape .topology() .add_line_segment([v1.clone(), v2.clone()]) .unwrap(); let bc = shape.topology().add_line_segment([v2, v3.clone()]).unwrap(); let cd = shape.topology().add_line_segment([v3, v4.clone()]).unwrap(); let da = shape.topology().add_line_segment([v4, v1]).unwrap(); let abcd = shape .topology() .add_cycle(Cycle { edges: vec![ab, bc, cd, da], }) .unwrap(); let surface = shape.geometry().add_surface(Surface::x_y_plane()); let face = Face::Face { surface, cycles: vec![abcd], color: [255, 0, 0, 255], }; assert_eq!( Approximation::for_face(&face, tolerance), Approximation { points: set![a, b, c, d], segments: set![ Segment::from([a, b]), Segment::from([b, c]), Segment::from([c, d]), Segment::from([d, a]), ], } ); } }
32.145763
80
0.541495
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn for_edge() {\n // Doesn't test `Approximation::for_edge` directly, but that method only\n // contains a bit of additional glue code that is not critical.\n\n let mut shape = Shape::new();\n\n let a = Point::from([1., 2., 3.]);\n let b = Point::from([2., 3., 5.]);\n let c = Point::from([3., 5., 8.]);\n let d = Point::from([5., 8., 13.]);\n\n let v1 = shape.geometry().add_point(a);\n let v2 = shape.geometry().add_point(d);\n\n let v1 = shape.topology().add_vertex(Vertex { point: v1 }).unwrap();\n let v2 = shape.topology().add_vertex(Vertex { point: v2 }).unwrap();\n\n let points = vec![b, c];\n\n // Regular edge\n assert_eq!(\n approximate_edge(\n points.clone(),\n Some([v1.get().clone(), v2.get().clone()])\n ),\n Approximation {\n points: set![a, b, c, d],\n segments: set![\n Segment::from([a, b]),\n Segment::from([b, c]),\n Segment::from([c, d]),\n ],\n }\n );\n\n // Continuous edge\n assert_eq!(\n approximate_edge(points, None),\n Approximation {\n points: set![b, c],\n segments: set![Segment::from([b, c]), Segment::from([c, b])],\n }\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn for_cycle() {\n let tolerance = Scalar::ONE;\n\n let mut shape = Shape::new();\n\n let a = Point::from([1., 2., 3.]);\n let b = Point::from([2., 3., 5.]);\n let c = Point::from([3., 5., 8.]);\n\n let v1 = shape.geometry().add_point(a);\n let v2 = shape.geometry().add_point(b);\n let v3 = shape.geometry().add_point(c);\n\n let v1 = shape.topology().add_vertex(Vertex { point: v1 }).unwrap();\n let v2 = shape.topology().add_vertex(Vertex { point: v2 }).unwrap();\n let v3 = shape.topology().add_vertex(Vertex { point: v3 }).unwrap();\n\n let ab = shape\n .topology()\n .add_line_segment([v1.clone(), v2.clone()])\n .unwrap();\n let bc = shape.topology().add_line_segment([v2, v3.clone()]).unwrap();\n let ca = shape.topology().add_line_segment([v3, v1]).unwrap();\n\n let cycle = Cycle {\n edges: vec![ab, bc, 
ca],\n };\n\n assert_eq!(\n Approximation::for_cycle(&cycle, tolerance),\n Approximation {\n points: set![a, b, c],\n segments: set![\n Segment::from([a, b]),\n Segment::from([b, c]),\n Segment::from([c, a]),\n ],\n }\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn for_face_closed() {\n // Test a closed face, i.e. one that is completely encircled by edges.\n\n let tolerance = Scalar::ONE;\n\n let mut shape = Shape::new();\n\n let a = Point::from([1., 2., 3.]);\n let b = Point::from([2., 3., 5.]);\n let c = Point::from([3., 5., 8.]);\n let d = Point::from([5., 8., 13.]);\n\n let v1 = shape.geometry().add_point(a);\n let v2 = shape.geometry().add_point(b);\n let v3 = shape.geometry().add_point(c);\n let v4 = shape.geometry().add_point(d);\n\n let v1 = shape.topology().add_vertex(Vertex { point: v1 }).unwrap();\n let v2 = shape.topology().add_vertex(Vertex { point: v2 }).unwrap();\n let v3 = shape.topology().add_vertex(Vertex { point: v3 }).unwrap();\n let v4 = shape.topology().add_vertex(Vertex { point: v4 }).unwrap();\n\n let ab = shape\n .topology()\n .add_line_segment([v1.clone(), v2.clone()])\n .unwrap();\n let bc = shape.topology().add_line_segment([v2, v3.clone()]).unwrap();\n let cd = shape.topology().add_line_segment([v3, v4.clone()]).unwrap();\n let da = shape.topology().add_line_segment([v4, v1]).unwrap();\n\n let abcd = shape\n .topology()\n .add_cycle(Cycle {\n edges: vec![ab, bc, cd, da],\n })\n .unwrap();\n\n let surface = shape.geometry().add_surface(Surface::x_y_plane());\n let face = Face::Face {\n surface,\n cycles: vec![abcd],\n color: [255, 0, 0, 255],\n };\n\n assert_eq!(\n Approximation::for_face(&face, tolerance),\n Approximation {\n points: set![a, b, c, d],\n segments: set![\n Segment::from([a, b]),\n Segment::from([b, c]),\n Segment::from([c, d]),\n Segment::from([d, a]),\n ],\n }\n );\n }\n}" ]
f7056abbd92d63c59d4548b020a54f5da1b55e2e
51,792
rs
Rust
write_buffer/src/table.rs
CJP10/influxdb_iox
cdb26e60e4531dddbe0e40c6e571ff1b6de20909
[ "Apache-2.0", "MIT" ]
null
null
null
write_buffer/src/table.rs
CJP10/influxdb_iox
cdb26e60e4531dddbe0e40c6e571ff1b6de20909
[ "Apache-2.0", "MIT" ]
null
null
null
write_buffer/src/table.rs
CJP10/influxdb_iox
cdb26e60e4531dddbe0e40c6e571ff1b6de20909
[ "Apache-2.0", "MIT" ]
null
null
null
use generated_types::wal as wb; use query::exec::{make_schema_pivot, GroupedSeriesSetPlan, SeriesSetPlan}; use tracing::debug; use std::{collections::BTreeSet, collections::HashMap, sync::Arc}; use crate::{ column, column::Column, dictionary::{Dictionary, Error as DictionaryError}, partition::PartitionIdSet, partition::{Partition, PartitionPredicate}, }; use data_types::TIME_COLUMN_NAME; use snafu::{OptionExt, ResultExt, Snafu}; use arrow_deps::{ arrow, arrow::{ array::{ArrayRef, BooleanBuilder, Float64Builder, Int64Builder, StringBuilder}, datatypes::{DataType as ArrowDataType, Field as ArrowField, Schema as ArrowSchema}, record_batch::RecordBatch, }, datafusion, datafusion::logical_plan::Expr, datafusion::logical_plan::LogicalPlan, datafusion::logical_plan::LogicalPlanBuilder, }; #[derive(Debug, Snafu)] pub enum Error { #[snafu(display("Table {} not found", table))] TableNotFound { table: String }, #[snafu(display( "Column {} said it was type {} but extracting a value of that type failed", column, expected ))] WalValueTypeMismatch { column: String, expected: String }, #[snafu(display( "Tag value ID {} not found in dictionary of partition {}", value, partition ))] TagValueIdNotFoundInDictionary { value: u32, partition: String, source: DictionaryError, }, #[snafu(display( "Column type mismatch for column {}: can't insert {} into column with type {}", column, inserted_value_type, existing_column_type ))] ColumnTypeMismatch { column: String, existing_column_type: String, inserted_value_type: String, }, #[snafu(display("Column error on column {}: {}", column, source))] ColumnError { column: String, source: column::Error, }, #[snafu(display( "Internal error: Expected column {} to be type {} but was {}", column_id, expected_column_type, actual_column_type ))] InternalColumnTypeMismatch { column_id: u32, expected_column_type: String, actual_column_type: String, }, #[snafu(display( "Column name '{}' not found in dictionary of partition {}", column_name, partition ))] 
ColumnNameNotFoundInDictionary { column_name: String, partition: String, source: DictionaryError, }, #[snafu(display( "Internal: Column id '{}' not found in dictionary of partition {}", column_id, partition ))] ColumnIdNotFoundInDictionary { column_id: u32, partition: String, source: DictionaryError, }, #[snafu(display( "Schema mismatch: for column {}: can't insert {} into column with type {}", column, inserted_value_type, existing_column_type ))] SchemaMismatch { column: u32, existing_column_type: String, inserted_value_type: String, }, #[snafu(display("Error building plan: {}", source))] BuildingPlan { source: datafusion::error::DataFusionError, }, #[snafu(display("arrow conversion error: {}", source))] ArrowError { source: arrow::error::ArrowError }, #[snafu(display("Schema mismatch: for column {}: {}", column, source))] InternalSchemaMismatch { column: u32, source: crate::column::Error, }, #[snafu(display( "No index entry found for column {} with id {}", column_name, column_id ))] InternalNoColumnInIndex { column_name: String, column_id: u32 }, #[snafu(display("Error creating column from wal for column {}: {}", column, source))] CreatingFromWal { column: u32, source: crate::column::Error, }, #[snafu(display("Error evaluating column predicate for column {}: {}", column, source))] ColumnPredicateEvaluation { column: u32, source: crate::column::Error, }, #[snafu(display("Row insert to table {} missing column name", table))] ColumnNameNotInRow { table: u32 }, #[snafu(display( "Group column '{}' not found in tag columns: {}", column_name, all_tag_column_names ))] GroupColumnNotFound { column_name: String, all_tag_column_names: String, }, #[snafu(display("Duplicate group column '{}'", column_name))] DuplicateGroupColumn { column_name: String }, } pub type Result<T, E = Error> = std::result::Result<T, E>; #[derive(Debug)] pub struct Table { /// Name of the table as a u32 in the partition dictionary pub id: u32, /// Maps column name (as a u32 in the partition 
dictionary) to an index in self.columns pub column_id_to_index: HashMap<u32, usize>, /// Actual column storage pub columns: Vec<Column>, } type ArcStringVec = Vec<Arc<String>>; impl Table { pub fn new(id: u32) -> Self { Self { id, column_id_to_index: HashMap::new(), columns: Vec::new(), } } fn append_row( &mut self, dictionary: &mut Dictionary, values: &flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<wb::Value<'_>>>, ) -> Result<()> { let row_count = self.row_count(); // insert new columns and validate existing ones for value in values { let column_name = value .column() .context(ColumnNameNotInRow { table: self.id })?; let column_id = dictionary.lookup_value_or_insert(column_name); let column = match self.column_id_to_index.get(&column_id) { Some(idx) => &mut self.columns[*idx], None => { // Add the column and make all values for existing rows None let idx = self.columns.len(); self.column_id_to_index.insert(column_id, idx); self.columns.push( Column::with_value(dictionary, row_count, value) .context(CreatingFromWal { column: column_id })?, ); continue; } }; column.push(dictionary, &value).context(ColumnError { column: column_name, })?; } // make sure all the columns are of the same length for col in &mut self.columns { col.push_none_if_len_equal(row_count); } Ok(()) } pub fn row_count(&self) -> usize { self.columns.first().map_or(0, |v| v.len()) } /// Returns a reference to the specified column fn column(&self, column_id: u32) -> Result<&Column> { Ok(self .column_id_to_index .get(&column_id) .map(|&column_index| &self.columns[column_index]) .expect("invalid column id")) } /// Returns a reference to the specified column as a slice of /// i64s. 
Errors if the type is not i64 pub fn column_i64(&self, column_id: u32) -> Result<&[Option<i64>]> { let column = self.column(column_id)?; match column { Column::I64(vals, _) => Ok(vals), _ => InternalColumnTypeMismatch { column_id, expected_column_type: "i64", actual_column_type: column.type_description(), } .fail(), } } pub fn append_rows( &mut self, dictionary: &mut Dictionary, rows: &flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<wb::Row<'_>>>, ) -> Result<()> { for row in rows { if let Some(values) = row.values() { self.append_row(dictionary, &values)?; } } Ok(()) } /// Creates and adds a datafuson filtering expression, if any out of the /// combination of predicate and timestamp. Returns the builder fn add_datafusion_predicate( plan_builder: LogicalPlanBuilder, partition_predicate: &PartitionPredicate, ) -> Result<LogicalPlanBuilder> { match partition_predicate.filter_expr() { Some(df_predicate) => plan_builder.filter(df_predicate).context(BuildingPlan), None => Ok(plan_builder), } } /// Creates a DataFusion LogicalPlan that returns column *names* as a /// single column of Strings /// /// The created plan looks like: /// /// Extension(PivotSchema) /// (Optional Projection to get rid of time) /// Filter(predicate) /// InMemoryScan pub fn tag_column_names_plan( &self, partition_predicate: &PartitionPredicate, partition: &Partition, ) -> Result<LogicalPlan> { let need_time_column = partition_predicate.range.is_some(); let time_column_id = partition_predicate.time_column_id; // figure out the tag columns let requested_columns_with_index = self .column_id_to_index .iter() .filter_map(|(&column_id, &column_index)| { // keep tag columns and the timestamp column, if needed to evaluate a timestamp predicate let need_column = if let Column::Tag(_, _) = self.columns[column_index] { true } else { need_time_column && column_id == time_column_id }; if need_column { // the id came out of our map, so it should always be valid let column_name = 
partition.dictionary.lookup_id(column_id).unwrap(); Some((column_name, column_index)) } else { None } }) .collect::<Vec<_>>(); // TODO avoid materializing here let data = self.to_arrow_impl(partition, &requested_columns_with_index)?; let schema = data.schema(); let projection = None; let projected_schema = schema.clone(); let plan_builder = LogicalPlanBuilder::from(&LogicalPlan::InMemoryScan { data: vec![vec![data]], schema, projection, projected_schema, }); // Shouldn't have field selections here (as we are getting the tags...) assert!(!partition_predicate.has_field_restriction()); let plan_builder = Self::add_datafusion_predicate(plan_builder, partition_predicate)?; // add optional selection to remove time column let plan_builder = if !need_time_column { plan_builder } else { // Create expressions for all columns except time let select_exprs = requested_columns_with_index .iter() .filter_map(|&(column_name, _)| { if column_name != TIME_COLUMN_NAME { Some(Expr::Column(column_name.into())) } else { None } }) .collect(); plan_builder.project(select_exprs).context(BuildingPlan)? 
}; let plan = plan_builder.build().context(BuildingPlan)?; // And finally pivot the plan let plan = make_schema_pivot(plan); debug!( "Created column_name plan for table '{}':\n{}", partition.dictionary.lookup_id(self.id).unwrap(), plan.display_indent_schema() ); Ok(plan) } /// Creates a DataFusion LogicalPlan that returns column *values* as a /// single column of Strings /// /// The created plan looks like: /// /// Projection /// Filter(predicate) /// InMemoryScan pub fn tag_values_plan( &self, column_name: &str, partition_predicate: &PartitionPredicate, partition: &Partition, ) -> Result<LogicalPlan> { // TODO avoid materializing all the columns here (ideally // DataFusion can prune them out) let data = self.all_to_arrow(partition)?; let schema = data.schema(); let projection = None; let projected_schema = schema.clone(); let select_exprs = vec![Expr::Column(column_name.into())]; // And build the plan! let plan_builder = LogicalPlanBuilder::from(&LogicalPlan::InMemoryScan { data: vec![vec![data]], schema, projection, projected_schema, }); // shouldn't have columns selection (as this is getting tag values...) assert!(!partition_predicate.has_field_restriction()); let plan_builder = Self::add_datafusion_predicate(plan_builder, partition_predicate)?; plan_builder .project(select_exprs) .context(BuildingPlan)? .build() .context(BuildingPlan) } /// Creates a SeriesSet plan that produces an output table with rows that match the predicate /// /// The output looks like: /// (tag_col1, tag_col2, ... field1, field2, ... timestamp) /// /// The order of the tag_columns is orderd by name. /// /// The data is sorted on tag_col1, tag_col2, ...) 
so that all /// rows for a particular series (groups where all tags are the /// same) occur together in the plan pub fn series_set_plan( &self, partition_predicate: &PartitionPredicate, partition: &Partition, ) -> Result<SeriesSetPlan> { self.series_set_plan_impl(partition_predicate, None, partition) } /// Creates the plans for computing series set, pulling prefix_columns, if any, as a prefix of the ordering /// The created plan looks like: /// /// Projection (select the columns columns needed) /// Order by (tag_columns, timestamp_column) /// Filter(predicate) /// InMemoryScan pub fn series_set_plan_impl( &self, partition_predicate: &PartitionPredicate, prefix_columns: Option<&[String]>, partition: &Partition, ) -> Result<SeriesSetPlan> { // I wonder if all this string creation will be too slow? let table_name = partition .dictionary .lookup_id(self.id) .expect("looking up table name in dictionary") .to_string(); let table_name = Arc::new(table_name); let (mut tag_columns, field_columns) = self.tag_and_field_column_names(partition_predicate, partition)?; // reorder tag_columns to have the prefix columns, if requested if let Some(prefix_columns) = prefix_columns { tag_columns = reorder_prefix(prefix_columns, tag_columns)?; } // TODO avoid materializing all the columns here (ideally // DataFusion can prune them out) let data = self.all_to_arrow(partition)?; let schema = data.schema(); let projection = None; let projected_schema = schema.clone(); // And build the plan from the bottom up let plan_builder = LogicalPlanBuilder::from(&LogicalPlan::InMemoryScan { data: vec![vec![data]], schema, projection, projected_schema, }); // Filtering let plan_builder = Self::add_datafusion_predicate(plan_builder, partition_predicate)?; let mut sort_exprs = Vec::new(); sort_exprs.extend(tag_columns.iter().map(|c| c.into_sort_expr())); sort_exprs.push(TIME_COLUMN_NAME.into_sort_expr()); // Order by let plan_builder = plan_builder.sort(sort_exprs).context(BuildingPlan)?; // Selection 
let mut select_exprs = Vec::new(); select_exprs.extend(tag_columns.iter().map(|c| c.into_expr())); select_exprs.extend(field_columns.iter().map(|c| c.into_expr())); select_exprs.push(TIME_COLUMN_NAME.into_expr()); let plan_builder = plan_builder.project(select_exprs).context(BuildingPlan)?; // and finally create the plan let plan = plan_builder.build().context(BuildingPlan)?; Ok(SeriesSetPlan { table_name, plan, tag_columns, field_columns, }) } /// Creates a GroupedSeriesSet plan that produces an output table with rows that match the predicate /// /// The output looks like: /// (group_tag_column1, group_tag_column2, ... tag_col1, tag_col2, ... field1, field2, ... timestamp) /// /// The order of the tag_columns is ordered by name. /// /// The data is sorted on tag_col1, tag_col2, ...) so that all /// rows for a particular series (groups where all tags are the /// same) occur together in the plan /// /// The created plan looks like: /// /// Projection (select the columns columns needed) /// Order by (tag_columns, timestamp_column) /// Filter(predicate) /// InMemoryScan pub fn grouped_series_set_plan( &self, partition_predicate: &PartitionPredicate, group_columns: &[String], partition: &Partition, ) -> Result<GroupedSeriesSetPlan> { let series_set_plan = self.series_set_plan_impl(partition_predicate, Some(&group_columns), partition)?; let num_prefix_tag_group_columns = group_columns.len(); Ok(GroupedSeriesSetPlan { series_set_plan, num_prefix_tag_group_columns, }) } /// Creates a plan that produces an output table with rows that /// match the predicate for all fields in the table. 
/// /// The output looks like (field0, field1, ..., time) /// /// The data is not sorted in any particular order /// /// The created plan looks like: /// /// Projection (select the field columns needed) /// Filter(predicate) [optional] /// InMemoryScan pub fn field_names_plan( &self, partition_predicate: &PartitionPredicate, partition: &Partition, ) -> Result<LogicalPlan> { // TODO avoid materializing all the columns here (ideally // DataFusion can prune them out) let data = self.all_to_arrow(partition)?; let schema = data.schema(); let projection = None; let projected_schema = schema.clone(); // And build the plan from the bottom up let plan_builder = LogicalPlanBuilder::from(&LogicalPlan::InMemoryScan { data: vec![vec![data]], schema, projection, projected_schema, }); // Filtering let plan_builder = Self::add_datafusion_predicate(plan_builder, partition_predicate)?; // Selection let select_exprs = self .field_and_time_column_names(partition_predicate, partition) .into_iter() .map(|c| c.into_expr()) .collect::<Vec<_>>(); let plan_builder = plan_builder.project(select_exprs).context(BuildingPlan)?; // and finally create the plan plan_builder.build().context(BuildingPlan) } // Returns (tag_columns, field_columns) vectors with the names of // all tag and field columns, respectively. The vectors are sorted // by name. 
fn tag_and_field_column_names( &self, partition_predicate: &PartitionPredicate, partition: &Partition, ) -> Result<(ArcStringVec, ArcStringVec)> { let mut tag_columns = Vec::with_capacity(self.column_id_to_index.len()); let mut field_columns = Vec::with_capacity(self.column_id_to_index.len()); for (&column_id, &column_index) in &self.column_id_to_index { let column_name = partition .dictionary .lookup_id(column_id) .expect("Find column name in dictionary"); if column_name != TIME_COLUMN_NAME { let column_name = Arc::new(column_name.to_string()); match self.columns[column_index] { Column::Tag(_, _) => tag_columns.push(column_name), _ => { if partition_predicate.should_include_field(column_id) { field_columns.push(column_name) } } } } } // tag columns are always sorted by name (aka sorted by tag // key) in the output schema, so ensure the columns are sorted // (the select exprs) tag_columns.sort(); // Sort the field columns too so that the output always comes // out in a predictable order field_columns.sort(); Ok((tag_columns, field_columns)) } // Returns (field_columns and time) in sorted order fn field_and_time_column_names( &self, partition_predicate: &PartitionPredicate, partition: &Partition, ) -> ArcStringVec { let mut field_columns = self .column_id_to_index .iter() .filter_map(|(&column_id, &column_index)| { match self.columns[column_index] { Column::Tag(_, _) => None, // skip tags _ => { if partition_predicate.should_include_field(column_id) || partition_predicate.is_time_column(column_id) { let column_name = partition .dictionary .lookup_id(column_id) .expect("Find column name in dictionary"); Some(Arc::new(column_name.to_string())) } else { None } } } }) .collect::<Vec<_>>(); // Sort the field columns too so that the output always comes // out in a predictable order field_columns.sort(); field_columns } /// Converts this table to an arrow record batch. 
pub fn to_arrow( &self, partition: &Partition, requested_columns: &[&str], ) -> Result<RecordBatch> { // if requested columns is empty, retrieve all columns in the table if requested_columns.is_empty() { self.all_to_arrow(partition) } else { let columns_with_index = self.column_names_with_index(partition, requested_columns)?; self.to_arrow_impl(partition, &columns_with_index) } } fn column_names_with_index<'a>( &self, partition: &Partition, columns: &[&'a str], ) -> Result<Vec<(&'a str, usize)>> { columns .iter() .map(|&column_name| { let column_id = partition.dictionary.lookup_value(column_name).context( ColumnNameNotFoundInDictionary { column_name, partition: &partition.key, }, )?; let column_index = *self .column_id_to_index .get(&column_id) .context(InternalNoColumnInIndex { column_name, column_id, })?; Ok((column_name, column_index)) }) .collect() } /// Convert all columns to an arrow record batch pub fn all_to_arrow(&self, partition: &Partition) -> Result<RecordBatch> { let mut requested_columns_with_index = self .column_id_to_index .iter() .map(|(&column_id, &column_index)| { let column_name = partition.dictionary.lookup_id(column_id).context( ColumnIdNotFoundInDictionary { column_id, partition: &partition.key, }, )?; Ok((column_name, column_index)) }) .collect::<Result<Vec<_>>>()?; requested_columns_with_index.sort_by(|(a, _), (b, _)| a.cmp(b)); self.to_arrow_impl(partition, &requested_columns_with_index) } /// Converts this table to an arrow record batch, /// /// requested columns with index are tuples of column_name, column_index pub fn to_arrow_impl( &self, partition: &Partition, requested_columns_with_index: &[(&str, usize)], ) -> Result<RecordBatch> { let mut fields = Vec::with_capacity(requested_columns_with_index.len()); let mut columns: Vec<ArrayRef> = Vec::with_capacity(requested_columns_with_index.len()); for &(column_name, column_index) in requested_columns_with_index.iter() { let arrow_col: ArrayRef = match &self.columns[column_index] { 
Column::String(vals, _) => { fields.push(ArrowField::new(column_name, ArrowDataType::Utf8, true)); let mut builder = StringBuilder::with_capacity(vals.len(), vals.len() * 10); for v in vals { match v { None => builder.append_null(), Some(s) => builder.append_value(s), } .context(ArrowError {})?; } Arc::new(builder.finish()) } Column::Tag(vals, _) => { fields.push(ArrowField::new(column_name, ArrowDataType::Utf8, true)); let mut builder = StringBuilder::with_capacity(vals.len(), vals.len() * 10); for v in vals { match v { None => builder.append_null(), Some(value_id) => { let tag_value = partition.dictionary.lookup_id(*value_id).context( TagValueIdNotFoundInDictionary { value: *value_id, partition: &partition.key, }, )?; builder.append_value(tag_value) } } .context(ArrowError {})?; } Arc::new(builder.finish()) } Column::F64(vals, _) => { fields.push(ArrowField::new(column_name, ArrowDataType::Float64, true)); let mut builder = Float64Builder::new(vals.len()); for v in vals { builder.append_option(*v).context(ArrowError {})?; } Arc::new(builder.finish()) } Column::I64(vals, _) => { fields.push(ArrowField::new(column_name, ArrowDataType::Int64, true)); let mut builder = Int64Builder::new(vals.len()); for v in vals { builder.append_option(*v).context(ArrowError {})?; } Arc::new(builder.finish()) } Column::Bool(vals, _) => { fields.push(ArrowField::new(column_name, ArrowDataType::Boolean, true)); let mut builder = BooleanBuilder::new(vals.len()); for v in vals { builder.append_option(*v).context(ArrowError {})?; } Arc::new(builder.finish()) } }; columns.push(arrow_col); } let schema = ArrowSchema::new(fields); RecordBatch::try_new(Arc::new(schema), columns).context(ArrowError {}) } /// returns true if any row in this table could possible match the /// predicate. true does not mean any rows will *actually* match, /// just that the entire table can not be ruled out. 
/// /// false means that no rows in this table could possibly match pub fn could_match_predicate(&self, partition_predicate: &PartitionPredicate) -> Result<bool> { Ok( self.matches_column_selection(partition_predicate.field_restriction.as_ref()) && self.matches_table_name_predicate( partition_predicate.table_name_predicate.as_ref(), ) && self.matches_timestamp_predicate(partition_predicate)? && self.has_columns(partition_predicate.required_columns.as_ref()), ) } /// Returns true if the table contains at least one of the fields /// requested or there are no specific fields requested. fn matches_column_selection(&self, column_selection: Option<&BTreeSet<u32>>) -> bool { match column_selection { Some(column_selection) => { // figure out if any of the columns exists self.column_id_to_index .keys() .any(|column_id| column_selection.contains(column_id)) } None => true, // no specific selection } } fn matches_table_name_predicate(&self, table_name_predicate: Option<&BTreeSet<u32>>) -> bool { match table_name_predicate { Some(table_name_predicate) => table_name_predicate.contains(&self.id), None => true, // no table predicate } } /// returns true if there are any timestamps in this table that /// fall within the timestamp range fn matches_timestamp_predicate( &self, partition_predicate: &PartitionPredicate, ) -> Result<bool> { match &partition_predicate.range { None => Ok(true), Some(range) => { let time_column_id = partition_predicate.time_column_id; let time_column = self.column(time_column_id)?; time_column.has_i64_range(range.start, range.end).context( ColumnPredicateEvaluation { column: time_column_id, }, ) } } } /// returns true if no columns are specified, or the table has all /// columns specified fn has_columns(&self, columns: Option<&PartitionIdSet>) -> bool { if let Some(columns) = columns { match columns { PartitionIdSet::AtLeastOneMissing => return false, PartitionIdSet::Present(symbols) => { for symbol in symbols { if 
!self.column_id_to_index.contains_key(symbol) { return false; } } } } } true } /// returns true if there are any rows in column that are non-null /// and within the timestamp range specified by pred pub fn column_matches_predicate<T>( &self, column: &[Option<T>], partition_predicate: &PartitionPredicate, ) -> Result<bool> { match partition_predicate.range { None => Ok(true), Some(range) => { let time_column_id = partition_predicate.time_column_id; let time_column = self.column(time_column_id)?; time_column .has_non_null_i64_range(column, range.start, range.end) .context(ColumnPredicateEvaluation { column: time_column_id, }) } } } } /// Reorders tag_columns so that its prefix matches exactly /// prefix_columns. Returns an error if there are duplicates, or other /// untoward inputs fn reorder_prefix( prefix_columns: &[String], tag_columns: Vec<Arc<String>>, ) -> Result<Vec<Arc<String>>> { // tag_used_set[i[ is true if we have used the value in tag_columns[i] let mut tag_used_set = vec![false; tag_columns.len()]; // Note that this is an O(N^2) algorithm. 
We are assuming the // number of tag columns is reasonably small // map from prefix_column[idx] -> index in tag_columns let prefix_map = prefix_columns .iter() .map(|pc| { let found_location = tag_columns .iter() .enumerate() .find(|(_, c)| pc == c.as_ref()); if let Some((index, _)) = found_location { if tag_used_set[index] { DuplicateGroupColumn { column_name: pc }.fail() } else { tag_used_set[index] = true; Ok(index) } } else { GroupColumnNotFound { column_name: pc, all_tag_column_names: tag_columns .iter() .map(|s| s.as_ref() as &str) .collect::<Vec<_>>() .as_slice() .join(", "), } .fail() } }) .collect::<Result<Vec<_>>>()?; let mut new_tag_columns = prefix_map .iter() .map(|&i| tag_columns[i].clone()) .collect::<Vec<_>>(); new_tag_columns.extend(tag_columns.into_iter().enumerate().filter_map(|(i, c)| { // already used in prefix if tag_used_set[i] { None } else { Some(c) } })); Ok(new_tag_columns) } /// Traits to help creating DataFuson expressions from strings trait IntoExpr { /// Creates a DataFuson expr fn into_expr(&self) -> Expr; /// creates a DataFusion SortExpr fn into_sort_expr(&self) -> Expr { Expr::Sort { expr: Box::new(self.into_expr()), asc: true, // Sort ASCENDING nulls_first: true, } } } impl IntoExpr for Arc<String> { fn into_expr(&self) -> Expr { Expr::Column(self.as_ref().clone()) } } impl IntoExpr for str { fn into_expr(&self) -> Expr { Expr::Column(self.to_string()) } } #[cfg(test)] mod tests { use arrow::util::pretty::pretty_format_batches; use data_types::data::split_lines_into_write_entry_partitions; use datafusion::{logical_plan::Operator, scalar::ScalarValue}; use influxdb_line_protocol::{parse_lines, ParsedLine}; use query::{exec::Executor, predicate::PredicateBuilder}; use test_helpers::str_vec_to_arc_vec; use super::*; #[test] fn test_has_columns() { // setup a test table let mut partition = Partition::new("dummy_partition_key"); let dictionary = &mut partition.dictionary; let mut table = 
Table::new(dictionary.lookup_value_or_insert("table_name")); let lp_lines = vec![ "h2o,state=MA,city=Boston temp=70.4 100", "h2o,state=MA,city=Boston temp=72.4 250", ]; write_lines_to_table(&mut table, dictionary, lp_lines); let state_symbol = dictionary.id("state").unwrap(); let new_symbol = dictionary.lookup_value_or_insert("not_a_columns"); assert!(table.has_columns(None)); let pred = PartitionIdSet::AtLeastOneMissing; assert!(!table.has_columns(Some(&pred))); let set = BTreeSet::<u32>::new(); let pred = PartitionIdSet::Present(set); assert!(table.has_columns(Some(&pred))); let mut set = BTreeSet::new(); set.insert(state_symbol); let pred = PartitionIdSet::Present(set); assert!(table.has_columns(Some(&pred))); let mut set = BTreeSet::new(); set.insert(new_symbol); let pred = PartitionIdSet::Present(set); assert!(!table.has_columns(Some(&pred))); let mut set = BTreeSet::new(); set.insert(state_symbol); set.insert(new_symbol); let pred = PartitionIdSet::Present(set); assert!(!table.has_columns(Some(&pred))); } #[test] fn test_matches_table_name_predicate() { // setup a test table let mut partition = Partition::new("dummy_partition_key"); let dictionary = &mut partition.dictionary; let mut table = Table::new(dictionary.lookup_value_or_insert("h2o")); let lp_lines = vec![ "h2o,state=MA,city=Boston temp=70.4 100", "h2o,state=MA,city=Boston temp=72.4 250", ]; write_lines_to_table(&mut table, dictionary, lp_lines); let h2o_symbol = dictionary.id("h2o").unwrap(); assert!(table.matches_table_name_predicate(None)); let set = BTreeSet::new(); assert!(!table.matches_table_name_predicate(Some(&set))); let mut set = BTreeSet::new(); set.insert(h2o_symbol); assert!(table.matches_table_name_predicate(Some(&set))); // Some symbol that is not the same as h2o_symbol assert_ne!(37377, h2o_symbol); let mut set = BTreeSet::new(); set.insert(37377); assert!(!table.matches_table_name_predicate(Some(&set))); } #[tokio::test] async fn test_series_set_plan() { // setup a test table let 
mut partition = Partition::new("dummy_partition_key"); let dictionary = &mut partition.dictionary; let mut table = Table::new(dictionary.lookup_value_or_insert("table_name")); let lp_lines = vec![ "h2o,state=MA,city=Boston temp=70.4 100", "h2o,state=MA,city=Boston temp=72.4 250", "h2o,state=CA,city=LA temp=90.0 200", "h2o,state=CA,city=LA temp=90.0 350", ]; write_lines_to_table(&mut table, dictionary, lp_lines); let predicate = PredicateBuilder::default().build(); let partition_predicate = partition.compile_predicate(&predicate).unwrap(); let series_set_plan = table .series_set_plan(&partition_predicate, &partition) .expect("creating the series set plan"); assert_eq!(series_set_plan.table_name.as_ref(), "table_name"); assert_eq!( series_set_plan.tag_columns, *str_vec_to_arc_vec(&["city", "state"]) ); assert_eq!( series_set_plan.field_columns, *str_vec_to_arc_vec(&["temp"]) ); // run the created plan, ensuring the output is as expected let results = run_plan(series_set_plan.plan).await; let expected = vec![ "+--------+-------+------+------+", "| city | state | temp | time |", "+--------+-------+------+------+", "| Boston | MA | 70.4 | 100 |", "| Boston | MA | 72.4 | 250 |", "| LA | CA | 90 | 200 |", "| LA | CA | 90 | 350 |", "+--------+-------+------+------+", ]; assert_eq!(expected, results, "expected output"); } #[tokio::test] async fn test_series_set_plan_order() { // test that the columns and rows come out in the right order (tags then timestamp) // setup a test table let mut partition = Partition::new("dummy_partition_key"); let dictionary = &mut partition.dictionary; let mut table = Table::new(dictionary.lookup_value_or_insert("table_name")); let lp_lines = vec![ "h2o,zz_tag=A,state=MA,city=Kingston temp=70.1 800", "h2o,state=MA,city=Kingston,zz_tag=B temp=70.2 100", "h2o,state=CA,city=Boston temp=70.3 250", "h2o,state=MA,city=Boston,zz_tag=A temp=70.4 1000", "h2o,state=MA,city=Boston temp=70.5,other=5.0 250", ]; write_lines_to_table(&mut table, dictionary, 
lp_lines); let predicate = PredicateBuilder::default().build(); let partition_predicate = partition.compile_predicate(&predicate).unwrap(); let series_set_plan = table .series_set_plan(&partition_predicate, &partition) .expect("creating the series set plan"); assert_eq!(series_set_plan.table_name.as_ref(), "table_name"); assert_eq!( series_set_plan.tag_columns, *str_vec_to_arc_vec(&["city", "state", "zz_tag"]) ); assert_eq!( series_set_plan.field_columns, *str_vec_to_arc_vec(&["other", "temp"]) ); // run the created plan, ensuring the output is as expected let results = run_plan(series_set_plan.plan).await; let expected = vec![ "+----------+-------+--------+-------+------+------+", "| city | state | zz_tag | other | temp | time |", "+----------+-------+--------+-------+------+------+", "| Boston | CA | | | 70.3 | 250 |", "| Boston | MA | | 5 | 70.5 | 250 |", "| Boston | MA | A | | 70.4 | 1000 |", "| Kingston | MA | A | | 70.1 | 800 |", "| Kingston | MA | B | | 70.2 | 100 |", "+----------+-------+--------+-------+------+------+", ]; assert_eq!(expected, results, "expected output"); } #[tokio::test] async fn test_series_set_plan_filter() { // test that filters are applied reasonably // setup a test table let mut partition = Partition::new("dummy_partition_key"); let dictionary = &mut partition.dictionary; let mut table = Table::new(dictionary.lookup_value_or_insert("table_name")); let lp_lines = vec![ "h2o,state=MA,city=Boston temp=70.4 100", "h2o,state=MA,city=Boston temp=72.4 250", "h2o,state=CA,city=LA temp=90.0 200", "h2o,state=CA,city=LA temp=90.0 350", ]; write_lines_to_table(&mut table, dictionary, lp_lines); let predicate = PredicateBuilder::default() .add_expr(Expr::BinaryExpr { left: Box::new(Expr::Column("city".into())), op: Operator::Eq, right: Box::new(Expr::Literal(ScalarValue::Utf8(Some("LA".into())))), }) .timestamp_range(190, 210) .build(); let partition_predicate = partition.compile_predicate(&predicate).unwrap(); let series_set_plan = table 
.series_set_plan(&partition_predicate, &partition) .expect("creating the series set plan"); assert_eq!(series_set_plan.table_name.as_ref(), "table_name"); assert_eq!( series_set_plan.tag_columns, *str_vec_to_arc_vec(&["city", "state"]) ); assert_eq!( series_set_plan.field_columns, *str_vec_to_arc_vec(&["temp"]) ); // run the created plan, ensuring the output is as expected let results = run_plan(series_set_plan.plan).await; let expected = vec![ "+------+-------+------+------+", "| city | state | temp | time |", "+------+-------+------+------+", "| LA | CA | 90 | 200 |", "+------+-------+------+------+", ]; assert_eq!(expected, results, "expected output"); } #[tokio::test] async fn test_grouped_series_set_plan() { // test that filters are applied reasonably // setup a test table let mut partition = Partition::new("dummy_partition_key"); let dictionary = &mut partition.dictionary; let mut table = Table::new(dictionary.lookup_value_or_insert("table_name")); let lp_lines = vec![ "h2o,state=MA,city=Boston temp=70.4 100", "h2o,state=MA,city=Boston temp=72.4 250", "h2o,state=CA,city=LA temp=90.0 200", "h2o,state=CA,city=LA temp=90.0 350", ]; write_lines_to_table(&mut table, dictionary, lp_lines); let predicate = PredicateBuilder::default() .add_expr(Expr::BinaryExpr { left: Box::new(Expr::Column("city".into())), op: Operator::Eq, right: Box::new(Expr::Literal(ScalarValue::Utf8(Some("LA".into())))), }) .timestamp_range(190, 210) .build(); let partition_predicate = partition.compile_predicate(&predicate).unwrap(); let group_columns = vec![String::from("state")]; let grouped_series_set_plan = table .grouped_series_set_plan(&partition_predicate, &group_columns, &partition) .expect("creating the grouped_series set plan"); assert_eq!(grouped_series_set_plan.num_prefix_tag_group_columns, 1); // run the created plan, ensuring the output is as expected let results = run_plan(grouped_series_set_plan.series_set_plan.plan).await; let expected = vec![ "+-------+------+------+------+", 
"| state | city | temp | time |", "+-------+------+------+------+", "| CA | LA | 90 | 200 |", "+-------+------+------+------+", ]; assert_eq!(expected, results, "expected output"); } #[tokio::test] async fn test_field_name_plan() { // setup a test table let mut partition = Partition::new("dummy_partition_key"); let dictionary = &mut partition.dictionary; let mut table = Table::new(dictionary.lookup_value_or_insert("table_name")); let lp_lines = vec![ // Order this so field3 comes before field2 // (and thus the columns need to get reordered) "h2o,tag1=foo,tag2=bar field1=70.6,field3=2 100", "h2o,tag1=foo,tag2=bar field1=70.4,field2=\"ss\" 100", "h2o,tag1=foo,tag2=bar field1=70.5,field2=\"ss\" 100", "h2o,tag1=foo,tag2=bar field1=70.6,field4=true 1000", ]; write_lines_to_table(&mut table, dictionary, lp_lines); let predicate = PredicateBuilder::default().timestamp_range(0, 200).build(); let partition_predicate = partition.compile_predicate(&predicate).unwrap(); let field_names_set_plan = table .field_names_plan(&partition_predicate, &partition) .expect("creating the field_name plan"); // run the created plan, ensuring the output is as expected let results = run_plan(field_names_set_plan).await; let expected = vec![ "+--------+--------+--------+--------+------+", "| field1 | field2 | field3 | field4 | time |", "+--------+--------+--------+--------+------+", "| 70.6 | | 2 | | 100 |", "| 70.4 | ss | | | 100 |", "| 70.5 | ss | | | 100 |", "+--------+--------+--------+--------+------+", ]; assert_eq!(expected, results, "expected output"); } #[test] fn test_reorder_prefix() { assert_eq!(reorder_prefix_ok(&[], &[]), &[] as &[&str]); assert_eq!(reorder_prefix_ok(&[], &["one"]), &["one"]); assert_eq!(reorder_prefix_ok(&["one"], &["one"]), &["one"]); assert_eq!(reorder_prefix_ok(&[], &["one", "two"]), &["one", "two"]); assert_eq!( reorder_prefix_ok(&["one"], &["one", "two"]), &["one", "two"] ); assert_eq!( reorder_prefix_ok(&["two"], &["one", "two"]), &["two", "one"] ); 
assert_eq!( reorder_prefix_ok(&["two", "one"], &["one", "two"]), &["two", "one"] ); assert_eq!( reorder_prefix_ok(&[], &["one", "two", "three"]), &["one", "two", "three"] ); assert_eq!( reorder_prefix_ok(&["one"], &["one", "two", "three"]), &["one", "two", "three"] ); assert_eq!( reorder_prefix_ok(&["two"], &["one", "two", "three"]), &["two", "one", "three"] ); assert_eq!( reorder_prefix_ok(&["three", "one"], &["one", "two", "three"]), &["three", "one", "two"] ); // errors assert_eq!( reorder_prefix_err(&["one"], &[]), "Group column \'one\' not found in tag columns: " ); assert_eq!( reorder_prefix_err(&["one"], &["two", "three"]), "Group column \'one\' not found in tag columns: two, three" ); assert_eq!( reorder_prefix_err(&["two", "one", "two"], &["one", "two"]), "Duplicate group column \'two\'" ); } fn reorder_prefix_ok(prefix: &[&str], table_columns: &[&str]) -> Vec<String> { let prefix = prefix.iter().map(|s| s.to_string()).collect::<Vec<_>>(); let table_columns = Arc::try_unwrap(str_vec_to_arc_vec(table_columns)).expect("unwrap the arc"); let res = reorder_prefix(&prefix, table_columns); let message = format!("Expected OK, got {:?}", res); let res = res.expect(&message); res.into_iter() .map(|a| Arc::try_unwrap(a).expect("unwrapping arc")) .collect() } // returns the error string or panics if `reorder_prefix` doesn't return an error fn reorder_prefix_err(prefix: &[&str], table_columns: &[&str]) -> String { let prefix = prefix.iter().map(|s| s.to_string()).collect::<Vec<_>>(); let table_columns = Arc::try_unwrap(str_vec_to_arc_vec(table_columns)).expect("unwrap the arc"); let res = reorder_prefix(&prefix, table_columns); match res { Ok(r) => { panic!( "Expected error result from reorder_prefix_err, but was OK: '{:?}'", r ); } Err(e) => format!("{}", e), } } /// Runs `plan` and returns the output as petty-formatted array of strings async fn run_plan(plan: LogicalPlan) -> Vec<String> { // run the created plan, ensuring the output is as expected let batches = 
Executor::new() .run_logical_plan(plan) .await .expect("ok running plan"); pretty_format_batches(&batches) .expect("formatting results") .trim() .split('\n') .map(|s| s.to_string()) .collect::<Vec<_>>() } /// Insert the line protocol lines in `lp_lines` into this table fn write_lines_to_table(table: &mut Table, dictionary: &mut Dictionary, lp_lines: Vec<&str>) { let lp_data = lp_lines.join("\n"); let lines: Vec<_> = parse_lines(&lp_data).map(|l| l.unwrap()).collect(); let data = split_lines_into_write_entry_partitions(partition_key_func, &lines); let batch = flatbuffers::get_root::<wb::WriteBufferBatch<'_>>(&data); let entries = batch.entries().expect("at least one entry"); for entry in entries { let table_batches = entry.table_batches().expect("there were table batches"); for batch in table_batches { let rows = batch.rows().expect("Had rows in the batch"); table .append_rows(dictionary, &rows) .expect("Appended the row"); } } } fn partition_key_func(_: &ParsedLine<'_>) -> String { String::from("the_partition_key") } }
34.947368
111
0.546957
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_has_columns() {\n // setup a test table\n let mut partition = Partition::new(\"dummy_partition_key\");\n let dictionary = &mut partition.dictionary;\n let mut table = Table::new(dictionary.lookup_value_or_insert(\"table_name\"));\n\n let lp_lines = vec![\n \"h2o,state=MA,city=Boston temp=70.4 100\",\n \"h2o,state=MA,city=Boston temp=72.4 250\",\n ];\n\n write_lines_to_table(&mut table, dictionary, lp_lines);\n\n let state_symbol = dictionary.id(\"state\").unwrap();\n let new_symbol = dictionary.lookup_value_or_insert(\"not_a_columns\");\n\n assert!(table.has_columns(None));\n\n let pred = PartitionIdSet::AtLeastOneMissing;\n assert!(!table.has_columns(Some(&pred)));\n\n let set = BTreeSet::<u32>::new();\n let pred = PartitionIdSet::Present(set);\n assert!(table.has_columns(Some(&pred)));\n\n let mut set = BTreeSet::new();\n set.insert(state_symbol);\n let pred = PartitionIdSet::Present(set);\n assert!(table.has_columns(Some(&pred)));\n\n let mut set = BTreeSet::new();\n set.insert(new_symbol);\n let pred = PartitionIdSet::Present(set);\n assert!(!table.has_columns(Some(&pred)));\n\n let mut set = BTreeSet::new();\n set.insert(state_symbol);\n set.insert(new_symbol);\n let pred = PartitionIdSet::Present(set);\n assert!(!table.has_columns(Some(&pred)));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_matches_table_name_predicate() {\n // setup a test table\n let mut partition = Partition::new(\"dummy_partition_key\");\n let dictionary = &mut partition.dictionary;\n let mut table = Table::new(dictionary.lookup_value_or_insert(\"h2o\"));\n\n let lp_lines = vec![\n \"h2o,state=MA,city=Boston temp=70.4 100\",\n \"h2o,state=MA,city=Boston temp=72.4 250\",\n ];\n write_lines_to_table(&mut table, dictionary, lp_lines);\n\n let h2o_symbol = dictionary.id(\"h2o\").unwrap();\n\n assert!(table.matches_table_name_predicate(None));\n\n let set = BTreeSet::new();\n 
assert!(!table.matches_table_name_predicate(Some(&set)));\n\n let mut set = BTreeSet::new();\n set.insert(h2o_symbol);\n assert!(table.matches_table_name_predicate(Some(&set)));\n\n // Some symbol that is not the same as h2o_symbol\n assert_ne!(37377, h2o_symbol);\n let mut set = BTreeSet::new();\n set.insert(37377);\n assert!(!table.matches_table_name_predicate(Some(&set)));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_reorder_prefix() {\n assert_eq!(reorder_prefix_ok(&[], &[]), &[] as &[&str]);\n\n assert_eq!(reorder_prefix_ok(&[], &[\"one\"]), &[\"one\"]);\n assert_eq!(reorder_prefix_ok(&[\"one\"], &[\"one\"]), &[\"one\"]);\n\n assert_eq!(reorder_prefix_ok(&[], &[\"one\", \"two\"]), &[\"one\", \"two\"]);\n assert_eq!(\n reorder_prefix_ok(&[\"one\"], &[\"one\", \"two\"]),\n &[\"one\", \"two\"]\n );\n assert_eq!(\n reorder_prefix_ok(&[\"two\"], &[\"one\", \"two\"]),\n &[\"two\", \"one\"]\n );\n assert_eq!(\n reorder_prefix_ok(&[\"two\", \"one\"], &[\"one\", \"two\"]),\n &[\"two\", \"one\"]\n );\n\n assert_eq!(\n reorder_prefix_ok(&[], &[\"one\", \"two\", \"three\"]),\n &[\"one\", \"two\", \"three\"]\n );\n assert_eq!(\n reorder_prefix_ok(&[\"one\"], &[\"one\", \"two\", \"three\"]),\n &[\"one\", \"two\", \"three\"]\n );\n assert_eq!(\n reorder_prefix_ok(&[\"two\"], &[\"one\", \"two\", \"three\"]),\n &[\"two\", \"one\", \"three\"]\n );\n assert_eq!(\n reorder_prefix_ok(&[\"three\", \"one\"], &[\"one\", \"two\", \"three\"]),\n &[\"three\", \"one\", \"two\"]\n );\n\n // errors\n assert_eq!(\n reorder_prefix_err(&[\"one\"], &[]),\n \"Group column \\'one\\' not found in tag columns: \"\n );\n assert_eq!(\n reorder_prefix_err(&[\"one\"], &[\"two\", \"three\"]),\n \"Group column \\'one\\' not found in tag columns: two, three\"\n );\n assert_eq!(\n reorder_prefix_err(&[\"two\", \"one\", \"two\"], &[\"one\", \"two\"]),\n \"Duplicate group column \\'two\\'\"\n );\n }\n}" ]
f705aecba5408db48580803d5f08c6fd72e20f31
9,884
rs
Rust
src/webhook_handler.rs
FlareFlo/WT_event_handler
6eb30b5db324f3722d85180b72f5e2efb082d31d
[ "MIT" ]
1
2021-07-26T16:17:11.000Z
2021-07-26T16:17:11.000Z
src/webhook_handler.rs
FlareFlo/WT_event_handler
6eb30b5db324f3722d85180b72f5e2efb082d31d
[ "MIT" ]
6
2021-08-30T18:17:35.000Z
2021-11-08T15:29:51.000Z
src/webhook_handler.rs
FlareFlo/WT_event_handler
6eb30b5db324f3722d85180b72f5e2efb082d31d
[ "MIT" ]
1
2021-08-04T09:01:47.000Z
2021-08-04T09:01:47.000Z
use std::fs; use log::{error, warn}; use serenity::http::Http; use crate::json::recent::Channel; use crate::json::webhooks::{FilterType, Hooks, WebhookAuth}; use crate::scrapers::scraper_resources::resources::ScrapeType; use crate::TOKEN_PATH; const DEFAULT_KEYWORDS: [&str; 28] = [ "devblog", "event", "maintenance", "major", "trailer", "teaser", "developers", "fix", "vehicles", "economy", "changes", "sale", "twitch", "bundles", "development", "shop", "pass", "season", "operation", "pass", "summer", "2021", "planned", "bonds", "issues", "technical", "servers", "christmas" ]; impl Channel { pub async fn handle_webhook(&self, content: &str, is_filtered: bool, scrape_type: ScrapeType) { let token_raw = fs::read_to_string(TOKEN_PATH).expect("Cannot read file"); let webhook_auth: WebhookAuth = serde_json::from_str(&token_raw).expect("Json cannot be read"); for (i, hook) in webhook_auth.hooks.iter().enumerate() { if is_filtered { if match_filter(content, hook, scrape_type) { deliver_webhooks(content, i).await; } } else { deliver_webhooks(content, i).await; } } } } fn match_filter(content: &str, hook: &Hooks, scrape_type: ScrapeType) -> bool { match scrape_type { ScrapeType::Main => { filter_main(content, hook) } ScrapeType::Forum => { filter_forum(content, hook) } } } fn filter_main(content: &str, hook: &Hooks) -> bool { let main_filter = &hook.main_filter; match main_filter { FilterType::Default => { for keyword in DEFAULT_KEYWORDS { if content.contains(keyword) { print_log(&format!("URL {} matched with default main keyword {}", content, keyword)); return true; } } print_log(&format!("URL {} did not match any whitelist in main default list", content)); false } FilterType::Blacklist => { let blacklist = &hook.main_keywords; if blacklist.is_empty() { print_log(&format!("URL {} matched empty blacklist for main", content)); return true; } for keyword in blacklist { if content.contains(keyword) { print_log(&format!("URL {} found in blacklist for main", content)); return 
false; } } print_log(&format!("{} is not in main blacklist", content)); true } FilterType::Whitelist => { let whitelist = &hook.main_keywords; for keyword in whitelist { if content.contains(keyword) { print_log(&format!("URL {} matched with whitelisted keyword {} from main list", content, keyword)); return true; } } print_log(&format!("URL {} did not match any whitelist in main list", content)); false } } } fn filter_forum(content: &str, hook: &Hooks) -> bool { let forum_filter = &hook.forum_filter; match forum_filter { FilterType::Default => { for keyword in DEFAULT_KEYWORDS { if content.contains(keyword) { print_log(&format!("URL {} matched with default forum keyword {}", content, keyword)); return true; } } print_log(&format!("URL {} did not match any whitelist in forum default list", content)); false } FilterType::Blacklist => { let blacklist = &hook.forum_keywords; println!("{:?}", blacklist); if blacklist.is_empty() { print_log(&format!("URL {} matched empty blacklist for forum", content)); return true; } for keyword in blacklist { if content.contains(keyword) { print_log(&format!("URL {} found in blacklist for forum", content)); return false; } } print_log(&format!("{} is not in forum blacklist", content)); true } FilterType::Whitelist => { let whitelist = &hook.forum_keywords; for keyword in whitelist { if content.contains(keyword) { print_log(&format!("URL {} matched with whitelisted keyword {} from forum list", content, keyword)); return true; } } print_log(&format!("URL {} did not match any whitelist in forum list", content)); false } } } //Finally sends the webhook to the servers async fn deliver_webhooks(content: &str, pos: usize) { let token_raw = fs::read_to_string(TOKEN_PATH).expect("Cannot read file"); let webhook_auth: WebhookAuth = serde_json::from_str(&token_raw).expect("Json cannot be read"); let uid = webhook_auth.hooks[pos].uid; let token = &webhook_auth.hooks[pos].token; let my_http_client = Http::new_with_token(token); let webhook = match 
my_http_client.get_webhook_with_token(uid, token).await { Err(why) => { println!("{}", why); error!("{}", why); panic!("") } Ok(hook) => hook, }; webhook.execute(my_http_client, false, |w| { w.content(&format!("[{a}]()", a = content)); w.username("The WT news bot"); w.avatar_url("https://cdn.discordapp.com/attachments/866634236232597534/868623209631744000/the_news_broke.png"); w }).await.unwrap(); } fn print_log(input: &str) { println!("{}", input); warn!("{}", input); } // Tests ----------------------------------------------------------------------- mod tests { #[allow(unused_imports)] use crate::json::webhooks::FilterType::{Blacklist, Whitelist}; #[allow(unused_imports)] use super::*; // Main tests ------------------------------------------------------------------- #[test] fn main_test_filter_default_pass() { assert_eq!(match_filter("pass", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: FilterType::default(), forum_filter: FilterType::default(), main_keywords: vec![], forum_keywords: vec![], }, ScrapeType::Main), true) } #[test] fn main_test_filter_default_no_match() { assert_eq!(match_filter("xyz", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: FilterType::default(), forum_filter: FilterType::default(), main_keywords: vec![], forum_keywords: vec![], }, ScrapeType::Main), false); } #[test] fn main_test_filter_whitelist_match() { assert_eq!(match_filter("C", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Whitelist, forum_filter: Blacklist, main_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["W".to_owned(), "X".to_owned(), "Y".to_owned(), "Z".to_owned()], }, ScrapeType::Main), true); } #[test] #[should_panic] fn main_test_filter_whitelist_miss() { assert_eq!(match_filter("E", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Whitelist, forum_filter: Whitelist, main_keywords: vec!["A".to_owned(), 
"B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["W".to_owned(), "X".to_owned(), "Y".to_owned(), "Z".to_owned()], }, ScrapeType::Main), true); } #[test] #[should_panic] fn main_test_filter_blacklist_match() { assert_eq!(match_filter("C", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Blacklist, forum_filter: Blacklist, main_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], }, ScrapeType::Main), true); } #[test] fn main_test_filter_blacklist_miss() { assert_eq!(match_filter("E", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Blacklist, forum_filter: Blacklist, main_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], }, ScrapeType::Main), true); } // forum tests ------------------------------------------------------------------ #[test] fn forum_test_filter_default_pass() { assert_eq!(match_filter("pass", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: FilterType::default(), forum_filter: FilterType::default(), main_keywords: vec![], forum_keywords: vec![], }, ScrapeType::Forum), true) } #[test] fn forum_test_filter_default_no_match() { assert_eq!(match_filter("xyz", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: FilterType::default(), forum_filter: FilterType::default(), main_keywords: vec![], forum_keywords: vec![], }, ScrapeType::Forum), false); } #[test] fn forum_test_filter_whitelist_match() { assert_eq!(match_filter("C", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Whitelist, forum_filter: Blacklist, main_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["W".to_owned(), "X".to_owned(), "Y".to_owned(), "Z".to_owned()], }, 
ScrapeType::Forum), true); } #[test] fn forum_test_filter_whitelist_miss() { assert_eq!(match_filter("E", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Whitelist, forum_filter: Whitelist, main_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["W".to_owned(), "X".to_owned(), "Y".to_owned(), "Z".to_owned()], }, ScrapeType::Forum), false); } #[test] fn forum_test_filter_blacklist_match() { assert_eq!(match_filter("C", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Blacklist, forum_filter: Blacklist, main_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], }, ScrapeType::Forum), false); } #[test] fn forum_test_filter_blacklist_miss() { match_filter("E", &Hooks { name: "".to_string(), token: "".to_string(), uid: 0, main_filter: Blacklist, forum_filter: Blacklist, main_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], forum_keywords: vec!["A".to_owned(), "B".to_owned(), "C".to_owned(), "D".to_owned()], }, ScrapeType::Forum); } }
29.504478
114
0.632234
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn main_test_filter_default_pass() {\n\t\tassert_eq!(match_filter(\"pass\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: FilterType::default(),\n\t\t\tforum_filter: FilterType::default(),\n\t\t\tmain_keywords: vec![],\n\t\t\tforum_keywords: vec![],\n\t\t}, ScrapeType::Main), true)\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn main_test_filter_default_no_match() {\n\t\tassert_eq!(match_filter(\"xyz\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: FilterType::default(),\n\t\t\tforum_filter: FilterType::default(),\n\t\t\tmain_keywords: vec![],\n\t\t\tforum_keywords: vec![],\n\t\t}, ScrapeType::Main), false);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn main_test_filter_whitelist_match() {\n\t\tassert_eq!(match_filter(\"C\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Whitelist,\n\t\t\tforum_filter: Blacklist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"W\".to_owned(), \"X\".to_owned(), \"Y\".to_owned(), \"Z\".to_owned()],\n\t\t}, ScrapeType::Main), true);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn main_test_filter_whitelist_miss() {\n\t\tassert_eq!(match_filter(\"E\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Whitelist,\n\t\t\tforum_filter: Whitelist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"W\".to_owned(), \"X\".to_owned(), \"Y\".to_owned(), \"Z\".to_owned()],\n\t\t}, ScrapeType::Main), true);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn main_test_filter_blacklist_match() 
{\n\t\tassert_eq!(match_filter(\"C\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Blacklist,\n\t\t\tforum_filter: Blacklist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t}, ScrapeType::Main), true);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn main_test_filter_blacklist_miss() {\n\t\tassert_eq!(match_filter(\"E\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Blacklist,\n\t\t\tforum_filter: Blacklist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t}, ScrapeType::Main), true);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forum_test_filter_default_pass() {\n\t\tassert_eq!(match_filter(\"pass\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: FilterType::default(),\n\t\t\tforum_filter: FilterType::default(),\n\t\t\tmain_keywords: vec![],\n\t\t\tforum_keywords: vec![],\n\t\t}, ScrapeType::Forum), true)\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forum_test_filter_default_no_match() {\n\t\tassert_eq!(match_filter(\"xyz\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: FilterType::default(),\n\t\t\tforum_filter: FilterType::default(),\n\t\t\tmain_keywords: vec![],\n\t\t\tforum_keywords: vec![],\n\t\t}, ScrapeType::Forum), false);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forum_test_filter_whitelist_match() {\n\t\tassert_eq!(match_filter(\"C\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: 
\"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Whitelist,\n\t\t\tforum_filter: Blacklist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"W\".to_owned(), \"X\".to_owned(), \"Y\".to_owned(), \"Z\".to_owned()],\n\t\t}, ScrapeType::Forum), true);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forum_test_filter_whitelist_miss() {\n\t\tassert_eq!(match_filter(\"E\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Whitelist,\n\t\t\tforum_filter: Whitelist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"W\".to_owned(), \"X\".to_owned(), \"Y\".to_owned(), \"Z\".to_owned()],\n\t\t}, ScrapeType::Forum), false);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forum_test_filter_blacklist_match() {\n\t\tassert_eq!(match_filter(\"C\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Blacklist,\n\t\t\tforum_filter: Blacklist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t}, ScrapeType::Forum), false);\n\t}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn forum_test_filter_blacklist_miss() {\n\t\tmatch_filter(\"E\", &Hooks {\n\t\t\tname: \"\".to_string(),\n\t\t\ttoken: \"\".to_string(),\n\t\t\tuid: 0,\n\t\t\tmain_filter: Blacklist,\n\t\t\tforum_filter: Blacklist,\n\t\t\tmain_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t\tforum_keywords: vec![\"A\".to_owned(), \"B\".to_owned(), \"C\".to_owned(), \"D\".to_owned()],\n\t\t}, ScrapeType::Forum);\n\t}\n}" ]
f705bd8260fe0e89c1947437a85c92063456b856
2,658
rs
Rust
crates/nu-command/tests/format_conversions/html.rs
PurityLake/nushell
1cb449b2d14bcd77305024716db13a7dbecaede7
[ "MIT" ]
null
null
null
crates/nu-command/tests/format_conversions/html.rs
PurityLake/nushell
1cb449b2d14bcd77305024716db13a7dbecaede7
[ "MIT" ]
null
null
null
crates/nu-command/tests/format_conversions/html.rs
PurityLake/nushell
1cb449b2d14bcd77305024716db13a7dbecaede7
[ "MIT" ]
null
null
null
use nu_test_support::{nu, pipeline}; #[test] fn out_html_simple() { let actual = nu!( cwd: ".", pipeline( r#" echo 3 | to html "# )); assert_eq!( actual.out, r"<html><style>body { background-color:white;color:black; }</style><body>3</body></html>" ); } #[test] fn out_html_partial() { let actual = nu!( cwd: ".", pipeline( r#" echo 3 | to html -p "# )); assert_eq!( actual.out, "<div style=\"background-color:white;color:black;\">3</div>" ); } #[test] fn out_html_table() { let actual = nu!( cwd: ".", pipeline( r#" echo '{"name": "darren"}' | from json | to html "# )); assert_eq!( actual.out, r"<html><style>body { background-color:white;color:black; }</style><body><table><tr><th>name</th></tr><tr><td>darren</td></tr></table></body></html>" ); } #[test] fn test_cd_html_color_flag_dark_false() { let actual = nu!( cwd: ".", pipeline( r#" cd --help | to html --html-color "# ) ); assert_eq!( actual.out, r"<html><style>body { background-color:white;color:black; }</style><body>Change directory.<br><br>Usage:<br> &gt; cd (path) <br><br>Flags:<br> -h, --help<br> Display this help message<br><br>Parameters:<br> (optional) path: the path to change to<br><br>Examples:<br> Change to your home directory<br> &gt; <span style='color:#037979;font-weight:bold;'>cd<span style='color:black;font-weight:normal;'> </span></span><span style='color:#037979;'>~<span style='color:black;font-weight:normal;'><br><br> Change to a directory via abbreviations<br> &gt; </span><span style='color:#037979;font-weight:bold;'>cd<span style='color:black;font-weight:normal;'> </span></span></span><span style='color:#037979;'>d/s/9<span style='color:black;font-weight:normal;'><br><br></body></html></span></span>" ); } #[test] fn test_no_color_flag() { let actual = nu!( cwd: ".", pipeline( r#" cd --help | to html --no-color "# ) ); assert_eq!( actual.out, r"<html><style>body { background-color:white;color:black; }</style><body>Change directory.<br><br>Usage:<br> &gt; cd (path) <br><br>Flags:<br> -h, --help<br> 
Display this help message<br><br>Parameters:<br> (optional) path: the path to change to<br><br>Examples:<br> Change to your home directory<br> &gt; cd ~<br><br> Change to a directory via abbreviations<br> &gt; cd d/s/9<br><br></body></html>" ); }
34.519481
809
0.558315
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn out_html_simple() {\n let actual = nu!(\n cwd: \".\", pipeline(\n r#\"\n echo 3 | to html\n \"#\n ));\n\n assert_eq!(\n actual.out,\n r\"<html><style>body { background-color:white;color:black; }</style><body>3</body></html>\"\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn out_html_partial() {\n let actual = nu!(\n cwd: \".\", pipeline(\n r#\"\n echo 3 | to html -p\n \"#\n ));\n\n assert_eq!(\n actual.out,\n \"<div style=\\\"background-color:white;color:black;\\\">3</div>\"\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn out_html_table() {\n let actual = nu!(\n cwd: \".\", pipeline(\n r#\"\n echo '{\"name\": \"darren\"}' | from json | to html\n \"#\n ));\n\n assert_eq!(\n actual.out,\n r\"<html><style>body { background-color:white;color:black; }</style><body><table><tr><th>name</th></tr><tr><td>darren</td></tr></table></body></html>\"\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_cd_html_color_flag_dark_false() {\n let actual = nu!(\n cwd: \".\", pipeline(\n r#\"\n cd --help | to html --html-color\n \"#\n )\n );\n assert_eq!(\n actual.out,\n r\"<html><style>body { background-color:white;color:black; }</style><body>Change directory.<br><br>Usage:<br> &gt; cd (path) <br><br>Flags:<br> -h, --help<br> Display this help message<br><br>Parameters:<br> (optional) path: the path to change to<br><br>Examples:<br> Change to your home directory<br> &gt; <span style='color:#037979;font-weight:bold;'>cd<span style='color:black;font-weight:normal;'> </span></span><span style='color:#037979;'>~<span style='color:black;font-weight:normal;'><br><br> Change to a directory via abbreviations<br> &gt; </span><span style='color:#037979;font-weight:bold;'>cd<span style='color:black;font-weight:normal;'> </span></span></span><span style='color:#037979;'>d/s/9<span style='color:black;font-weight:normal;'><br><br></body></html></span></span>\"\n );\n}\n}", 
"#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_no_color_flag() {\n let actual = nu!(\n cwd: \".\", pipeline(\n r#\"\n cd --help | to html --no-color\n \"#\n )\n );\n assert_eq!(\n actual.out,\n r\"<html><style>body { background-color:white;color:black; }</style><body>Change directory.<br><br>Usage:<br> &gt; cd (path) <br><br>Flags:<br> -h, --help<br> Display this help message<br><br>Parameters:<br> (optional) path: the path to change to<br><br>Examples:<br> Change to your home directory<br> &gt; cd ~<br><br> Change to a directory via abbreviations<br> &gt; cd d/s/9<br><br></body></html>\"\n );\n}\n}" ]
f70604c3bbbfb32bb33120cecdbf9d4c66b7dde1
16,887
rs
Rust
src/ensemble/random_forest_classifier.rs
MaxGreil/smartcore
521dab49ef92a955eb860ca195c89feab17c4b59
[ "Apache-2.0" ]
null
null
null
src/ensemble/random_forest_classifier.rs
MaxGreil/smartcore
521dab49ef92a955eb860ca195c89feab17c4b59
[ "Apache-2.0" ]
null
null
null
src/ensemble/random_forest_classifier.rs
MaxGreil/smartcore
521dab49ef92a955eb860ca195c89feab17c4b59
[ "Apache-2.0" ]
null
null
null
//! # Random Forest Classifier //! A random forest is an ensemble estimator that fits multiple [decision trees](../../tree/index.html) to random subsets of the dataset and averages predictions //! to improve the predictive accuracy and control over-fitting. See [ensemble models](../index.html) for more details. //! //! Bigger number of estimators in general improves performance of the algorithm with an increased cost of training time. //! The random sample of _m_ predictors is typically set to be \\(\sqrt{p}\\) from the full set of _p_ predictors. //! //! Example: //! //! ``` //! use smartcore::linalg::naive::dense_matrix::*; //! use smartcore::ensemble::random_forest_classifier::RandomForestClassifier; //! //! // Iris dataset //! let x = DenseMatrix::from_2d_array(&[ //! &[5.1, 3.5, 1.4, 0.2], //! &[4.9, 3.0, 1.4, 0.2], //! &[4.7, 3.2, 1.3, 0.2], //! &[4.6, 3.1, 1.5, 0.2], //! &[5.0, 3.6, 1.4, 0.2], //! &[5.4, 3.9, 1.7, 0.4], //! &[4.6, 3.4, 1.4, 0.3], //! &[5.0, 3.4, 1.5, 0.2], //! &[4.4, 2.9, 1.4, 0.2], //! &[4.9, 3.1, 1.5, 0.1], //! &[7.0, 3.2, 4.7, 1.4], //! &[6.4, 3.2, 4.5, 1.5], //! &[6.9, 3.1, 4.9, 1.5], //! &[5.5, 2.3, 4.0, 1.3], //! &[6.5, 2.8, 4.6, 1.5], //! &[5.7, 2.8, 4.5, 1.3], //! &[6.3, 3.3, 4.7, 1.6], //! &[4.9, 2.4, 3.3, 1.0], //! &[6.6, 2.9, 4.6, 1.3], //! &[5.2, 2.7, 3.9, 1.4], //! ]); //! let y = vec![ //! 0., 0., 0., 0., 0., 0., 0., 0., //! 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., //! ]; //! //! let classifier = RandomForestClassifier::fit(&x, &y, Default::default()).unwrap(); //! let y_hat = classifier.predict(&x).unwrap(); // use the same data for prediction //! ``` //! //! <script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script> //! 
<script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script> use std::default::Default; use std::fmt::Debug; use rand::Rng; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use crate::api::{Predictor, SupervisedEstimator}; use crate::error::{Failed, FailedError}; use crate::linalg::Matrix; use crate::math::num::RealNumber; use crate::tree::decision_tree_classifier::{ which_max, DecisionTreeClassifier, DecisionTreeClassifierParameters, SplitCriterion, }; /// Parameters of the Random Forest algorithm. /// Some parameters here are passed directly into base estimator. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone)] pub struct RandomForestClassifierParameters { /// Split criteria to use when building a tree. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub criterion: SplitCriterion, /// Tree max depth. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub max_depth: Option<u16>, /// The minimum number of samples required to be at a leaf node. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub min_samples_leaf: usize, /// The minimum number of samples required to split an internal node. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub min_samples_split: usize, /// The number of trees in the forest. pub n_trees: u16, /// Number of random sample of predictors to use as split candidates. pub m: Option<usize>, /// Whether to keep samples used for tree generation. This is required for OOB prediction. 
pub keep_samples: bool, } /// Random Forest Classifier #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug)] pub struct RandomForestClassifier<T: RealNumber> { parameters: RandomForestClassifierParameters, trees: Vec<DecisionTreeClassifier<T>>, classes: Vec<T>, samples: Option<Vec<Vec<bool>>>, } impl RandomForestClassifierParameters { /// Split criteria to use when building a tree. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub fn with_criterion(mut self, criterion: SplitCriterion) -> Self { self.criterion = criterion; self } /// Tree max depth. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub fn with_max_depth(mut self, max_depth: u16) -> Self { self.max_depth = Some(max_depth); self } /// The minimum number of samples required to be at a leaf node. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub fn with_min_samples_leaf(mut self, min_samples_leaf: usize) -> Self { self.min_samples_leaf = min_samples_leaf; self } /// The minimum number of samples required to split an internal node. See [Decision Tree Classifier](../../tree/decision_tree_classifier/index.html) pub fn with_min_samples_split(mut self, min_samples_split: usize) -> Self { self.min_samples_split = min_samples_split; self } /// The number of trees in the forest. pub fn with_n_trees(mut self, n_trees: u16) -> Self { self.n_trees = n_trees; self } /// Number of random sample of predictors to use as split candidates. pub fn with_m(mut self, m: usize) -> Self { self.m = Some(m); self } /// Whether to keep samples used for tree generation. This is required for OOB prediction. 
pub fn with_keep_samples(mut self, keep_samples: bool) -> Self { self.keep_samples = keep_samples; self } } impl<T: RealNumber> PartialEq for RandomForestClassifier<T> { fn eq(&self, other: &Self) -> bool { if self.classes.len() != other.classes.len() || self.trees.len() != other.trees.len() { false } else { for i in 0..self.classes.len() { if (self.classes[i] - other.classes[i]).abs() > T::epsilon() { return false; } } for i in 0..self.trees.len() { if self.trees[i] != other.trees[i] { return false; } } true } } } impl Default for RandomForestClassifierParameters { fn default() -> Self { RandomForestClassifierParameters { criterion: SplitCriterion::Gini, max_depth: None, min_samples_leaf: 1, min_samples_split: 2, n_trees: 100, m: Option::None, keep_samples: false, } } } impl<T: RealNumber, M: Matrix<T>> SupervisedEstimator<M, M::RowVector, RandomForestClassifierParameters> for RandomForestClassifier<T> { fn fit( x: &M, y: &M::RowVector, parameters: RandomForestClassifierParameters, ) -> Result<Self, Failed> { RandomForestClassifier::fit(x, y, parameters) } } impl<T: RealNumber, M: Matrix<T>> Predictor<M, M::RowVector> for RandomForestClassifier<T> { fn predict(&self, x: &M) -> Result<M::RowVector, Failed> { self.predict(x) } } impl<T: RealNumber> RandomForestClassifier<T> { /// Build a forest of trees from the training set. /// * `x` - _NxM_ matrix with _N_ observations and _M_ features in each observation. 
/// * `y` - the target class values pub fn fit<M: Matrix<T>>( x: &M, y: &M::RowVector, parameters: RandomForestClassifierParameters, ) -> Result<RandomForestClassifier<T>, Failed> { let (_, num_attributes) = x.shape(); let y_m = M::from_row_vector(y.clone()); let (_, y_ncols) = y_m.shape(); let mut yi: Vec<usize> = vec![0; y_ncols]; let classes = y_m.unique(); for (i, yi_i) in yi.iter_mut().enumerate().take(y_ncols) { let yc = y_m.get(0, i); *yi_i = classes.iter().position(|c| yc == *c).unwrap(); } let mtry = parameters.m.unwrap_or_else(|| { (T::from(num_attributes).unwrap()) .sqrt() .floor() .to_usize() .unwrap() }); let classes = y_m.unique(); let k = classes.len(); let mut trees: Vec<DecisionTreeClassifier<T>> = Vec::new(); let mut maybe_all_samples: Option<Vec<Vec<bool>>> = Option::None; if parameters.keep_samples { maybe_all_samples = Some(Vec::new()); } for _ in 0..parameters.n_trees { let samples = RandomForestClassifier::<T>::sample_with_replacement(&yi, k); if let Some(ref mut all_samples) = maybe_all_samples { all_samples.push(samples.iter().map(|x| *x != 0).collect()) } let params = DecisionTreeClassifierParameters { criterion: parameters.criterion.clone(), max_depth: parameters.max_depth, min_samples_leaf: parameters.min_samples_leaf, min_samples_split: parameters.min_samples_split, }; let tree = DecisionTreeClassifier::fit_weak_learner(x, y, samples, mtry, params)?; trees.push(tree); } Ok(RandomForestClassifier { parameters, trees, classes, samples: maybe_all_samples, }) } /// Predict class for `x` /// * `x` - _KxM_ data where _K_ is number of observations and _M_ is number of features. 
pub fn predict<M: Matrix<T>>(&self, x: &M) -> Result<M::RowVector, Failed> { let mut result = M::zeros(1, x.shape().0); let (n, _) = x.shape(); for i in 0..n { result.set(0, i, self.classes[self.predict_for_row(x, i)]); } Ok(result.to_row_vector()) } fn predict_for_row<M: Matrix<T>>(&self, x: &M, row: usize) -> usize { let mut result = vec![0; self.classes.len()]; for tree in self.trees.iter() { result[tree.predict_for_row(x, row)] += 1; } which_max(&result) } /// Predict OOB classes for `x`. `x` is expected to be equal to the dataset used in training. pub fn predict_oob<M: Matrix<T>>(&self, x: &M) -> Result<M::RowVector, Failed> { let (n, _) = x.shape(); if self.samples.is_none() { Err(Failed::because( FailedError::PredictFailed, "Need samples=true for OOB predictions.", )) } else if self.samples.as_ref().unwrap()[0].len() != n { Err(Failed::because( FailedError::PredictFailed, "Prediction matrix must match matrix used in training for OOB predictions.", )) } else { let mut result = M::zeros(1, n); for i in 0..n { result.set(0, i, self.classes[self.predict_for_row_oob(x, i)]); } Ok(result.to_row_vector()) } } fn predict_for_row_oob<M: Matrix<T>>(&self, x: &M, row: usize) -> usize { let mut result = vec![0; self.classes.len()]; for (tree, samples) in self.trees.iter().zip(self.samples.as_ref().unwrap()) { if !samples[row] { result[tree.predict_for_row(x, row)] += 1; } } which_max(&result) } fn sample_with_replacement(y: &[usize], num_classes: usize) -> Vec<usize> { let mut rng = rand::thread_rng(); let class_weight = vec![1.; num_classes]; let nrows = y.len(); let mut samples = vec![0; nrows]; for (l, class_weight_l) in class_weight.iter().enumerate().take(num_classes) { let mut n_samples = 0; let mut index: Vec<usize> = Vec::new(); for (i, y_i) in y.iter().enumerate().take(nrows) { if *y_i == l { index.push(i); n_samples += 1; } } let size = ((n_samples as f64) / *class_weight_l) as usize; for _ in 0..size { let xi: usize = rng.gen_range(0..n_samples); 
samples[index[xi]] += 1; } } samples } } #[cfg(test)] mod tests { use super::*; use crate::linalg::naive::dense_matrix::DenseMatrix; use crate::metrics::*; #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] #[test] fn fit_predict_iris() { let x = DenseMatrix::from_2d_array(&[ &[5.1, 3.5, 1.4, 0.2], &[4.9, 3.0, 1.4, 0.2], &[4.7, 3.2, 1.3, 0.2], &[4.6, 3.1, 1.5, 0.2], &[5.0, 3.6, 1.4, 0.2], &[5.4, 3.9, 1.7, 0.4], &[4.6, 3.4, 1.4, 0.3], &[5.0, 3.4, 1.5, 0.2], &[4.4, 2.9, 1.4, 0.2], &[4.9, 3.1, 1.5, 0.1], &[7.0, 3.2, 4.7, 1.4], &[6.4, 3.2, 4.5, 1.5], &[6.9, 3.1, 4.9, 1.5], &[5.5, 2.3, 4.0, 1.3], &[6.5, 2.8, 4.6, 1.5], &[5.7, 2.8, 4.5, 1.3], &[6.3, 3.3, 4.7, 1.6], &[4.9, 2.4, 3.3, 1.0], &[6.6, 2.9, 4.6, 1.3], &[5.2, 2.7, 3.9, 1.4], ]); let y = vec![ 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., ]; let classifier = RandomForestClassifier::fit( &x, &y, RandomForestClassifierParameters { criterion: SplitCriterion::Gini, max_depth: None, min_samples_leaf: 1, min_samples_split: 2, n_trees: 100, m: Option::None, keep_samples: false, }, ) .unwrap(); assert!(accuracy(&y, &classifier.predict(&x).unwrap()) >= 0.95); } #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] #[test] fn fit_predict_iris_oob() { let x = DenseMatrix::from_2d_array(&[ &[5.1, 3.5, 1.4, 0.2], &[4.9, 3.0, 1.4, 0.2], &[4.7, 3.2, 1.3, 0.2], &[4.6, 3.1, 1.5, 0.2], &[5.0, 3.6, 1.4, 0.2], &[5.4, 3.9, 1.7, 0.4], &[4.6, 3.4, 1.4, 0.3], &[5.0, 3.4, 1.5, 0.2], &[4.4, 2.9, 1.4, 0.2], &[4.9, 3.1, 1.5, 0.1], &[7.0, 3.2, 4.7, 1.4], &[6.4, 3.2, 4.5, 1.5], &[6.9, 3.1, 4.9, 1.5], &[5.5, 2.3, 4.0, 1.3], &[6.5, 2.8, 4.6, 1.5], &[5.7, 2.8, 4.5, 1.3], &[6.3, 3.3, 4.7, 1.6], &[4.9, 2.4, 3.3, 1.0], &[6.6, 2.9, 4.6, 1.3], &[5.2, 2.7, 3.9, 1.4], ]); let y = vec![ 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., ]; let classifier = RandomForestClassifier::fit( &x, &y, RandomForestClassifierParameters { criterion: 
SplitCriterion::Gini, max_depth: None, min_samples_leaf: 1, min_samples_split: 2, n_trees: 100, m: Option::None, keep_samples: true, }, ) .unwrap(); assert!( accuracy(&y, &classifier.predict_oob(&x).unwrap()) < accuracy(&y, &classifier.predict(&x).unwrap()) ); } #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] #[test] #[cfg(feature = "serde")] fn serde() { let x = DenseMatrix::from_2d_array(&[ &[5.1, 3.5, 1.4, 0.2], &[4.9, 3.0, 1.4, 0.2], &[4.7, 3.2, 1.3, 0.2], &[4.6, 3.1, 1.5, 0.2], &[5.0, 3.6, 1.4, 0.2], &[5.4, 3.9, 1.7, 0.4], &[4.6, 3.4, 1.4, 0.3], &[5.0, 3.4, 1.5, 0.2], &[4.4, 2.9, 1.4, 0.2], &[4.9, 3.1, 1.5, 0.1], &[7.0, 3.2, 4.7, 1.4], &[6.4, 3.2, 4.5, 1.5], &[6.9, 3.1, 4.9, 1.5], &[5.5, 2.3, 4.0, 1.3], &[6.5, 2.8, 4.6, 1.5], &[5.7, 2.8, 4.5, 1.3], &[6.3, 3.3, 4.7, 1.6], &[4.9, 2.4, 3.3, 1.0], &[6.6, 2.9, 4.6, 1.3], &[5.2, 2.7, 3.9, 1.4], ]); let y = vec![ 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., ]; let forest = RandomForestClassifier::fit(&x, &y, Default::default()).unwrap(); let deserialized_forest: RandomForestClassifier<f64> = bincode::deserialize(&bincode::serialize(&forest).unwrap()).unwrap(); assert_eq!(forest, deserialized_forest); } }
35.777542
161
0.514183
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fit_predict_iris() {\n let x = DenseMatrix::from_2d_array(&[\n &[5.1, 3.5, 1.4, 0.2],\n &[4.9, 3.0, 1.4, 0.2],\n &[4.7, 3.2, 1.3, 0.2],\n &[4.6, 3.1, 1.5, 0.2],\n &[5.0, 3.6, 1.4, 0.2],\n &[5.4, 3.9, 1.7, 0.4],\n &[4.6, 3.4, 1.4, 0.3],\n &[5.0, 3.4, 1.5, 0.2],\n &[4.4, 2.9, 1.4, 0.2],\n &[4.9, 3.1, 1.5, 0.1],\n &[7.0, 3.2, 4.7, 1.4],\n &[6.4, 3.2, 4.5, 1.5],\n &[6.9, 3.1, 4.9, 1.5],\n &[5.5, 2.3, 4.0, 1.3],\n &[6.5, 2.8, 4.6, 1.5],\n &[5.7, 2.8, 4.5, 1.3],\n &[6.3, 3.3, 4.7, 1.6],\n &[4.9, 2.4, 3.3, 1.0],\n &[6.6, 2.9, 4.6, 1.3],\n &[5.2, 2.7, 3.9, 1.4],\n ]);\n let y = vec![\n 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n ];\n\n let classifier = RandomForestClassifier::fit(\n &x,\n &y,\n RandomForestClassifierParameters {\n criterion: SplitCriterion::Gini,\n max_depth: None,\n min_samples_leaf: 1,\n min_samples_split: 2,\n n_trees: 100,\n m: Option::None,\n keep_samples: false,\n },\n )\n .unwrap();\n\n assert!(accuracy(&y, &classifier.predict(&x).unwrap()) >= 0.95);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fit_predict_iris_oob() {\n let x = DenseMatrix::from_2d_array(&[\n &[5.1, 3.5, 1.4, 0.2],\n &[4.9, 3.0, 1.4, 0.2],\n &[4.7, 3.2, 1.3, 0.2],\n &[4.6, 3.1, 1.5, 0.2],\n &[5.0, 3.6, 1.4, 0.2],\n &[5.4, 3.9, 1.7, 0.4],\n &[4.6, 3.4, 1.4, 0.3],\n &[5.0, 3.4, 1.5, 0.2],\n &[4.4, 2.9, 1.4, 0.2],\n &[4.9, 3.1, 1.5, 0.1],\n &[7.0, 3.2, 4.7, 1.4],\n &[6.4, 3.2, 4.5, 1.5],\n &[6.9, 3.1, 4.9, 1.5],\n &[5.5, 2.3, 4.0, 1.3],\n &[6.5, 2.8, 4.6, 1.5],\n &[5.7, 2.8, 4.5, 1.3],\n &[6.3, 3.3, 4.7, 1.6],\n &[4.9, 2.4, 3.3, 1.0],\n &[6.6, 2.9, 4.6, 1.3],\n &[5.2, 2.7, 3.9, 1.4],\n ]);\n let y = vec![\n 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n ];\n\n let classifier = RandomForestClassifier::fit(\n &x,\n &y,\n RandomForestClassifierParameters {\n criterion: SplitCriterion::Gini,\n max_depth: None,\n min_samples_leaf: 1,\n 
min_samples_split: 2,\n n_trees: 100,\n m: Option::None,\n keep_samples: true,\n },\n )\n .unwrap();\n assert!(\n accuracy(&y, &classifier.predict_oob(&x).unwrap())\n < accuracy(&y, &classifier.predict(&x).unwrap())\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn serde() {\n let x = DenseMatrix::from_2d_array(&[\n &[5.1, 3.5, 1.4, 0.2],\n &[4.9, 3.0, 1.4, 0.2],\n &[4.7, 3.2, 1.3, 0.2],\n &[4.6, 3.1, 1.5, 0.2],\n &[5.0, 3.6, 1.4, 0.2],\n &[5.4, 3.9, 1.7, 0.4],\n &[4.6, 3.4, 1.4, 0.3],\n &[5.0, 3.4, 1.5, 0.2],\n &[4.4, 2.9, 1.4, 0.2],\n &[4.9, 3.1, 1.5, 0.1],\n &[7.0, 3.2, 4.7, 1.4],\n &[6.4, 3.2, 4.5, 1.5],\n &[6.9, 3.1, 4.9, 1.5],\n &[5.5, 2.3, 4.0, 1.3],\n &[6.5, 2.8, 4.6, 1.5],\n &[5.7, 2.8, 4.5, 1.3],\n &[6.3, 3.3, 4.7, 1.6],\n &[4.9, 2.4, 3.3, 1.0],\n &[6.6, 2.9, 4.6, 1.3],\n &[5.2, 2.7, 3.9, 1.4],\n ]);\n let y = vec![\n 0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n ];\n\n let forest = RandomForestClassifier::fit(&x, &y, Default::default()).unwrap();\n\n let deserialized_forest: RandomForestClassifier<f64> =\n bincode::deserialize(&bincode::serialize(&forest).unwrap()).unwrap();\n\n assert_eq!(forest, deserialized_forest);\n }\n}" ]
f70650b15535f1eac8ae45c607d625b0fa56e3af
21,342
rs
Rust
src/crypto.rs
gakz/hedera-ios-sdk
5e37ff27bda18c64bc74af4c34eee617b8104075
[ "Apache-2.0" ]
8
2019-06-08T17:45:26.000Z
2019-09-24T22:15:30.000Z
src/crypto.rs
gakz/hedera-ios-sdk
5e37ff27bda18c64bc74af4c34eee617b8104075
[ "Apache-2.0" ]
1
2019-06-11T14:43:13.000Z
2019-06-12T15:27:28.000Z
src/crypto.rs
gakz/hedera-ios-sdk
5e37ff27bda18c64bc74af4c34eee617b8104075
[ "Apache-2.0" ]
3
2019-07-09T19:10:06.000Z
2020-02-13T03:10:13.000Z
use crate::proto::{self, ToProto}; use bip39::{Language, Mnemonic, MnemonicType, Seed}; use ed25519_dalek; use failure::{bail, err_msg, Error}; use failure_derive::Fail; use hex; use num::BigUint; use once_cell::{sync::Lazy, sync_lazy}; use rand::SeedableRng; use rand_chacha::ChaChaRng; use simple_asn1::{ der_decode, der_encode, oid, to_der, ASN1Block, ASN1Class, ASN1DecodeErr, ASN1EncodeErr, FromASN1, ToASN1, OID, }; use std::{ fmt::{self, Debug, Display}, str::FromStr, }; use try_from::{TryFrom, TryInto}; // Types used for (de-)serializing public and secret keys from ASN.1 byte // streams. #[derive(Debug, Fail)] enum ASN1Error { #[fail(display = "{:?}", _0)] Decode(ASN1DecodeErr), #[fail(display = "{:?}", _0)] Encode(ASN1EncodeErr), #[fail(display = "expected `{}`; found: `{}`", expected, found)] UnexpectedType { expected: &'static str, found: String, }, } impl From<ASN1DecodeErr> for ASN1Error { fn from(err: ASN1DecodeErr) -> Self { ASN1Error::Decode(err) } } impl From<ASN1EncodeErr> for ASN1Error { fn from(err: ASN1EncodeErr) -> Self { ASN1Error::Encode(err) } } // [https://tools.ietf.org/id/draft-ietf-curdle-pkix-01.html#rfc.section.3] static OID_ED25519: Lazy<OID> = sync_lazy! 
{ oid!(1, 3, 101, 112) }; // [https://www.ietf.org/rfc/rfc3280.txt] // AlgorithmIdentifier ::= SEQUENCE { // algorithm OBJECT IDENTIFIER, // parameters ANY DEFINED BY algorithm OPTIONAL } #[derive(Debug)] struct AlgorithmIdentifier { algorithm: OID, } impl FromASN1 for AlgorithmIdentifier { type Error = ASN1Error; fn from_asn1(v: &[ASN1Block]) -> Result<(Self, &[ASN1Block]), Self::Error> { let algorithm = if let Some(ASN1Block::Sequence(_, blocks)) = v.get(0) { if let Some(ASN1Block::ObjectIdentifier(_, id)) = blocks.get(0) { id } else { return Err(ASN1Error::UnexpectedType { expected: "OBJECT IDENTIFIER", found: format!("{:?}", blocks.get(0)), }); } } else { return Err(ASN1Error::UnexpectedType { expected: "SEQUENCE", found: format!("{:?}", v.get(0)), }); }; Ok(( Self { // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement algorithm: algorithm.clone(), }, &v[1..], )) } } // [https://www.ietf.org/rfc/rfc3280.txt] // SubjectPublicKeyInfo ::= SEQUENCE { // algorithm AlgorithmIdentifier, // subjectPublicKey BIT STRING } #[derive(Debug)] struct SubjectPublicKeyInfo { algorithm: AlgorithmIdentifier, subject_public_key: Vec<u8>, } impl ToASN1 for SubjectPublicKeyInfo { type Error = ASN1Error; fn to_asn1_class(&self, _c: ASN1Class) -> Result<Vec<ASN1Block>, Self::Error> { Ok(vec![ASN1Block::Sequence( 0, vec![ // AlgorithmIdentifier ASN1Block::Sequence( 0, vec![ // Algorithm // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement ASN1Block::ObjectIdentifier(0, self.algorithm.algorithm.clone()), ], ), // subjectPublicKey ASN1Block::BitString( 0, self.subject_public_key.len() * 8, // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement self.subject_public_key.clone(), ), ], )]) } } impl FromASN1 for SubjectPublicKeyInfo { type Error = ASN1Error; fn from_asn1(v: &[ASN1Block]) -> Result<(Self, &[ASN1Block]), Self::Error> { let (algorithm, subject_public_key) = if let Some(ASN1Block::Sequence(_, blocks)) = v.get(0) { 
// Parse: algorithm let (algorithm, blocks): (AlgorithmIdentifier, _) = FromASN1::from_asn1(blocks)?; // Parse: subject_public_key if let Some(ASN1Block::BitString(_, _, bytes)) = blocks.get(0) { (algorithm, bytes) } else { return Err(ASN1Error::UnexpectedType { expected: "BIT STRING", found: format!("{:?}", blocks.get(0)), }); } } else { return Err(ASN1Error::UnexpectedType { expected: "SEQUENCE", found: format!("{:?}", v.get(0)), }); }; Ok(( Self { algorithm, // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement subject_public_key: subject_public_key.clone(), }, &v[1..], )) } } // [https://www.ietf.org/rfc/rfc5208.txt] // PrivateKeyInfo ::= SEQUENCE { // version INTEGER, // privateKeyAlgorithm AlgorithmIdentifier, // privateKey OCTET STRING, // attributes [0] IMPLICIT Attributes OPTIONAL } struct PrivateKeyInfo { algorithm: AlgorithmIdentifier, private_key: Vec<u8>, } impl ToASN1 for PrivateKeyInfo { type Error = ASN1Error; fn to_asn1_class(&self, _c: ASN1Class) -> Result<Vec<ASN1Block>, Self::Error> { Ok(vec![ASN1Block::Sequence( 0, vec![ // Version ASN1Block::Integer(0, 0.into()), // AlgorithmIdentifier ASN1Block::Sequence( 0, vec![ // Algorithm // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement ASN1Block::ObjectIdentifier(0, self.algorithm.algorithm.clone()), ], ), // PrivateKey ASN1Block::OctetString( 0, // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement to_der(&ASN1Block::OctetString(0, self.private_key.clone()))?, ), ], )]) } } impl FromASN1 for PrivateKeyInfo { type Error = ASN1Error; fn from_asn1(v: &[ASN1Block]) -> Result<(Self, &[ASN1Block]), Self::Error> { let (algorithm, key) = if let Some(ASN1Block::Sequence(_, blocks)) = v.get(0) { // Parse: algorithm let (algorithm, blocks): (AlgorithmIdentifier, _) = FromASN1::from_asn1(&blocks[1..])?; // Parse: subject_public_key if let Some(ASN1Block::OctetString(_, bytes)) = blocks.get(0) { (algorithm, bytes) } else { return 
Err(ASN1Error::UnexpectedType { expected: "OCTET STRING", found: format!("{:?}", blocks.get(0)), }); } } else { return Err(ASN1Error::UnexpectedType { expected: "SEQUENCE", found: format!("{:?}", v.get(0)), }); }; Ok(( Self { // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement algorithm, // FIXME: Rewrite or improve the ASN.1 lib to remove allocation requirement private_key: key.clone(), }, &v[1..], )) } } /// An ed25519 public key. #[derive(PartialEq, Clone)] #[repr(C)] pub struct PublicKey(ed25519_dalek::PublicKey); impl PublicKey { /// Construct a `PublicKey` from a slice of bytes. /// Bytes are expected to be either a raw key or encoded in ASN.1. pub fn from_bytes(bytes: impl AsRef<[u8]>) -> Result<Self, Error> { let bytes = bytes.as_ref(); if bytes.len() == ed25519_dalek::PUBLIC_KEY_LENGTH { // If the buffer is exactly the length of a public key; assume that this is // a raw key and return it directly return Ok(PublicKey(ed25519_dalek::PublicKey::from_bytes(bytes)?)); } let info: SubjectPublicKeyInfo = der_decode(&bytes)?; if info.algorithm.algorithm != *OID_ED25519 { bail!( "ed25519: unknown public key algorithm: {:?}", info.algorithm.algorithm ); } if info.subject_public_key.len() != ed25519_dalek::PUBLIC_KEY_LENGTH { bail!("ed25519: public key length mismatch"); } Ok(PublicKey(ed25519_dalek::PublicKey::from_bytes( &info.subject_public_key, )?)) } /// Return the `PublicKey` as raw bytes. #[inline] pub fn as_bytes(&self) -> &[u8; ed25519_dalek::PUBLIC_KEY_LENGTH] { self.0.as_bytes() } /// Format a `PublicKey` as a vec of bytes in ASN.1 format. pub fn to_encoded_bytes(&self) -> Vec<u8> { der_encode(&SubjectPublicKeyInfo { algorithm: AlgorithmIdentifier { algorithm: OID_ED25519.clone(), }, subject_public_key: self.0.to_bytes().to_vec(), }) // NOTE: Not possible to fail. Only fail case the library has is if OIDs are // given incorrectly. .unwrap() } /// Verify a signature on a message with this `PublicKey`. 
pub fn verify(&self, message: impl AsRef<[u8]>, signature: &Signature) -> Result<bool, Error> { match self.0.verify(message.as_ref(), &signature.0) { Ok(_) => Ok(true), Err(error) => { if error.to_string() == "Verification equation was not satisfied" { Ok(false) } else { Err(error)? } } } } } /// Construct a `PublicKey` from a hex representation of a raw or ASN.1 encoded /// key. impl FromStr for PublicKey { type Err = Error; #[inline] fn from_str(s: &str) -> Result<Self, Self::Err> { Self::from_bytes(&hex::decode(s.as_bytes())?) } } impl Debug for PublicKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\"{}\"", self) } } /// Format a `PublicKey` as a hex representation of its bytes in ASN.1 format. impl Display for PublicKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&hex::encode(&self.to_encoded_bytes())) } } impl ToProto<proto::BasicTypes::Key> for PublicKey { fn to_proto(&self) -> Result<proto::BasicTypes::Key, Error> { let mut key = proto::BasicTypes::Key::new(); key.set_ed25519(self.as_bytes().to_vec()); Ok(key) } } impl TryFrom<proto::BasicTypes::Key> for PublicKey { type Err = Error; fn try_from(mut key: proto::BasicTypes::Key) -> Result<Self, Self::Err> { if key.has_ed25519() { let bytes = key.take_ed25519(); if bytes.len() == 64 { // This is hex-encoded // CryptoGetInfo returns the public key like this Self::from_bytes(hex::decode(&bytes)?) } else { Self::from_bytes(bytes) } } else if key.has_keyList() && key.get_keyList().keys.len() == 1 { Ok(key.take_keyList().keys.remove(0).try_into()?) } else { Err(err_msg("Only ed25519 public keys are currently supported")) } } } /// An EdDSA secret key. #[repr(C)] pub struct SecretKey(ed25519_dalek::SecretKey); impl SecretKey { /// Generate a `SecretKey` with a BIP-39 mnemonic using a cryptographically /// secure random number generator. /// /// The `password` is required with the mnemonic to reproduce the secret key. 
pub fn generate(password: &str) -> (Self, String) { let mnemonic = Mnemonic::new(MnemonicType::Words24, Language::English); let secret = Self::generate_with_mnemonic(&mnemonic, password); (secret, mnemonic.into_phrase()) } fn generate_with_mnemonic(mnemonic: &Mnemonic, password: &str) -> Self { let mut seed: [u8; 32] = Default::default(); seed.copy_from_slice(&Seed::new(&mnemonic, password).as_bytes()[0..32]); let mut rng = ChaChaRng::from_seed(seed); SecretKey(ed25519_dalek::SecretKey::generate(&mut rng)) } /// Construct a `SecretKey` from a slice of bytes. /// Bytes are expected to be either a raw key or encoded in ASN.1. pub fn from_bytes(bytes: impl AsRef<[u8]>) -> Result<Self, Error> { let bytes = bytes.as_ref(); if bytes.len() == ed25519_dalek::SECRET_KEY_LENGTH + ed25519_dalek::PUBLIC_KEY_LENGTH || bytes.len() == ed25519_dalek::SECRET_KEY_LENGTH { // If the buffer looks like a {secret}{public} byte string; just pull the secret // key bytes off of it return Ok(SecretKey(ed25519_dalek::SecretKey::from_bytes( &bytes[..ed25519_dalek::SECRET_KEY_LENGTH], )?)); } let info: PrivateKeyInfo = der_decode(&bytes)?; if info.algorithm.algorithm != *OID_ED25519 { bail!( "ed25519: PKCS#8 wrapping contained private key with unknown algorithm: {:?}", info.algorithm.algorithm ); } Ok(SecretKey(ed25519_dalek::SecretKey::from_bytes( &info.private_key[2..], )?)) } /// Re-construct a `SecretKey` from the supplied mnemonic and password. pub fn from_mnemonic(mnemonic: &str, password: &str) -> Result<Self, Error> { let mnemonic = Mnemonic::from_phrase(mnemonic, Language::English)?; Ok(Self::generate_with_mnemonic(&mnemonic, password)) } /// Return the `SecretKey` as raw bytes. #[inline] pub fn as_bytes(&self) -> &[u8; ed25519_dalek::PUBLIC_KEY_LENGTH] { self.0.as_bytes() } /// Format a `SecretKey` as a vec of bytes in ASN.1 format. 
pub fn to_encoded_bytes(&self) -> Vec<u8> { der_encode(&PrivateKeyInfo { algorithm: AlgorithmIdentifier { algorithm: OID_ED25519.clone(), }, private_key: self.0.to_bytes().to_vec(), }) // NOTE: Not possible to fail. Only fail case the library has is if OIDs are // given incorrectly. .unwrap() } /// Derive a `PublicKey` from this `SecretKey`. #[inline] pub fn public(&self) -> PublicKey { PublicKey(ed25519_dalek::PublicKey::from(&self.0)) } /// Sign a message with this `SecretKey`. #[inline] pub fn sign(&self, message: impl AsRef<[u8]>) -> Signature { Signature( ed25519_dalek::ExpandedSecretKey::from(&self.0) .sign(message.as_ref(), &self.public().0), ) } } impl Clone for SecretKey { #[inline] fn clone(&self) -> Self { Self::from_bytes(self.0.as_bytes()).unwrap() } } /// Construct a `SecretKey` from a hex representation of a raw or ASN.1 encoded /// key. impl FromStr for SecretKey { type Err = Error; #[inline] fn from_str(s: &str) -> Result<Self, Self::Err> { Self::from_bytes(&hex::decode(s.as_bytes())?) } } impl<E> TryFrom<Result<SecretKey, E>> for SecretKey { type Err = E; #[inline] fn try_from(res: Result<Self, E>) -> Result<Self, Self::Err> { res } } impl<E> TryFrom<Result<String, E>> for SecretKey where E: Sync + Send + 'static + fmt::Debug + fmt::Display, { type Err = Error; #[inline] fn try_from(res: Result<String, E>) -> Result<Self, Error> { res.map_err(err_msg)?.parse() } } impl TryFrom<SecretKey> for SecretKey { type Err = Error; #[inline] fn try_from(self_: Self) -> Result<Self, Error> { Ok(self_) } } impl Debug for SecretKey { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\"{}\"", self) } } /// Format a `SecretKey` as a hex representation of its bytes in ASN.1 format. impl Display for SecretKey { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&hex::encode(&self.to_encoded_bytes())) } } /// An EdDSA signature. 
#[derive(Debug)] #[repr(C)] pub struct Signature(ed25519_dalek::Signature); impl Signature { /// Construct a `Signature` from a slice of bytes. #[inline] pub fn from_bytes(bytes: impl AsRef<[u8]>) -> Result<Self, Error> { Ok(Signature(ed25519_dalek::Signature::from_bytes( bytes.as_ref(), )?)) } /// Return the `Signature` as raw bytes. #[inline] pub fn to_bytes(&self) -> [u8; ed25519_dalek::SIGNATURE_LENGTH] { self.0.to_bytes() } } /// Construct a `Signature` from a hex representation of the signature. impl FromStr for Signature { type Err = Error; #[inline] fn from_str(s: &str) -> Result<Self, Self::Err> { Self::from_bytes(&hex::decode(s.as_bytes())?) } } /// Format a `Signature` as a hex representation of its bytes. impl Display for Signature { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&hex::encode(&self.to_bytes()[..])) } } impl ToProto<proto::BasicTypes::Signature> for Signature { fn to_proto(&self) -> Result<proto::BasicTypes::Signature, Error> { let mut signature = proto::BasicTypes::Signature::new(); signature.set_ed25519(self.to_bytes().to_vec()); Ok(signature) } } impl<'a> ToProto<proto::BasicTypes::Signature> for &'a [&'a Signature] { fn to_proto(&self) -> Result<proto::BasicTypes::Signature, Error> { let mut list = proto::BasicTypes::SignatureList::new(); for signature in self.iter() { list.sigs.push(signature.to_proto()?); } let mut wrapper = proto::BasicTypes::Signature::new(); wrapper.set_signatureList(list); Ok(wrapper) } } #[cfg(test)] mod tests { use super::{PublicKey, SecretKey, Signature}; use failure::Error; const KEY_PUBLIC_ASN1_HEX: &str = "302a300506032b6570032100e0c8ec2758a5879ffac226a13c0c516b799e72e35141a0dd828f94d37988a4b7"; const KEY_PUBLIC_HEX: &str = "e0c8ec2758a5879ffac226a13c0c516b799e72e35141a0dd828f94d37988a4b7"; const KEY_SECRET_ASN1_HEX: &str = "302e020100300506032b657004220420db484b828e64b2d8f12ce3c0a0e93a0b8cce7af1bb8f39c97732394482538e10"; const KEY_SECRET_HEX: &str = 
"db484b828e64b2d8f12ce3c0a0e93a0b8cce7af1bb8f39c97732394482538e10\ e0c8ec2758a5879ffac226a13c0c516b799e72e35141a0dd828f94d37988a4b7"; const MESSAGE: &str = "This is a message about the world."; const SIGNATURE: &str = "73bea53f31ca9c42a422ecb7516ec08d0bbd1a6bfd630ccf10ec1872454814d29f4a8011129cd007eab544af01a75f508285b591e5bed24b68f927751e49e30e"; #[test] fn test_parse() -> Result<(), Error> { let public_key1: PublicKey = KEY_PUBLIC_ASN1_HEX.parse()?; let public_key2: PublicKey = KEY_PUBLIC_HEX.parse()?; let secret_key1: SecretKey = KEY_SECRET_ASN1_HEX.parse()?; let secret_key2: SecretKey = KEY_SECRET_HEX.parse()?; assert_eq!(public_key1, public_key2); assert_eq!(secret_key1.0.as_bytes(), secret_key2.0.as_bytes()); assert_eq!(public_key1, secret_key1.public()); assert_eq!(public_key2, secret_key2.public()); assert_eq!(secret_key2.public(), secret_key1.public()); Ok(()) } #[test] fn test_verify() -> Result<(), Error> { let key: PublicKey = KEY_PUBLIC_ASN1_HEX.parse()?; let signature: Signature = SIGNATURE.parse()?; let verified = key.verify(MESSAGE.as_bytes(), &signature)?; assert!(verified); Ok(()) } #[test] fn test_sign() -> Result<(), Error> { let key: SecretKey = KEY_SECRET_ASN1_HEX.parse()?; let signature = key.sign(MESSAGE.as_bytes()); assert_eq!(SIGNATURE, signature.to_string()); Ok(()) } #[test] fn test_generate() -> Result<(), Error> { let (key, _mnemonic) = SecretKey::generate(""); let signature = key.sign(MESSAGE.as_bytes()); let verified = key.public().verify(MESSAGE.as_bytes(), &signature)?; assert!(verified); Ok(()) } #[test] fn test_display() -> Result<(), Error> { let public_key1: PublicKey = KEY_PUBLIC_ASN1_HEX.parse()?; let public_key2: PublicKey = public_key1.to_string().parse()?; let secret_key1: SecretKey = KEY_SECRET_ASN1_HEX.parse()?; let secret_key2: SecretKey = secret_key1.to_string().parse()?; assert_eq!(public_key1, public_key2); assert_eq!(secret_key1.as_bytes(), secret_key2.as_bytes()); Ok(()) } #[test] fn test_reconstruct() -> 
Result<(), Error> { let (secret1, mnemonic) = SecretKey::generate("this-is-not-a-password"); let secret2 = SecretKey::from_mnemonic(&mnemonic, "this-is-not-a-password")?; assert_eq!(secret1.as_bytes(), secret2.as_bytes()); Ok(()) } }
31.293255
159
0.573798
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_parse() -> Result<(), Error> {\n let public_key1: PublicKey = KEY_PUBLIC_ASN1_HEX.parse()?;\n let public_key2: PublicKey = KEY_PUBLIC_HEX.parse()?;\n\n let secret_key1: SecretKey = KEY_SECRET_ASN1_HEX.parse()?;\n let secret_key2: SecretKey = KEY_SECRET_HEX.parse()?;\n\n assert_eq!(public_key1, public_key2);\n assert_eq!(secret_key1.0.as_bytes(), secret_key2.0.as_bytes());\n assert_eq!(public_key1, secret_key1.public());\n assert_eq!(public_key2, secret_key2.public());\n assert_eq!(secret_key2.public(), secret_key1.public());\n\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_verify() -> Result<(), Error> {\n let key: PublicKey = KEY_PUBLIC_ASN1_HEX.parse()?;\n let signature: Signature = SIGNATURE.parse()?;\n let verified = key.verify(MESSAGE.as_bytes(), &signature)?;\n\n assert!(verified);\n\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_sign() -> Result<(), Error> {\n let key: SecretKey = KEY_SECRET_ASN1_HEX.parse()?;\n let signature = key.sign(MESSAGE.as_bytes());\n\n assert_eq!(SIGNATURE, signature.to_string());\n\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_generate() -> Result<(), Error> {\n let (key, _mnemonic) = SecretKey::generate(\"\");\n let signature = key.sign(MESSAGE.as_bytes());\n let verified = key.public().verify(MESSAGE.as_bytes(), &signature)?;\n\n assert!(verified);\n\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_display() -> Result<(), Error> {\n let public_key1: PublicKey = KEY_PUBLIC_ASN1_HEX.parse()?;\n let public_key2: PublicKey = public_key1.to_string().parse()?;\n\n let secret_key1: SecretKey = KEY_SECRET_ASN1_HEX.parse()?;\n let secret_key2: SecretKey = secret_key1.to_string().parse()?;\n\n assert_eq!(public_key1, public_key2);\n assert_eq!(secret_key1.as_bytes(), secret_key2.as_bytes());\n\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use 
super::*;\n #[test]\n fn test_reconstruct() -> Result<(), Error> {\n let (secret1, mnemonic) = SecretKey::generate(\"this-is-not-a-password\");\n let secret2 = SecretKey::from_mnemonic(&mnemonic, \"this-is-not-a-password\")?;\n\n assert_eq!(secret1.as_bytes(), secret2.as_bytes());\n\n Ok(())\n }\n}" ]
f70659792bb457799c663f913290d78f08e00fa9
35,483
rs
Rust
src/lib.rs
Pierre-Colin/rcvs
ae8710d7e0b4e93aaa3504e9e5fea3f379677dcc
[ "MIT" ]
null
null
null
src/lib.rs
Pierre-Colin/rcvs
ae8710d7e0b4e93aaa3504e9e5fea3f379677dcc
[ "MIT" ]
1
2019-04-07T05:25:24.000Z
2019-04-07T05:25:24.000Z
src/lib.rs
Pierre-Colin/rcvs
ae8710d7e0b4e93aaa3504e9e5fea3f379677dcc
[ "MIT" ]
null
null
null
//! # Randomized Condorcet Voting System //! //! The crate `rcvs` implements the Randomized Condorcet Voting System, a //! strategy-proof voting system using game theory to generalize the original //! Condorcet method. //! //! ## Condorcet method //! //! The Condorcet method consists of building a directed graph called the _duel //! graph_ of the election. Its vertices are the alternatives to vote among, //! and an arrow between two alternatives _A_ and _B_ means _A_ is preferred //! over _B_ more often than the opposite. The Condorcet criterion states that //! if the duel graph has a unique source, then the alternative it corresponds //! to never loses in a duel against another alternative and therefore must be //! elected. //! //! ## Randomization //! //! If no source or several exist, then the Condorcet criterion is not //! applicable and something else must be used. As surprising as it seems, //! randomly picking the winner usually yields very good properties in voting //! systems, but in order to maximize the electors' utility (or rather minimize //! the number of electors who end up wishing another alternative won), the //! probability distribution used to pick the winner (called strategy) is not //! necessarily uniform. Computing the optimal strategy requires some knowledge //! of game theory and linear programming, and the resulting voting system has //! excellent strategic properties. //! //! ## Implementation //! //! This crate provides structures to carry out elections using the Randomized //! Condorcet Voting System in Rust. It uses the crate //! [nalgebra](https://crates.io/crates/nalgebra) to solve linear programs //! and compute the optimal strategy, and [rand](https://crates.io/crates/rand) //! to generate pseudo-random numbers which are used both for picking winners //! randomly and for more efficient internal numerical algorithms. //! //! It is never mentioned in this documentation, but whenever a method takes an //! 
argument implementing `rand::Rng`, it means it will make use of //! pseudo-random numbers and the programmer will need to provide one, //! `rand::thread_rng()` being a quick-and-dirty default which is used in this //! crate's unit tests. extern crate nalgebra as na; extern crate rand; mod ballot; mod simplex; mod strategies; pub mod util; use std::{ clone::Clone, cmp::{Eq, Ordering}, collections::{HashMap, HashSet}, error::Error, fmt, hash::Hash, ops::Index, }; pub use ballot::Ballot; pub use ballot::Rank; pub use simplex::SimplexError; pub use strategies::Strategy; type Adjacency = na::DMatrix<bool>; type Matrix = na::DMatrix<f64>; type Vector = na::DVector<f64>; #[derive(Clone, Debug, Hash)] struct Arrow<A>(A, A); impl<A: Eq> PartialEq for Arrow<A> { fn eq(&self, other: &Arrow<A>) -> bool { self.0 == other.0 && self.1 == other.1 } } impl<A: Eq> Eq for Arrow<A> {} /// Implements the duel graph of an election. #[derive(Clone, Debug)] pub struct DuelGraph<A: fmt::Debug> { v: Vec<A>, a: Adjacency, } /// Implements errors in the election process. Interfaces with simplex errors. #[derive(Debug)] pub enum ElectionError { /// The simplex algorithm failed to compute both the minimax and maximin /// strategies; the underlying errors are contained in the arguments. BothFailed(simplex::SimplexError, simplex::SimplexError), /// The simplex algorithm failed to compute the strategy; the underlying /// error is contained in the argument. SimplexFailed(simplex::SimplexError), /// The operation failed because the election is already closed. ElectionClosed, /// The operation failed because the election is still open. 
ElectionOpen, } impl fmt::Display for ElectionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ElectionError::BothFailed(a, b) => { writeln!(f, "Both methods failed:")?; writeln!(f, " * minimax: {}", a)?; writeln!(f, " * maximin: {}", b) } ElectionError::SimplexFailed(e) => write!(f, "Simplex algorithm failed: {}", e), ElectionError::ElectionClosed => write!(f, "Election is closed"), ElectionError::ElectionOpen => write!(f, "Election is open"), } } } impl From<simplex::SimplexError> for ElectionError { fn from(error: simplex::SimplexError) -> Self { ElectionError::SimplexFailed(error) } } impl Error for ElectionError { fn description(&self) -> &str { match self { ElectionError::BothFailed(_, _) => { "Both minimax and maximin strategies failed to be solved" } ElectionError::SimplexFailed(_) => { "The simplex algorithm failed to compute the strategy" } ElectionError::ElectionClosed => "Election is already closed", ElectionError::ElectionOpen => "Election is still open", } } fn source(&self) -> Option<&(dyn Error + 'static)> { // in case of multiple cause, no other choice but to return itself match self { ElectionError::BothFailed(_, _) => Some(self), ElectionError::SimplexFailed(e) => Some(e), _ => None, } } } impl<A: fmt::Debug> fmt::Display for DuelGraph<A> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Graph {{")?; writeln!(f, "Alternatives: {:?}", self.v)?; writeln!(f, "{}", self.a)?; write!(f, "}}") } } impl<A: fmt::Debug> Index<(usize, usize)> for DuelGraph<A> { type Output = bool; /// Decides whether an arrow is in the graph fn index(&self, (from, to): (usize, usize)) -> &Self::Output { &self.a[(from, to)] } } impl<A: Clone + Eq + Hash + fmt::Debug> DuelGraph<A> { /// Returns a slice of the vertice labels pub fn get_vertices(&self) -> &[A] { &self.v } fn get_special_node(&self, f: impl Fn(usize, usize) -> (usize, usize)) -> Option<A> { let mut n: Option<A> = None; for i in 0..self.v.len() { if 
(0..self.v.len()).all(|j| !self.a[f(i, j)]) { match n { Some(_) => return None, None => n = Some(self.v[i].clone()), } } } n } /// Returns the source of the graph if it is unique, `None` otherwise. pub fn get_source(&self) -> Option<A> { self.get_special_node(|i, j| (j, i)) } /// Returns the sink of the graph if it is unique, `None` otherwise. pub fn get_sink(&self) -> Option<A> { self.get_special_node(|i, j| (i, j)) } fn adjacency_to_matrix(a: &Adjacency) -> Matrix { let (n, nn) = a.shape(); assert_eq!(n, nn); let mut m = Matrix::zeros(n, n); for i in 0..n { for j in 0..i { if a[(i, j)] { m[(i, j)] = 1f64; m[(j, i)] = -1f64; } else if a[(j, i)] { m[(j, i)] = 1f64; m[(i, j)] = -1f64; } } } m } fn compute_strategy( &self, m: &Matrix, bval: f64, cval: f64, ) -> Result<Strategy<A>, simplex::SimplexError> { let n = self.v.len(); let b = Vector::from_element(n, bval); let c = Vector::from_element(n, cval); let x = simplex::simplex(m, &c, &b)?; let mut mixed_data: Vec<(A, f64)> = self .v .iter() .cloned() .zip(x.into_iter().map(|&x| x)) .collect(); mixed_data.sort_unstable_by(|(_, p), (_, q)| p.partial_cmp(&q).unwrap()); let sum: f64 = mixed_data.iter().map(|(_, p)| p).sum(); for (_, p) in mixed_data.iter_mut() { *p /= sum; } Ok(Strategy::Mixed(mixed_data)) } /// Returns the minimax strategy of the duel graph. /// /// # Errors /// /// If the simplex algorithm fails, returns an error describing the reason /// why. pub fn get_minimax_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> { let mut m = Self::adjacency_to_matrix(&self.a); m.iter_mut().for_each(|e| *e += 2f64); self.compute_strategy(&m, 1f64, -1f64) } /// Returns the maximin strategy of the duel graph. /// /// # Errors /// /// If the simplex algorithm fails, returns an error describing the reason /// why. 
pub fn get_maximin_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> { let mut m = Self::adjacency_to_matrix(&self.a).transpose(); m.iter_mut().for_each(|e| *e = -(*e + 2f64)); self.compute_strategy(&m, -1f64, 1f64) } /// Returns an optimal strategy for the duel graph. /// * If the graph has a source, returns a pure strategy electing it. /// * If the simplex algorithm manages to compute both the minimax and /// maximin strategies, floating-point operations might cause one to score /// slightly higher. Returns the higher-scoring one. /// * If the simplex algorithm only manages to compute one of minimax and /// maximin, returns said strategy. /// /// # Errors /// /// If the simplex algorithm fails to compute both strategies, returns an /// error giving both reasons. pub fn get_optimal_strategy(&self) -> Result<Strategy<A>, ElectionError> { match self.get_source() { Some(x) => Ok(Strategy::Pure(x)), None => match (self.get_minimax_strategy(), self.get_maximin_strategy()) { (Ok(minimax), Ok(maximin)) => { Ok(match self.compare_strategies(&minimax, &maximin) { Ordering::Less => maximin, _ => minimax, }) } (Err(_), Ok(maximin)) => Ok(maximin), (Ok(minimax), Err(_)) => Ok(minimax), (Err(e), Err(f)) => Err(ElectionError::BothFailed(e, f)), }, } } fn strategy_vector(&self, p: &Strategy<A>) -> Vector { match p { Strategy::Pure(x) => Vector::from_iterator( self.v.len(), self.v.iter().map(|e| if e == x { 1f64 } else { 0f64 }), ), Strategy::Mixed(u) => Vector::from_iterator( self.v.len(), self.v .iter() .map(|x| match u.iter().find(|(y, _)| *y == *x) { None => panic!("Alternative not found"), Some((_, p)) => p.clone(), }), ), } } /// Returns a comparating number between two strategies `x` and `y`. If /// negative, then `x` performs worse than `y` for the graph `self`. If /// positive, then `x` performs better than `y` for the graph `self`. 
pub fn confront_strategies(&self, x: &Strategy<A>, y: &Strategy<A>) -> f64 { let m = Self::adjacency_to_matrix(&self.a); let p = self.strategy_vector(x); let q = self.strategy_vector(y); (p.transpose() * m * q)[(0, 0)] } // NOTE: This is numerically unstable /// Compares two strategies for the given graph to determine which one /// scores the better. /// /// Floating-point operations can make this method unsuitable for some /// uses. Consider using `confront_strategies()` with an epsilon instead. pub fn compare_strategies(&self, x: &Strategy<A>, y: &Strategy<A>) -> std::cmp::Ordering { self.confront_strategies(x, y).partial_cmp(&0f64).unwrap() } } /// Implements an election using the Randomized Condorcet Voting System. #[derive(Clone)] pub struct Election<A: Clone + Eq + Hash> { alternatives: HashSet<A>, duels: HashMap<Arrow<A>, u64>, open: bool, } impl<A: Clone + Eq + Hash + fmt::Debug> Election<A> { /// Creates a new empty election. pub fn new() -> Election<A> { Election::<A> { alternatives: HashSet::new(), duels: HashMap::new(), open: true, } } fn get(&self, x: &A, y: &A) -> Option<u64> { self.duels .get(&Arrow::<A>(x.to_owned(), y.to_owned())) .cloned() } /// Closes the election, preventing the casting of ballots. pub fn close(&mut self) { self.open = false; } /// Attemps to cast a ballot. Returns `true` if the casting was successful /// and `false` if it was not (which only happens if the election is /// closed). /// /// Casting an alternative that is not in the set of the alternatives of /// the election will add it to the set; if the electors are not supposed /// to be able to add their own alternatives, enforcing this rule is at the /// responsibility of the programmer using the structure. 
pub fn cast(&mut self, ballot: Ballot<A>) -> bool { if !self.open { return false; } for x in ballot.iter() { let (a, r) = x; self.alternatives.insert(a.to_owned()); for y in ballot.iter() { let (b, s) = y; self.alternatives.insert(b.to_owned()); if r > s { let n = self.get(a, b).unwrap_or(0) + 1; self.duels.insert(Arrow::<A>(a.to_owned(), b.to_owned()), n); } } } true } /// Attempts to agregate an election `sub` into the main election `self`, /// merging their lists of alternatives and duels. Returns `true` if the /// merging was possible, or `false` if it failed. /// /// Agregating `sub` into `self` requires `sub` to be closed and `self` to /// be open. pub fn agregate(&mut self, sub: Election<A>) -> bool { if !self.open || sub.open { return false; } for x in sub.alternatives.into_iter() { self.alternatives.insert(x); } for (Arrow::<A>(x, y), m) in sub.duels.into_iter() { let n = m + self.get(&x, &y).unwrap_or(0); self.duels.insert(Arrow::<A>(x, y), n); } true } /// Attempts to normalize an election. If the election is still open, this /// method does nothing. Normalizing means setting the election's internal /// state so that it reflects what the duel graph would be. In other /// words, if the election counted that `a` electors prefer `A` over `B` /// and `b` electors prefer `B` over `A`, then: /// * if `a > b`, then it will be as if it only counted one elector /// prefering `A` over `B`; /// * if `b > a`, then it will be as if it only counted one elector /// prefering `B` over `A`; /// * if `a == b`, then it will be as if no elector ever compared `A` to /// `B`. /// /// Since this method requires the election to be closed, it cannot be /// used to mess with a direct election. This method is intended to be used /// with `agregate()` to carry out elections working like the American /// Electoral College. /// /// Normalizing an election before computing its duel graph is not /// necessary. 
/// /// # Example /// /// ``` /// # use rcvs::Election; /// let mut sub_a = Election::new(); /// // -- carry out election sub_a -- /// # sub_a.add_alternative(&"Alpha"); /// sub_a.close(); /// /// let mut sub_b = Election::new(); /// // -- carry out election sub_b -- /// # sub_b.add_alternative(&"Alpha"); /// sub_b.close(); /// /// /* /// * normalize both elections so that the main election treats them /// * equally /// */ /// sub_a.normalize(); /// sub_b.normalize(); /// /// // agregate both elections into a main election /// let mut e = Election::new(); /// e.agregate(sub_a); /// e.agregate(sub_b); /// e.close(); /// ``` pub fn normalize(&mut self) { if self.open { return; } for x in self.alternatives.iter() { for y in self.alternatives.iter() { let xy = Arrow::<A>(x.clone(), y.clone()); let yx = Arrow::<A>(y.clone(), x.clone()); // Dirty workaround for the fact `if let` borrows self.duels let m; if let Some(k) = self.duels.get(&xy) { m = k.clone(); } else { continue; } let n; if let Some(k) = self.duels.get(&yx) { n = k.clone(); } else { continue; } match m.cmp(&n) { Ordering::Less => { self.duels.remove(&xy); self.duels.insert(yx, 1); } Ordering::Equal => { self.duels.remove(&xy); self.duels.remove(&yx); } Ordering::Greater => { self.duels.insert(xy, 1); self.duels.remove(&yx); } } } } } /// Adds an alternative to the set of alternatives without casting any /// vote. Returns `true` if the addition was successful, and `false` if the /// election is closed or if the alternative was already present. pub fn add_alternative(&mut self, v: &A) -> bool { if !self.open { return false; } self.alternatives.insert(v.to_owned()) } /// Returns the duel graph of an election. A duel graph may be computed /// before the election is closed, giving information on a partial result /// of the election. 
pub fn get_duel_graph(&self) -> DuelGraph<A> { let v: Vec<A> = self.alternatives.iter().cloned().collect(); let n = v.len(); let mut a = Adjacency::from_element(n, n, false); for (i, x) in v.iter().enumerate() { for (j, y) in v.iter().enumerate() { match (self.get(x, y), self.get(y, x)) { (Some(m), Some(n)) if m > n => a[(i, j)] = true, (Some(_), None) => a[(i, j)] = true, _ => (), } } } DuelGraph { v: v, a: a } } /// Decides if `x` is already in the set of alternatives known to the /// election. For an alternative to be there, at least one ballot involving /// it must have been cast, or it must have been manually added with the /// method `add_alternative()`. pub fn has_alternative(&self, x: &A) -> bool { self.alternatives.contains(x) } /// Returns the Condorcet winner of the election if it exists, `None` /// otherwise. /// /// Internally, this method computes the duel graph of the election. /// Instead of calling several methods that do it in the same scope, /// consider computing the duel graph separately and operating on it. pub fn get_condorcet_winner(&self) -> Option<A> { self.get_duel_graph().get_source() } /// Returns the Condorcet loser of the election if it exists, `None` /// otherwise. /// /// Internally, this method computes the duel graph of the election. /// Instead of calling several methods that do it in the same scope, /// consider computing the duel graph separately and operating on it. pub fn get_condorcet_loser(&self) -> Option<A> { self.get_duel_graph().get_sink() } /// Returns the minimax strategy of the election. /// /// Internally, this method computes the duel graph of the election. /// Instead of calling several methods that do it in the same scope, /// consider computing the duel graph separately and operating on it. /// /// # Errors /// /// If the simplex algorithm fails to compute the strategy, an error /// describing the reason why is returned. 
pub fn get_minimax_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> { self.get_duel_graph().get_minimax_strategy() } /// Returns the maximin strategy of the election. /// /// Internally, this method computes the duel graph of the election. /// Instead of calling several methods that do it in the same scope, /// consider computing the duel graph separately and operating on it. /// /// # Errors /// /// If the simplex algorithm fails to compute the strategy, an error /// describing the reason why is returned. pub fn get_maximin_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> { self.get_duel_graph().get_maximin_strategy() } /// Returns the optimal strategy of the election. /// /// Internally, this method computes the duel graph of the election. /// Instead of calling several methods that do it in the same scope, /// consider computing the duel graph separately and operating on it. /// /// # Errors /// /// If the election has no Condorcet winner and the simplex algorithm fails /// to compute both the minimax and maximin strategies, an error describing /// both failures is returned. pub fn get_optimal_strategy(&self) -> Result<Strategy<A>, ElectionError> { self.get_duel_graph().get_optimal_strategy() } /// Elects the winner of the election using the optimal strategy. /// /// Internally, this method computes the duel graph of the election. /// Instead of calling several methods that do it in the same scope, /// consider computing the duel graph separately and operating on it. /// /// # Errors /// /// If the election has no Condorcet winner and the simplex algorithm fails /// to compute both the minimax and maximin strategies, an error describing /// both failures is returned. 
pub fn get_randomized_winner( &self, rng: &mut impl rand::Rng, ) -> Result<Option<A>, ElectionError> { Ok(self.get_optimal_strategy()?.play(rng)) } } impl<A: Clone + Eq + Hash + fmt::Display> fmt::Display for Election<A> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Election {{")?; for x in self.duels.iter() { let (Arrow::<A>(a, b), n) = x; writeln!(f, " {} beats {} {} times", a, b, n)?; } write!(f, "}}") } } pub fn build_graph<A, I, J>(alternatives: I, ballots: J) -> DuelGraph<A> where A: Clone + Eq + fmt::Debug + Hash, I: Iterator<Item = A>, J: Iterator<Item = ballot::Ballot<A>>, { let mut election = Election::new(); for alternative in alternatives { election.add_alternative(&alternative); } for ballot in ballots { election.cast(ballot); } election.get_duel_graph() } #[cfg(test)] mod tests { use super::*; fn random_graph(names: &[String]) -> DuelGraph<String> { let n = rand::random::<usize>() % names.len() + 1; let v = names.iter().take(n).cloned().collect(); let mut a = Adjacency::from_element(n, n, false); for i in 1..n { for j in 0..i { if rand::random::<f64>() < 0.5f64 { a[(i, j)] = true; } else if rand::random::<f64>() < 0.5f64 { a[(j, i)] = true; } } } DuelGraph { v: v, a: a } } #[test] fn source_strategy() { let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]; for n in 1..=names.len() { for _ in 0..100 { let mut m = Adjacency::from_element(n, n, false); (0..n).for_each(|i| { (0..i).for_each(|j| { if rand::random::<f64>() < 0.5f64 { m[(i, j)] = true; } else { m[(j, i)] = true; } }) }); let s = rand::random::<usize>() % n; (0..n).filter(|i| *i != s).for_each(|i| { m[(s, i)] = true; m[(i, s)] = false; }); let g = DuelGraph { v: names.iter().cloned().take(n).collect(), a: m, }; let w; match g.get_source() { Some(x) => w = x, None => panic!("No source in graph {}", g), } assert!( g.get_minimax_strategy() .unwrap() .almost_chooses(&w.to_string(), 1e-6), "Minimax doesn't choose {}", w ); assert!( 
g.get_maximin_strategy() .unwrap() .almost_chooses(&w.to_string(), 1e-6), "Minimax doesn't choose {}", w ); assert!( g.get_optimal_strategy().unwrap().is_pure(), "Optimal strategy is mixed" ); } } } #[test] fn condorcet_paradox() { let mut e = Election::<String>::new(); let mut b = vec![ Ballot::<String>::new(), Ballot::<String>::new(), Ballot::<String>::new(), ]; let names = string_vec!["Alpha", "Bravo", "Charlie"]; for (i, b) in b.iter_mut().enumerate() { for j in 0u64..3u64 { assert!( b.insert(names[(i + (j as usize)) % 3].to_owned(), j, j), "add_entry failed" ); } } for b in b.iter().cloned() { e.cast(b); } let g = e.get_duel_graph(); assert_eq!(g.get_source(), None); assert_eq!(g.get_sink(), None); assert!( g.get_optimal_strategy().unwrap().is_uniform(&names, 1e-6), "Non uniform strategy for Condorcet paradox" ); } // Last name commented out for convenience (doubles testing time) #[test] fn tournament() { let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo" /*, "Foxtrot"*/]; for n in 1..=names.len() { println!("Size {}", n); let v: Vec<String> = names.iter().take(n).cloned().collect(); let mut a = Adjacency::from_element(n, n, false); (0..(n - 1)).for_each(|i| ((i + 1)..n).for_each(|j| a[(i, j)] = true)); loop { // Test graph let g = DuelGraph::<String> { v: v.clone(), a: a.clone(), }; match (g.get_minimax_strategy(), g.get_maximin_strategy()) { (Ok(minimax), Ok(maximin)) => { for _ in 0..100 { let p = Strategy::random_mixed(&v, &mut rand::thread_rng()); let vminimax = g.confront_strategies(&minimax, &p); let vmaximin = g.confront_strategies(&maximin, &p); if vminimax < -1e-6 && vmaximin < -1e-6 { panic!( "{:?} beats both:\n * minimax by {}\n{:?}\n * maximin by {}\n{:?}", p, vminimax, minimax, vmaximin, maximin ); } } } (Err(e), Ok(maximin)) => { println!("{}\nMinimax failed: {}", g, e); for _ in 0..100 { let p = Strategy::random_mixed(&v, &mut rand::thread_rng()); let v = g.confront_strategies(&maximin, &p); if v < -1e-6 { panic!("{:?} beats 
maximin by {}\n{:?}", p, v, maximin); } } } (Ok(minimax), Err(e)) => { println!("{}\nMaximin failed: {}", g, e); for _ in 0..100 { let p = Strategy::random_mixed(&v, &mut rand::thread_rng()); let v = g.confront_strategies(&minimax, &p); if v < -1e-6 { panic!("{:?} beats minimax by {}\n{:?}", p, v, minimax); } } } (Err(e), Err(f)) => { panic!("{}\nBoth failed:\n * minimax: {}\n * maximin: {}", g, e, f) } }; // Next graph let mut carry = true; for i in 1..n { for j in 0..i { if !carry { break; } if a[(i, j)] { a[(i, j)] = false; a[(j, i)] = true; } else { a[(i, j)] = true; a[(j, i)] = false; carry = false; } } } // Stop test if (1..n).all(|i| (0..i).all(|j| !a[(i, j)])) { break; } } } } /* * NOTE: * Wasn't observed to fail anymore after fixing simplex; keep an eye on it * anyway... */ #[test] fn optimal_strategy() { let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]; for _pass in 0..1000 { println!("Pass {}", _pass); let g = random_graph(&names); println!("{}", g); match (g.get_minimax_strategy(), g.get_maximin_strategy()) { (Ok(minimax), Ok(maximin)) => { let opt = g.get_optimal_strategy().unwrap(); assert!( g.confront_strategies(&opt, &minimax) > -1e-6, "Minimax beats optimal strategy" ); assert!( g.confront_strategies(&opt, &maximin) > -1e-6, "Maximin beats optimal strategy" ); } (Ok(minimax), Err(e)) => { println!("Maximin failed: {}", e); let opt = g.get_optimal_strategy().unwrap(); assert!( g.confront_strategies(&opt, &minimax) > -1e-6, "Minimax beats optimal strategy" ); } (Err(e), Ok(maximin)) => { println!("Minimax failed: {}", e); let opt = g.get_optimal_strategy().unwrap(); assert!( g.confront_strategies(&opt, &maximin) > -1e-6, "Maximin beats optimal strategy" ); } (Err(e), Err(f)) => panic!("Both failed: {}\n{}", e, f), } } } fn random_ballot(v: &[String]) -> Ballot<String> { let mut b = Ballot::<String>::new(); for x in v.iter() { let s = rand::random::<u64>(); let r = rand::random::<u64>() % (s + 1); assert!( 
b.insert(x.to_string(), r, s), "Insert ({}, {}) failed", r, s ); } b } #[test] fn agregate() { let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]; for _ in 0..50 { let mut e = Election::<String>::new(); let mut sum = Election::<String>::new(); let num_district = rand::random::<u64>() % 49 + 2; for _ in 0..num_district { let mut f = Election::<String>::new(); let num_ballot = rand::random::<u64>() % 100; for _ in 0..num_ballot { let b = random_ballot(&names); e.cast(b.clone()); f.cast(b); } f.close(); sum.agregate(f); } // e and sum must be identical assert_eq!(e.alternatives, sum.alternatives, "Alternative lists differ"); for (a, n) in e.duels.into_iter() { match sum.duels.get(&a) { Some(m) => assert_eq!(*m, n, "{:?} is {} in e but {} in sum", a, n, m), None => panic!("{:?} isn't in sum", a), } } } } #[test] fn normalize() { let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]; for _pass in 0..100 { let mut e = Election::<String>::new(); for _ in 0..500 { e.cast(random_ballot(&names)); } let mut n = e.clone(); n.close(); n.normalize(); for x in n.alternatives.iter() { let xx = Arrow::<String>(x.to_string(), x.to_string()); assert_eq!(n.duels.get(&xx), None, "{} wins over itself", x); for y in n.alternatives.iter() { let xy = Arrow::<String>(x.to_string(), y.to_string()); let yx = Arrow::<String>(y.to_string(), x.to_string()); if let Some(m) = n.duels.get(&xy) { assert_eq!(n.duels.get(&yx), None, "{} and {} loop", x, y); assert_eq!(*m, 1, "Normalized election has {}", m); if let Some(n) = e.duels.get(&yx) { assert!(e.duels.get(&xy).unwrap() > n, "Backward normalization"); } } } } } } #[test] fn iterators() { let mut b = vec![ Ballot::<String>::new(), Ballot::<String>::new(), Ballot::<String>::new(), ]; let names = string_vec!["Alpha", "Bravo", "Charlie"]; for (i, b) in b.iter_mut().enumerate() { for j in 0u64..3u64 { assert!( b.insert(names[(i + (j as usize)) % 3].to_owned(), j, j), "add_entry failed" ); } } 
let g = build_graph(names.iter().cloned(), b.into_iter()); assert_eq!(g.get_source(), None); assert_eq!(g.get_sink(), None); assert!( g.get_optimal_strategy().unwrap().is_uniform(&names, 1e-6), "Non uniform strategy for Condorcet paradox" ); } }
36.769948
103
0.498351
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn source_strategy() {\n let names = string_vec![\"Alpha\", \"Bravo\", \"Charlie\", \"Delta\", \"Echo\", \"Foxtrot\"];\n for n in 1..=names.len() {\n for _ in 0..100 {\n let mut m = Adjacency::from_element(n, n, false);\n (0..n).for_each(|i| {\n (0..i).for_each(|j| {\n if rand::random::<f64>() < 0.5f64 {\n m[(i, j)] = true;\n } else {\n m[(j, i)] = true;\n }\n })\n });\n let s = rand::random::<usize>() % n;\n (0..n).filter(|i| *i != s).for_each(|i| {\n m[(s, i)] = true;\n m[(i, s)] = false;\n });\n let g = DuelGraph {\n v: names.iter().cloned().take(n).collect(),\n a: m,\n };\n let w;\n match g.get_source() {\n Some(x) => w = x,\n None => panic!(\"No source in graph {}\", g),\n }\n assert!(\n g.get_minimax_strategy()\n .unwrap()\n .almost_chooses(&w.to_string(), 1e-6),\n \"Minimax doesn't choose {}\",\n w\n );\n assert!(\n g.get_maximin_strategy()\n .unwrap()\n .almost_chooses(&w.to_string(), 1e-6),\n \"Minimax doesn't choose {}\",\n w\n );\n assert!(\n g.get_optimal_strategy().unwrap().is_pure(),\n \"Optimal strategy is mixed\"\n );\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn condorcet_paradox() {\n let mut e = Election::<String>::new();\n let mut b = vec![\n Ballot::<String>::new(),\n Ballot::<String>::new(),\n Ballot::<String>::new(),\n ];\n let names = string_vec![\"Alpha\", \"Bravo\", \"Charlie\"];\n for (i, b) in b.iter_mut().enumerate() {\n for j in 0u64..3u64 {\n assert!(\n b.insert(names[(i + (j as usize)) % 3].to_owned(), j, j),\n \"add_entry failed\"\n );\n }\n }\n for b in b.iter().cloned() {\n e.cast(b);\n }\n let g = e.get_duel_graph();\n assert_eq!(g.get_source(), None);\n assert_eq!(g.get_sink(), None);\n assert!(\n g.get_optimal_strategy().unwrap().is_uniform(&names, 1e-6),\n \"Non uniform strategy for Condorcet paradox\"\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn tournament() {\n let names = string_vec![\"Alpha\", \"Bravo\", 
\"Charlie\", \"Delta\", \"Echo\" /*, \"Foxtrot\"*/];\n for n in 1..=names.len() {\n println!(\"Size {}\", n);\n let v: Vec<String> = names.iter().take(n).cloned().collect();\n let mut a = Adjacency::from_element(n, n, false);\n (0..(n - 1)).for_each(|i| ((i + 1)..n).for_each(|j| a[(i, j)] = true));\n loop {\n // Test graph\n let g = DuelGraph::<String> {\n v: v.clone(),\n a: a.clone(),\n };\n match (g.get_minimax_strategy(), g.get_maximin_strategy()) {\n (Ok(minimax), Ok(maximin)) => {\n for _ in 0..100 {\n let p = Strategy::random_mixed(&v, &mut rand::thread_rng());\n let vminimax = g.confront_strategies(&minimax, &p);\n let vmaximin = g.confront_strategies(&maximin, &p);\n if vminimax < -1e-6 && vmaximin < -1e-6 {\n panic!(\n \"{:?} beats both:\\n * minimax by {}\\n{:?}\\n * maximin by {}\\n{:?}\",\n p,\n vminimax,\n minimax,\n vmaximin,\n maximin\n );\n }\n }\n }\n (Err(e), Ok(maximin)) => {\n println!(\"{}\\nMinimax failed: {}\", g, e);\n for _ in 0..100 {\n let p = Strategy::random_mixed(&v, &mut rand::thread_rng());\n let v = g.confront_strategies(&maximin, &p);\n if v < -1e-6 {\n panic!(\"{:?} beats maximin by {}\\n{:?}\", p, v, maximin);\n }\n }\n }\n (Ok(minimax), Err(e)) => {\n println!(\"{}\\nMaximin failed: {}\", g, e);\n for _ in 0..100 {\n let p = Strategy::random_mixed(&v, &mut rand::thread_rng());\n let v = g.confront_strategies(&minimax, &p);\n if v < -1e-6 {\n panic!(\"{:?} beats minimax by {}\\n{:?}\", p, v, minimax);\n }\n }\n }\n (Err(e), Err(f)) => {\n panic!(\"{}\\nBoth failed:\\n * minimax: {}\\n * maximin: {}\", g, e, f)\n }\n };\n // Next graph\n let mut carry = true;\n for i in 1..n {\n for j in 0..i {\n if !carry {\n break;\n }\n if a[(i, j)] {\n a[(i, j)] = false;\n a[(j, i)] = true;\n } else {\n a[(i, j)] = true;\n a[(j, i)] = false;\n carry = false;\n }\n }\n }\n // Stop test\n if (1..n).all(|i| (0..i).all(|j| !a[(i, j)])) {\n break;\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn optimal_strategy() {\n 
let names = string_vec![\"Alpha\", \"Bravo\", \"Charlie\", \"Delta\", \"Echo\", \"Foxtrot\"];\n for _pass in 0..1000 {\n println!(\"Pass {}\", _pass);\n let g = random_graph(&names);\n println!(\"{}\", g);\n match (g.get_minimax_strategy(), g.get_maximin_strategy()) {\n (Ok(minimax), Ok(maximin)) => {\n let opt = g.get_optimal_strategy().unwrap();\n assert!(\n g.confront_strategies(&opt, &minimax) > -1e-6,\n \"Minimax beats optimal strategy\"\n );\n assert!(\n g.confront_strategies(&opt, &maximin) > -1e-6,\n \"Maximin beats optimal strategy\"\n );\n }\n (Ok(minimax), Err(e)) => {\n println!(\"Maximin failed: {}\", e);\n let opt = g.get_optimal_strategy().unwrap();\n assert!(\n g.confront_strategies(&opt, &minimax) > -1e-6,\n \"Minimax beats optimal strategy\"\n );\n }\n (Err(e), Ok(maximin)) => {\n println!(\"Minimax failed: {}\", e);\n let opt = g.get_optimal_strategy().unwrap();\n assert!(\n g.confront_strategies(&opt, &maximin) > -1e-6,\n \"Maximin beats optimal strategy\"\n );\n }\n (Err(e), Err(f)) => panic!(\"Both failed: {}\\n{}\", e, f),\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn agregate() {\n let names = string_vec![\"Alpha\", \"Bravo\", \"Charlie\", \"Delta\", \"Echo\", \"Foxtrot\"];\n for _ in 0..50 {\n let mut e = Election::<String>::new();\n let mut sum = Election::<String>::new();\n let num_district = rand::random::<u64>() % 49 + 2;\n for _ in 0..num_district {\n let mut f = Election::<String>::new();\n let num_ballot = rand::random::<u64>() % 100;\n for _ in 0..num_ballot {\n let b = random_ballot(&names);\n e.cast(b.clone());\n f.cast(b);\n }\n f.close();\n sum.agregate(f);\n }\n // e and sum must be identical\n assert_eq!(e.alternatives, sum.alternatives, \"Alternative lists differ\");\n for (a, n) in e.duels.into_iter() {\n match sum.duels.get(&a) {\n Some(m) => assert_eq!(*m, n, \"{:?} is {} in e but {} in sum\", a, n, m),\n None => panic!(\"{:?} isn't in sum\", a),\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests 
{\n use super::*;\n #[test]\n fn normalize() {\n let names = string_vec![\"Alpha\", \"Bravo\", \"Charlie\", \"Delta\", \"Echo\", \"Foxtrot\"];\n for _pass in 0..100 {\n let mut e = Election::<String>::new();\n for _ in 0..500 {\n e.cast(random_ballot(&names));\n }\n let mut n = e.clone();\n n.close();\n n.normalize();\n for x in n.alternatives.iter() {\n let xx = Arrow::<String>(x.to_string(), x.to_string());\n assert_eq!(n.duels.get(&xx), None, \"{} wins over itself\", x);\n for y in n.alternatives.iter() {\n let xy = Arrow::<String>(x.to_string(), y.to_string());\n let yx = Arrow::<String>(y.to_string(), x.to_string());\n if let Some(m) = n.duels.get(&xy) {\n assert_eq!(n.duels.get(&yx), None, \"{} and {} loop\", x, y);\n assert_eq!(*m, 1, \"Normalized election has {}\", m);\n if let Some(n) = e.duels.get(&yx) {\n assert!(e.duels.get(&xy).unwrap() > n, \"Backward normalization\");\n }\n }\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn iterators() {\n let mut b = vec![\n Ballot::<String>::new(),\n Ballot::<String>::new(),\n Ballot::<String>::new(),\n ];\n let names = string_vec![\"Alpha\", \"Bravo\", \"Charlie\"];\n for (i, b) in b.iter_mut().enumerate() {\n for j in 0u64..3u64 {\n assert!(\n b.insert(names[(i + (j as usize)) % 3].to_owned(), j, j),\n \"add_entry failed\"\n );\n }\n }\n let g = build_graph(names.iter().cloned(), b.into_iter());\n assert_eq!(g.get_source(), None);\n assert_eq!(g.get_sink(), None);\n assert!(\n g.get_optimal_strategy().unwrap().is_uniform(&names, 1e-6),\n \"Non uniform strategy for Condorcet paradox\"\n );\n }\n}" ]
f706661713e9db3e8f637bb680f03b51963600e0
21,818
rs
Rust
src/proc/bin/starnix/fs/namespace.rs
sffc/fuchsia-clone
633553d647314c9032f75d430c9377b9f6cfc26e
[ "BSD-2-Clause" ]
null
null
null
src/proc/bin/starnix/fs/namespace.rs
sffc/fuchsia-clone
633553d647314c9032f75d430c9377b9f6cfc26e
[ "BSD-2-Clause" ]
1
2022-03-01T01:12:04.000Z
2022-03-01T01:17:26.000Z
src/proc/bin/starnix/fs/namespace.rs
sffc/fuchsia-clone
633553d647314c9032f75d430c9377b9f6cfc26e
[ "BSD-2-Clause" ]
null
null
null
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use std::collections::HashMap; use std::fmt; use std::hash::{Hash, Hasher}; use std::sync::{Arc, Weak}; use once_cell::sync::OnceCell; use parking_lot::RwLock; use super::devfs::dev_tmp_fs; use super::devpts::DevptsFs; use super::proc::proc_fs; use super::sysfs::sys_fs; use super::tmpfs::TmpFs; use super::*; use crate::error; use crate::selinux::selinux_fs; use crate::task::{CurrentTask, Kernel}; use crate::types::*; /// A mount namespace. /// /// The namespace records at which entries filesystems are mounted. pub struct Namespace { root_mount: OnceCell<MountHandle>, // The value in this hashmap is a vector because multiple mounts can be stacked on top of the // same mount point. The last one in the vector shadows the others and is used for lookups. // Unmounting will remove the last entry. Mounting will add an entry. mount_points: RwLock<HashMap<NamespaceNode, Vec<MountHandle>>>, } impl Namespace { pub fn new(fs: FileSystemHandle) -> Arc<Namespace> { // TODO(tbodt): We can avoid this OnceCell thing by using Arc::new_cyclic, but that's // unstable. let namespace = Arc::new(Self { root_mount: OnceCell::new(), mount_points: RwLock::new(HashMap::new()), }); let root = fs.root().clone(); if namespace .root_mount .set(Arc::new(Mount { namespace: Arc::downgrade(&namespace), mountpoint: None, root, _flags: MountFlags::empty(), _fs: fs, })) .is_err() { panic!("there's no way namespace.root_mount could have been set"); } namespace } pub fn root(&self) -> NamespaceNode { self.root_mount.get().unwrap().root() } } impl fmt::Debug for Namespace { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Namespace") .field("root_mount", &self.root_mount) .field("mount_points", &self.mount_points.read()) .finish() } } /// An instance of a filesystem mounted in a namespace. 
/// /// At a mount, path traversal switches from one filesystem to another. /// The client sees a composed directory structure that glues together the /// directories from the underlying FsNodes from those filesystems. struct Mount { namespace: Weak<Namespace>, mountpoint: Option<(Weak<Mount>, DirEntryHandle)>, root: DirEntryHandle, _flags: MountFlags, _fs: FileSystemHandle, } type MountHandle = Arc<Mount>; impl Mount { pub fn root(self: &MountHandle) -> NamespaceNode { NamespaceNode { mount: Some(Arc::clone(self)), entry: Arc::clone(&self.root) } } fn mountpoint(&self) -> Option<NamespaceNode> { let (ref mount, ref node) = &self.mountpoint.as_ref()?; Some(NamespaceNode { mount: Some(mount.upgrade()?), entry: node.clone() }) } } impl fmt::Debug for Mount { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Mount") .field("id", &(self as *const Mount)) .field("mountpoint", &self.mountpoint) .field("root", &self.root) .finish() } } pub enum WhatToMount { Fs(FileSystemHandle), Dir(DirEntryHandle), } pub fn create_filesystem( kernel: &Arc<Kernel>, _source: &FsStr, fs_type: &FsStr, _data: &FsStr, ) -> Result<WhatToMount, Errno> { use WhatToMount::*; Ok(match fs_type { b"devfs" => Fs(dev_tmp_fs(kernel).clone()), b"devpts" => Fs(DevptsFs::new()), b"proc" => Fs(proc_fs(kernel.clone())), b"selinuxfs" => Fs(selinux_fs(kernel).clone()), b"sysfs" => Fs(sys_fs(kernel).clone()), b"tmpfs" => Fs(TmpFs::new()), _ => return error!(ENODEV), }) } /// The `SymlinkMode` enum encodes how symlinks are followed during path traversal. #[derive(PartialEq, Eq, Copy, Clone, Debug)] /// Whether to follow a symlink at the end of a path resolution. pub enum SymlinkMode { /// Follow a symlink at the end of a path resolution. Follow, /// Do not follow a symlink at the end of a path resolution. NoFollow, } /// The maximum number of symlink traversals that can be made during path resolution. const MAX_SYMLINK_FOLLOWS: u8 = 40; /// The context passed during namespace lookups. 
/// /// Namespace lookups need to mutate a shared context in order to correctly /// count the number of remaining symlink traversals. pub struct LookupContext { /// The SymlinkMode for the lookup. /// /// As the lookup proceeds, the follow count is decremented each time the /// lookup traverses a symlink. pub symlink_mode: SymlinkMode, /// The number of symlinks remaining the follow. /// /// Each time path resolution calls readlink, this value is decremented. pub remaining_follows: u8, /// Whether the result of the lookup must be a directory. /// /// For example, if the path ends with a `/` or if userspace passes /// O_DIRECTORY. This flag can be set to true if the lookup encounters a /// symlink that ends with a `/`. pub must_be_directory: bool, } impl LookupContext { pub fn new(symlink_mode: SymlinkMode) -> LookupContext { LookupContext { symlink_mode, remaining_follows: MAX_SYMLINK_FOLLOWS, must_be_directory: false, } } pub fn with(&self, symlink_mode: SymlinkMode) -> LookupContext { LookupContext { symlink_mode, remaining_follows: self.remaining_follows, must_be_directory: self.must_be_directory, } } pub fn update_for_path<'a>(&mut self, path: &'a FsStr) -> &'a FsStr { if path.last() == Some(&b'/') { self.must_be_directory = true; trim_trailing_slashes(path) } else { path } } } impl Default for LookupContext { fn default() -> Self { LookupContext::new(SymlinkMode::Follow) } } fn trim_trailing_slashes(path: &FsStr) -> &FsStr { path.iter().rposition(|c| *c != b'/').map(|last| &path[..(last + 1)]).unwrap_or(b"") } /// A node in a mount namespace. /// /// This tree is a composite of the mount tree and the FsNode tree. /// /// These nodes are used when traversing paths in a namespace in order to /// present the client the directory structure that includes the mounted /// filesystems. #[derive(Clone)] pub struct NamespaceNode { /// The mount where this namespace node is mounted. /// /// A given FsNode can be mounted in multiple places in a namespace. 
This /// field distinguishes between them. mount: Option<MountHandle>, /// The FsNode that corresponds to this namespace entry. pub entry: DirEntryHandle, } impl NamespaceNode { /// Create a namespace node that is not mounted in a namespace. /// /// The returned node does not have a name. pub fn new_anonymous(node: FsNodeHandle) -> Self { Self { mount: None, entry: DirEntry::new(node, None, FsString::new()) } } /// Create a FileObject corresponding to this namespace node. /// /// This function is the primary way of instantiating FileObjects. Each /// FileObject records the NamespaceNode that created it in order to /// remember its path in the Namespace. pub fn open(&self, kernel: &Kernel, flags: OpenFlags) -> Result<FileHandle, Errno> { Ok(FileObject::new(self.entry.node.open(kernel, flags)?, self.clone(), flags)) } pub fn create_node( &self, name: &FsStr, mode: FileMode, dev: DeviceType, ) -> Result<NamespaceNode, Errno> { Ok(self.with_new_entry(self.entry.create_node(name, mode, dev)?)) } pub fn symlink(&self, name: &FsStr, target: &FsStr) -> Result<NamespaceNode, Errno> { Ok(self.with_new_entry(self.entry.create_symlink(name, target)?)) } pub fn unlink(&self, name: &FsStr, kind: UnlinkKind) -> Result<(), Errno> { if DirEntry::is_reserved_name(name) { match kind { UnlinkKind::Directory => { if name == b".." { error!(ENOTEMPTY) } else if self.parent().is_none() { // The client is attempting to remove the root. error!(EBUSY) } else { error!(EINVAL) } } UnlinkKind::NonDirectory => error!(ENOTDIR), } } else { DirEntry::unlink(self, name, kind) } } /// Traverse down a parent-to-child link in the namespace. pub fn lookup_child( &self, current_task: &CurrentTask, context: &mut LookupContext, basename: &FsStr, ) -> Result<NamespaceNode, Errno> { if !self.entry.node.is_dir() { error!(ENOTDIR) } else if basename == b"." || basename == b"" { Ok(self.clone()) } else if basename == b".." 
{ // TODO: make sure this can't escape a chroot Ok(self.parent().unwrap_or_else(|| self.clone())) } else { let mut child = self.with_new_entry(self.entry.component_lookup(basename)?); while child.entry.node.info().mode.is_lnk() { match context.symlink_mode { SymlinkMode::NoFollow => { break; } SymlinkMode::Follow => { if context.remaining_follows == 0 { return error!(ELOOP); } context.remaining_follows -= 1; child = match child.entry.node.readlink(current_task)? { SymlinkTarget::Path(link_target) => { let link_directory = if link_target[0] == b'/' { current_task.fs.root.clone() } else { self.clone() }; current_task.lookup_path(context, link_directory, &link_target)? } SymlinkTarget::Node(node) => node, } } }; } if let Some(namespace) = self.namespace() { if let Some(mounts_at_point) = namespace.mount_points.read().get(&child) { if let Some(mount) = mounts_at_point.last() { return Ok(mount.root()); } } } Ok(child) } } /// Whether the child is a mount point in this NamespaceNode. pub fn child_is_mountpoint(&self, child_entry: &DirEntryHandle) -> bool { let child = self.with_new_entry(child_entry.clone()); self.namespace().map(|ns| ns.mount_points.read().contains_key(&child)).unwrap_or(false) } /// If this is the root of a mount, go up a level and return the mount point. Otherwise return /// the same node. /// /// This is not exactly the same as parent(). If parent() is called on a root, it will escape /// the mount, but then return the parent of the mount point instead of the mount point. pub fn escape_mount(&self) -> NamespaceNode { self.mountpoint().unwrap_or_else(|| self.clone()) } /// Traverse up a child-to-parent link in the namespace. /// /// This traversal matches the child-to-parent link in the underlying /// FsNode except at mountpoints, where the link switches from one /// filesystem to another. 
pub fn parent(&self) -> Option<NamespaceNode> { let mountpoint_or_self = self.escape_mount(); Some(mountpoint_or_self.with_new_entry(mountpoint_or_self.entry.parent()?.clone())) } /// Returns the mountpoint at this location in the namespace. /// /// If this node is mounted in another node, this function returns the node /// at which this node is mounted. Otherwise, returns None. fn mountpoint(&self) -> Option<NamespaceNode> { if let Some(mount) = &self.mount { if Arc::ptr_eq(&self.entry, &mount.root) { return mount.mountpoint(); } } None } /// The path from the root of the namespace to this node. pub fn path(&self) -> FsString { if self.mount.is_none() { return self.entry.local_name().to_vec(); } let mut components = vec![]; let mut current_task = self.mountpoint().unwrap_or_else(|| self.clone()); while let Some(parent) = current_task.parent() { components.push(current_task.entry.local_name().to_vec()); current_task = parent.mountpoint().unwrap_or(parent); } if components.is_empty() { return b"/".to_vec(); } components.push(vec![]); components.reverse(); components.join(&b'/') } pub fn mount(&self, root: WhatToMount, flags: MountFlags) -> Result<(), Errno> { if let Some(namespace) = self.namespace() { let mut mounts_hash = namespace.mount_points.write(); let mounts_at_point = mounts_hash.entry(self.clone()).or_default(); let mount = self.mount.as_ref().unwrap(); let (fs, root) = match root { WhatToMount::Fs(fs) => { let root = fs.root().clone(); (fs, root) } WhatToMount::Dir(entry) => (entry.node.fs(), entry), }; mounts_at_point.push(Arc::new(Mount { namespace: mount.namespace.clone(), mountpoint: Some((Arc::downgrade(&mount), self.entry.clone())), root, _flags: flags, _fs: fs, })); Ok(()) } else { error!(EBUSY) } } pub fn mount_eq(a: &NamespaceNode, b: &NamespaceNode) -> bool { a.mount.as_ref().map(Arc::as_ptr) == b.mount.as_ref().map(Arc::as_ptr) } fn with_new_entry(&self, entry: DirEntryHandle) -> NamespaceNode { NamespaceNode { mount: self.mount.clone(), entry 
} } fn namespace(&self) -> Option<Arc<Namespace>> { self.mount.as_ref().and_then(|mount| mount.namespace.upgrade()) } } impl fmt::Debug for NamespaceNode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("NamespaceNode") .field("path", &String::from_utf8_lossy(&self.path())) .field("mount", &self.mount) .field("entry", &self.entry) .finish() } } // Eq/Hash impls intended for the MOUNT_POINTS hash impl PartialEq for NamespaceNode { fn eq(&self, other: &Self) -> bool { self.mount.as_ref().map(Arc::as_ptr).eq(&other.mount.as_ref().map(Arc::as_ptr)) && Arc::ptr_eq(&self.entry, &other.entry) } } impl Eq for NamespaceNode {} impl Hash for NamespaceNode { fn hash<H: Hasher>(&self, state: &mut H) { self.mount.as_ref().map(Arc::as_ptr).hash(state); Arc::as_ptr(&self.entry).hash(state); } } #[cfg(test)] mod test { use super::*; use crate::errno; use crate::fs::tmpfs::TmpFs; use crate::testing::*; #[test] fn test_namespace() -> anyhow::Result<()> { let (_kernel, current_task) = create_kernel_and_task(); let root_fs = TmpFs::new(); let root_node = Arc::clone(root_fs.root()); let _dev_node = root_node.create_dir(b"dev").expect("failed to mkdir dev"); let dev_fs = TmpFs::new(); let dev_root_node = Arc::clone(dev_fs.root()); let _dev_pts_node = dev_root_node.create_dir(b"pts").expect("failed to mkdir pts"); let ns = Namespace::new(root_fs.clone()); let mut context = LookupContext::default(); let dev = ns .root() .lookup_child(&current_task, &mut context, b"dev") .expect("failed to lookup dev"); dev.mount(WhatToMount::Fs(dev_fs), MountFlags::empty()) .expect("failed to mount dev root node"); let mut context = LookupContext::default(); let dev = ns .root() .lookup_child(&current_task, &mut context, b"dev") .expect("failed to lookup dev"); let mut context = LookupContext::default(); let pts = dev.lookup_child(&current_task, &mut context, b"pts").expect("failed to lookup pts"); let pts_parent = pts.parent().ok_or(errno!(ENOENT)).expect("failed to get parent 
of pts"); assert!(Arc::ptr_eq(&pts_parent.entry, &dev.entry)); let dev_parent = dev.parent().ok_or(errno!(ENOENT)).expect("failed to get parent of dev"); assert!(Arc::ptr_eq(&dev_parent.entry, &ns.root().entry)); Ok(()) } #[test] fn test_mount_does_not_upgrade() -> anyhow::Result<()> { let (_kernel, current_task) = create_kernel_and_task(); let root_fs = TmpFs::new(); let root_node = Arc::clone(root_fs.root()); let _dev_node = root_node.create_dir(b"dev").expect("failed to mkdir dev"); let dev_fs = TmpFs::new(); let dev_root_node = Arc::clone(dev_fs.root()); let _dev_pts_node = dev_root_node.create_dir(b"pts").expect("failed to mkdir pts"); let ns = Namespace::new(root_fs.clone()); let mut context = LookupContext::default(); let dev = ns .root() .lookup_child(&current_task, &mut context, b"dev") .expect("failed to lookup dev"); dev.mount(WhatToMount::Fs(dev_fs), MountFlags::empty()) .expect("failed to mount dev root node"); let mut context = LookupContext::default(); let new_dev = ns .root() .lookup_child(&current_task, &mut context, b"dev") .expect("failed to lookup dev again"); assert!(!Arc::ptr_eq(&dev.entry, &new_dev.entry)); assert_ne!(&dev, &new_dev); let mut context = LookupContext::default(); let _new_pts = new_dev .lookup_child(&current_task, &mut context, b"pts") .expect("failed to lookup pts"); let mut context = LookupContext::default(); assert!(dev.lookup_child(&current_task, &mut context, b"pts").is_err()); Ok(()) } #[test] fn test_path() -> anyhow::Result<()> { let (_kernel, current_task) = create_kernel_and_task(); let root_fs = TmpFs::new(); let root_node = Arc::clone(root_fs.root()); let _dev_node = root_node.create_dir(b"dev").expect("failed to mkdir dev"); let dev_fs = TmpFs::new(); let dev_root_node = Arc::clone(dev_fs.root()); let _dev_pts_node = dev_root_node.create_dir(b"pts").expect("failed to mkdir pts"); let ns = Namespace::new(root_fs.clone()); let mut context = LookupContext::default(); let dev = ns .root() .lookup_child(&current_task, 
&mut context, b"dev") .expect("failed to lookup dev"); dev.mount(WhatToMount::Fs(dev_fs), MountFlags::empty()) .expect("failed to mount dev root node"); let mut context = LookupContext::default(); let dev = ns .root() .lookup_child(&current_task, &mut context, b"dev") .expect("failed to lookup dev"); let mut context = LookupContext::default(); let pts = dev.lookup_child(&current_task, &mut context, b"pts").expect("failed to lookup pts"); assert_eq!(b"/".to_vec(), ns.root().path()); assert_eq!(b"/dev".to_vec(), dev.path()); assert_eq!(b"/dev/pts".to_vec(), pts.path()); Ok(()) } #[test] fn test_shadowing() -> anyhow::Result<()> { let (_kernel, current_task) = create_kernel_and_task(); let root_fs = TmpFs::new(); let ns = Namespace::new(root_fs.clone()); let _foo_node = root_fs.root().create_dir(b"foo")?; let mut context = LookupContext::default(); let foo_dir = ns.root().lookup_child(&current_task, &mut context, b"foo")?; let foofs1 = TmpFs::new(); foo_dir.mount(WhatToMount::Fs(foofs1.clone()), MountFlags::empty())?; let mut context = LookupContext::default(); assert!(Arc::ptr_eq( &ns.root().lookup_child(&current_task, &mut context, b"foo")?.entry, foofs1.root() )); let foofs2 = TmpFs::new(); foo_dir.mount(WhatToMount::Fs(foofs2.clone()), MountFlags::empty())?; let mut context = LookupContext::default(); assert!(Arc::ptr_eq( &ns.root().lookup_child(&current_task, &mut context, b"foo")?.entry, foofs2.root() )); Ok(()) } #[test] fn test_trim_trailing_slashes() { assert_eq!(b"", trim_trailing_slashes(b"")); assert_eq!(b"", trim_trailing_slashes(b"/")); assert_eq!(b"", trim_trailing_slashes(b"/////")); assert_eq!(b"abc", trim_trailing_slashes(b"abc")); assert_eq!(b"abc", trim_trailing_slashes(b"abc/")); assert_eq!(b"abc", trim_trailing_slashes(b"abc/////")); assert_eq!(b"abc///xyz", trim_trailing_slashes(b"abc///xyz//")); assert_eq!(b"abc///xyz", trim_trailing_slashes(b"abc///xyz/")); assert_eq!(b"////abc///xyz", trim_trailing_slashes(b"////abc///xyz/")); } }
36.302829
98
0.578926
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_namespace() -> anyhow::Result<()> {\n let (_kernel, current_task) = create_kernel_and_task();\n let root_fs = TmpFs::new();\n let root_node = Arc::clone(root_fs.root());\n let _dev_node = root_node.create_dir(b\"dev\").expect(\"failed to mkdir dev\");\n let dev_fs = TmpFs::new();\n let dev_root_node = Arc::clone(dev_fs.root());\n let _dev_pts_node = dev_root_node.create_dir(b\"pts\").expect(\"failed to mkdir pts\");\n\n let ns = Namespace::new(root_fs.clone());\n let mut context = LookupContext::default();\n let dev = ns\n .root()\n .lookup_child(&current_task, &mut context, b\"dev\")\n .expect(\"failed to lookup dev\");\n dev.mount(WhatToMount::Fs(dev_fs), MountFlags::empty())\n .expect(\"failed to mount dev root node\");\n\n let mut context = LookupContext::default();\n let dev = ns\n .root()\n .lookup_child(&current_task, &mut context, b\"dev\")\n .expect(\"failed to lookup dev\");\n let mut context = LookupContext::default();\n let pts =\n dev.lookup_child(&current_task, &mut context, b\"pts\").expect(\"failed to lookup pts\");\n let pts_parent = pts.parent().ok_or(errno!(ENOENT)).expect(\"failed to get parent of pts\");\n assert!(Arc::ptr_eq(&pts_parent.entry, &dev.entry));\n\n let dev_parent = dev.parent().ok_or(errno!(ENOENT)).expect(\"failed to get parent of dev\");\n assert!(Arc::ptr_eq(&dev_parent.entry, &ns.root().entry));\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_mount_does_not_upgrade() -> anyhow::Result<()> {\n let (_kernel, current_task) = create_kernel_and_task();\n let root_fs = TmpFs::new();\n let root_node = Arc::clone(root_fs.root());\n let _dev_node = root_node.create_dir(b\"dev\").expect(\"failed to mkdir dev\");\n let dev_fs = TmpFs::new();\n let dev_root_node = Arc::clone(dev_fs.root());\n let _dev_pts_node = dev_root_node.create_dir(b\"pts\").expect(\"failed to mkdir pts\");\n\n let ns = Namespace::new(root_fs.clone());\n let mut context 
= LookupContext::default();\n let dev = ns\n .root()\n .lookup_child(&current_task, &mut context, b\"dev\")\n .expect(\"failed to lookup dev\");\n dev.mount(WhatToMount::Fs(dev_fs), MountFlags::empty())\n .expect(\"failed to mount dev root node\");\n let mut context = LookupContext::default();\n let new_dev = ns\n .root()\n .lookup_child(&current_task, &mut context, b\"dev\")\n .expect(\"failed to lookup dev again\");\n assert!(!Arc::ptr_eq(&dev.entry, &new_dev.entry));\n assert_ne!(&dev, &new_dev);\n\n let mut context = LookupContext::default();\n let _new_pts = new_dev\n .lookup_child(&current_task, &mut context, b\"pts\")\n .expect(\"failed to lookup pts\");\n let mut context = LookupContext::default();\n assert!(dev.lookup_child(&current_task, &mut context, b\"pts\").is_err());\n\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_path() -> anyhow::Result<()> {\n let (_kernel, current_task) = create_kernel_and_task();\n let root_fs = TmpFs::new();\n let root_node = Arc::clone(root_fs.root());\n let _dev_node = root_node.create_dir(b\"dev\").expect(\"failed to mkdir dev\");\n let dev_fs = TmpFs::new();\n let dev_root_node = Arc::clone(dev_fs.root());\n let _dev_pts_node = dev_root_node.create_dir(b\"pts\").expect(\"failed to mkdir pts\");\n\n let ns = Namespace::new(root_fs.clone());\n let mut context = LookupContext::default();\n let dev = ns\n .root()\n .lookup_child(&current_task, &mut context, b\"dev\")\n .expect(\"failed to lookup dev\");\n dev.mount(WhatToMount::Fs(dev_fs), MountFlags::empty())\n .expect(\"failed to mount dev root node\");\n\n let mut context = LookupContext::default();\n let dev = ns\n .root()\n .lookup_child(&current_task, &mut context, b\"dev\")\n .expect(\"failed to lookup dev\");\n let mut context = LookupContext::default();\n let pts =\n dev.lookup_child(&current_task, &mut context, b\"pts\").expect(\"failed to lookup pts\");\n\n assert_eq!(b\"/\".to_vec(), ns.root().path());\n 
assert_eq!(b\"/dev\".to_vec(), dev.path());\n assert_eq!(b\"/dev/pts\".to_vec(), pts.path());\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_shadowing() -> anyhow::Result<()> {\n let (_kernel, current_task) = create_kernel_and_task();\n let root_fs = TmpFs::new();\n let ns = Namespace::new(root_fs.clone());\n let _foo_node = root_fs.root().create_dir(b\"foo\")?;\n let mut context = LookupContext::default();\n let foo_dir = ns.root().lookup_child(&current_task, &mut context, b\"foo\")?;\n\n let foofs1 = TmpFs::new();\n foo_dir.mount(WhatToMount::Fs(foofs1.clone()), MountFlags::empty())?;\n let mut context = LookupContext::default();\n assert!(Arc::ptr_eq(\n &ns.root().lookup_child(&current_task, &mut context, b\"foo\")?.entry,\n foofs1.root()\n ));\n\n let foofs2 = TmpFs::new();\n foo_dir.mount(WhatToMount::Fs(foofs2.clone()), MountFlags::empty())?;\n let mut context = LookupContext::default();\n assert!(Arc::ptr_eq(\n &ns.root().lookup_child(&current_task, &mut context, b\"foo\")?.entry,\n foofs2.root()\n ));\n\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_trim_trailing_slashes() {\n assert_eq!(b\"\", trim_trailing_slashes(b\"\"));\n assert_eq!(b\"\", trim_trailing_slashes(b\"/\"));\n assert_eq!(b\"\", trim_trailing_slashes(b\"/////\"));\n assert_eq!(b\"abc\", trim_trailing_slashes(b\"abc\"));\n assert_eq!(b\"abc\", trim_trailing_slashes(b\"abc/\"));\n assert_eq!(b\"abc\", trim_trailing_slashes(b\"abc/////\"));\n assert_eq!(b\"abc///xyz\", trim_trailing_slashes(b\"abc///xyz//\"));\n assert_eq!(b\"abc///xyz\", trim_trailing_slashes(b\"abc///xyz/\"));\n assert_eq!(b\"////abc///xyz\", trim_trailing_slashes(b\"////abc///xyz/\"));\n }\n}" ]
f70679297aaf2fb0b8b08b1c0a1c3250f859750d
355
rs
Rust
tests/spec/non_conformant/misc/lang_bug.rs
becmer/rsass
7ef8b2ce6c283e5c3546640e57e62f306ed96c32
[ "Apache-2.0" ]
329
2017-02-18T12:39:56.000Z
2022-03-31T06:52:18.000Z
tests/spec/non_conformant/misc/lang_bug.rs
becmer/rsass
7ef8b2ce6c283e5c3546640e57e62f306ed96c32
[ "Apache-2.0" ]
73
2017-04-28T19:26:26.000Z
2022-03-05T15:51:09.000Z
tests/spec/non_conformant/misc/lang_bug.rs
becmer/rsass
7ef8b2ce6c283e5c3546640e57e62f306ed96c32
[ "Apache-2.0" ]
27
2017-06-05T23:02:14.000Z
2022-03-28T00:42:52.000Z
//! Tests auto-converted from "sass-spec/spec/non_conformant/misc/lang-bug.hrx" #[allow(unused)] fn runner() -> crate::TestRunner { super::runner() } #[test] fn test() { assert_eq!( runner().ok("div:lang(nb) {\ \n color: red;\ \n}"), "div:lang(nb) {\ \n color: red;\ \n}\n" ); }
18.684211
79
0.487324
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test() {\n assert_eq!(\n runner().ok(\"div:lang(nb) {\\\n \\n color: red;\\\n \\n}\"),\n \"div:lang(nb) {\\\n \\n color: red;\\\n \\n}\\n\"\n );\n}\n}" ]
f706a8d9c10c6727554de6d5839aea4e5f9881ce
9,089
rs
Rust
crossbeam-deque/tests/injector.rs
imotai/crossbeam
78b7ac386fb21edc21f54ff48be47e2d7fedcee8
[ "Apache-2.0", "MIT" ]
null
null
null
crossbeam-deque/tests/injector.rs
imotai/crossbeam
78b7ac386fb21edc21f54ff48be47e2d7fedcee8
[ "Apache-2.0", "MIT" ]
null
null
null
crossbeam-deque/tests/injector.rs
imotai/crossbeam
78b7ac386fb21edc21f54ff48be47e2d7fedcee8
[ "Apache-2.0", "MIT" ]
null
null
null
use std::sync::atomic::Ordering::SeqCst; use std::sync::atomic::{AtomicBool, AtomicUsize}; use std::sync::{Arc, Mutex}; use crossbeam_deque::Steal::{Empty, Success}; use crossbeam_deque::{Injector, Worker}; use crossbeam_utils::thread::scope; use rand::Rng; #[test] fn smoke() { let q = Injector::new(); assert_eq!(q.steal(), Empty); q.push(1); q.push(2); assert_eq!(q.steal(), Success(1)); assert_eq!(q.steal(), Success(2)); assert_eq!(q.steal(), Empty); q.push(3); assert_eq!(q.steal(), Success(3)); assert_eq!(q.steal(), Empty); } #[test] fn is_empty() { let q = Injector::new(); assert!(q.is_empty()); q.push(1); assert!(!q.is_empty()); q.push(2); assert!(!q.is_empty()); let _ = q.steal(); assert!(!q.is_empty()); let _ = q.steal(); assert!(q.is_empty()); q.push(3); assert!(!q.is_empty()); let _ = q.steal(); assert!(q.is_empty()); } #[test] fn spsc() { #[cfg(miri)] const COUNT: usize = 500; #[cfg(not(miri))] const COUNT: usize = 100_000; let q = Injector::new(); scope(|scope| { scope.spawn(|_| { for i in 0..COUNT { loop { if let Success(v) = q.steal() { assert_eq!(i, v); break; } #[cfg(miri)] std::hint::spin_loop(); } } assert_eq!(q.steal(), Empty); }); for i in 0..COUNT { q.push(i); } }) .unwrap(); } #[test] fn mpmc() { #[cfg(miri)] const COUNT: usize = 500; #[cfg(not(miri))] const COUNT: usize = 25_000; const THREADS: usize = 4; let q = Injector::new(); let v = (0..COUNT).map(|_| AtomicUsize::new(0)).collect::<Vec<_>>(); scope(|scope| { for _ in 0..THREADS { scope.spawn(|_| { for i in 0..COUNT { q.push(i); } }); } for _ in 0..THREADS { scope.spawn(|_| { for _ in 0..COUNT { loop { if let Success(n) = q.steal() { v[n].fetch_add(1, SeqCst); break; } #[cfg(miri)] std::hint::spin_loop(); } } }); } }) .unwrap(); for c in v { assert_eq!(c.load(SeqCst), THREADS); } } #[test] fn stampede() { const THREADS: usize = 8; #[cfg(miri)] const COUNT: usize = 500; #[cfg(not(miri))] const COUNT: usize = 50_000; let q = Injector::new(); for i in 0..COUNT { q.push(Box::new(i + 1)); } let 
remaining = Arc::new(AtomicUsize::new(COUNT)); scope(|scope| { for _ in 0..THREADS { let remaining = remaining.clone(); let q = &q; scope.spawn(move |_| { let mut last = 0; while remaining.load(SeqCst) > 0 { if let Success(x) = q.steal() { assert!(last < *x); last = *x; remaining.fetch_sub(1, SeqCst); } } }); } let mut last = 0; while remaining.load(SeqCst) > 0 { if let Success(x) = q.steal() { assert!(last < *x); last = *x; remaining.fetch_sub(1, SeqCst); } } }) .unwrap(); } #[test] fn stress() { const THREADS: usize = 8; #[cfg(miri)] const COUNT: usize = 500; #[cfg(not(miri))] const COUNT: usize = 50_000; let q = Injector::new(); let done = Arc::new(AtomicBool::new(false)); let hits = Arc::new(AtomicUsize::new(0)); scope(|scope| { for _ in 0..THREADS { let done = done.clone(); let hits = hits.clone(); let q = &q; scope.spawn(move |_| { let w2 = Worker::new_fifo(); while !done.load(SeqCst) { if let Success(_) = q.steal() { hits.fetch_add(1, SeqCst); } let _ = q.steal_batch(&w2); if let Success(_) = q.steal_batch_and_pop(&w2) { hits.fetch_add(1, SeqCst); } while w2.pop().is_some() { hits.fetch_add(1, SeqCst); } } }); } let mut rng = rand::thread_rng(); let mut expected = 0; while expected < COUNT { if rng.gen_range(0..3) == 0 { while let Success(_) = q.steal() { hits.fetch_add(1, SeqCst); } } else { q.push(expected); expected += 1; } } while hits.load(SeqCst) < COUNT { while let Success(_) = q.steal() { hits.fetch_add(1, SeqCst); } } done.store(true, SeqCst); }) .unwrap(); } #[cfg_attr(miri, ignore)] // Miri is too slow #[test] fn no_starvation() { const THREADS: usize = 8; const COUNT: usize = 50_000; let q = Injector::new(); let done = Arc::new(AtomicBool::new(false)); let mut all_hits = Vec::new(); scope(|scope| { for _ in 0..THREADS { let done = done.clone(); let hits = Arc::new(AtomicUsize::new(0)); all_hits.push(hits.clone()); let q = &q; scope.spawn(move |_| { let w2 = Worker::new_fifo(); while !done.load(SeqCst) { if let Success(_) = q.steal() { 
hits.fetch_add(1, SeqCst); } let _ = q.steal_batch(&w2); if let Success(_) = q.steal_batch_and_pop(&w2) { hits.fetch_add(1, SeqCst); } while w2.pop().is_some() { hits.fetch_add(1, SeqCst); } } }); } let mut rng = rand::thread_rng(); let mut my_hits = 0; loop { for i in 0..rng.gen_range(0..COUNT) { if rng.gen_range(0..3) == 0 && my_hits == 0 { while let Success(_) = q.steal() { my_hits += 1; } } else { q.push(i); } } if my_hits > 0 && all_hits.iter().all(|h| h.load(SeqCst) > 0) { break; } } done.store(true, SeqCst); }) .unwrap(); } #[test] fn destructors() { #[cfg(miri)] const THREADS: usize = 2; #[cfg(not(miri))] const THREADS: usize = 8; #[cfg(miri)] const COUNT: usize = 500; #[cfg(not(miri))] const COUNT: usize = 50_000; #[cfg(miri)] const STEPS: usize = 100; #[cfg(not(miri))] const STEPS: usize = 1000; struct Elem(usize, Arc<Mutex<Vec<usize>>>); impl Drop for Elem { fn drop(&mut self) { self.1.lock().unwrap().push(self.0); } } let q = Injector::new(); let dropped = Arc::new(Mutex::new(Vec::new())); let remaining = Arc::new(AtomicUsize::new(COUNT)); for i in 0..COUNT { q.push(Elem(i, dropped.clone())); } scope(|scope| { for _ in 0..THREADS { let remaining = remaining.clone(); let q = &q; scope.spawn(move |_| { let w2 = Worker::new_fifo(); let mut cnt = 0; while cnt < STEPS { if let Success(_) = q.steal() { cnt += 1; remaining.fetch_sub(1, SeqCst); } let _ = q.steal_batch(&w2); if let Success(_) = q.steal_batch_and_pop(&w2) { cnt += 1; remaining.fetch_sub(1, SeqCst); } while w2.pop().is_some() { cnt += 1; remaining.fetch_sub(1, SeqCst); } } }); } for _ in 0..STEPS { if let Success(_) = q.steal() { remaining.fetch_sub(1, SeqCst); } } }) .unwrap(); let rem = remaining.load(SeqCst); assert!(rem > 0); { let mut v = dropped.lock().unwrap(); assert_eq!(v.len(), COUNT - rem); v.clear(); } drop(q); { let mut v = dropped.lock().unwrap(); assert_eq!(v.len(), rem); v.sort_unstable(); for pair in v.windows(2) { assert_eq!(pair[0] + 1, pair[1]); } } }
24.172872
75
0.413137
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn smoke() {\n let q = Injector::new();\n assert_eq!(q.steal(), Empty);\n\n q.push(1);\n q.push(2);\n assert_eq!(q.steal(), Success(1));\n assert_eq!(q.steal(), Success(2));\n assert_eq!(q.steal(), Empty);\n\n q.push(3);\n assert_eq!(q.steal(), Success(3));\n assert_eq!(q.steal(), Empty);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn is_empty() {\n let q = Injector::new();\n assert!(q.is_empty());\n\n q.push(1);\n assert!(!q.is_empty());\n q.push(2);\n assert!(!q.is_empty());\n\n let _ = q.steal();\n assert!(!q.is_empty());\n let _ = q.steal();\n assert!(q.is_empty());\n\n q.push(3);\n assert!(!q.is_empty());\n let _ = q.steal();\n assert!(q.is_empty());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn spsc() {\n #[cfg(miri)]\n const COUNT: usize = 500;\n #[cfg(not(miri))]\n const COUNT: usize = 100_000;\n\n let q = Injector::new();\n\n scope(|scope| {\n scope.spawn(|_| {\n for i in 0..COUNT {\n loop {\n if let Success(v) = q.steal() {\n assert_eq!(i, v);\n break;\n }\n #[cfg(miri)]\n std::hint::spin_loop();\n }\n }\n\n assert_eq!(q.steal(), Empty);\n });\n\n for i in 0..COUNT {\n q.push(i);\n }\n })\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn mpmc() {\n #[cfg(miri)]\n const COUNT: usize = 500;\n #[cfg(not(miri))]\n const COUNT: usize = 25_000;\n const THREADS: usize = 4;\n\n let q = Injector::new();\n let v = (0..COUNT).map(|_| AtomicUsize::new(0)).collect::<Vec<_>>();\n\n scope(|scope| {\n for _ in 0..THREADS {\n scope.spawn(|_| {\n for i in 0..COUNT {\n q.push(i);\n }\n });\n }\n\n for _ in 0..THREADS {\n scope.spawn(|_| {\n for _ in 0..COUNT {\n loop {\n if let Success(n) = q.steal() {\n v[n].fetch_add(1, SeqCst);\n break;\n }\n #[cfg(miri)]\n std::hint::spin_loop();\n }\n }\n });\n }\n })\n .unwrap();\n\n for c in v {\n assert_eq!(c.load(SeqCst), THREADS);\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn 
stampede() {\n const THREADS: usize = 8;\n #[cfg(miri)]\n const COUNT: usize = 500;\n #[cfg(not(miri))]\n const COUNT: usize = 50_000;\n\n let q = Injector::new();\n\n for i in 0..COUNT {\n q.push(Box::new(i + 1));\n }\n let remaining = Arc::new(AtomicUsize::new(COUNT));\n\n scope(|scope| {\n for _ in 0..THREADS {\n let remaining = remaining.clone();\n let q = &q;\n\n scope.spawn(move |_| {\n let mut last = 0;\n while remaining.load(SeqCst) > 0 {\n if let Success(x) = q.steal() {\n assert!(last < *x);\n last = *x;\n remaining.fetch_sub(1, SeqCst);\n }\n }\n });\n }\n\n let mut last = 0;\n while remaining.load(SeqCst) > 0 {\n if let Success(x) = q.steal() {\n assert!(last < *x);\n last = *x;\n remaining.fetch_sub(1, SeqCst);\n }\n }\n })\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn stress() {\n const THREADS: usize = 8;\n #[cfg(miri)]\n const COUNT: usize = 500;\n #[cfg(not(miri))]\n const COUNT: usize = 50_000;\n\n let q = Injector::new();\n let done = Arc::new(AtomicBool::new(false));\n let hits = Arc::new(AtomicUsize::new(0));\n\n scope(|scope| {\n for _ in 0..THREADS {\n let done = done.clone();\n let hits = hits.clone();\n let q = &q;\n\n scope.spawn(move |_| {\n let w2 = Worker::new_fifo();\n\n while !done.load(SeqCst) {\n if let Success(_) = q.steal() {\n hits.fetch_add(1, SeqCst);\n }\n\n let _ = q.steal_batch(&w2);\n\n if let Success(_) = q.steal_batch_and_pop(&w2) {\n hits.fetch_add(1, SeqCst);\n }\n\n while w2.pop().is_some() {\n hits.fetch_add(1, SeqCst);\n }\n }\n });\n }\n\n let mut rng = rand::thread_rng();\n let mut expected = 0;\n while expected < COUNT {\n if rng.gen_range(0..3) == 0 {\n while let Success(_) = q.steal() {\n hits.fetch_add(1, SeqCst);\n }\n } else {\n q.push(expected);\n expected += 1;\n }\n }\n\n while hits.load(SeqCst) < COUNT {\n while let Success(_) = q.steal() {\n hits.fetch_add(1, SeqCst);\n }\n }\n done.store(true, SeqCst);\n })\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use 
super::*;\n #[test]\n fn no_starvation() {\n const THREADS: usize = 8;\n const COUNT: usize = 50_000;\n\n let q = Injector::new();\n let done = Arc::new(AtomicBool::new(false));\n let mut all_hits = Vec::new();\n\n scope(|scope| {\n for _ in 0..THREADS {\n let done = done.clone();\n let hits = Arc::new(AtomicUsize::new(0));\n all_hits.push(hits.clone());\n let q = &q;\n\n scope.spawn(move |_| {\n let w2 = Worker::new_fifo();\n\n while !done.load(SeqCst) {\n if let Success(_) = q.steal() {\n hits.fetch_add(1, SeqCst);\n }\n\n let _ = q.steal_batch(&w2);\n\n if let Success(_) = q.steal_batch_and_pop(&w2) {\n hits.fetch_add(1, SeqCst);\n }\n\n while w2.pop().is_some() {\n hits.fetch_add(1, SeqCst);\n }\n }\n });\n }\n\n let mut rng = rand::thread_rng();\n let mut my_hits = 0;\n loop {\n for i in 0..rng.gen_range(0..COUNT) {\n if rng.gen_range(0..3) == 0 && my_hits == 0 {\n while let Success(_) = q.steal() {\n my_hits += 1;\n }\n } else {\n q.push(i);\n }\n }\n\n if my_hits > 0 && all_hits.iter().all(|h| h.load(SeqCst) > 0) {\n break;\n }\n }\n done.store(true, SeqCst);\n })\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn destructors() {\n #[cfg(miri)]\n const THREADS: usize = 2;\n #[cfg(not(miri))]\n const THREADS: usize = 8;\n #[cfg(miri)]\n const COUNT: usize = 500;\n #[cfg(not(miri))]\n const COUNT: usize = 50_000;\n #[cfg(miri)]\n const STEPS: usize = 100;\n #[cfg(not(miri))]\n const STEPS: usize = 1000;\n\n struct Elem(usize, Arc<Mutex<Vec<usize>>>);\n\n impl Drop for Elem {\n fn drop(&mut self) {\n self.1.lock().unwrap().push(self.0);\n }\n }\n\n let q = Injector::new();\n let dropped = Arc::new(Mutex::new(Vec::new()));\n let remaining = Arc::new(AtomicUsize::new(COUNT));\n\n for i in 0..COUNT {\n q.push(Elem(i, dropped.clone()));\n }\n\n scope(|scope| {\n for _ in 0..THREADS {\n let remaining = remaining.clone();\n let q = &q;\n\n scope.spawn(move |_| {\n let w2 = Worker::new_fifo();\n let mut cnt = 0;\n\n while cnt < STEPS {\n if 
let Success(_) = q.steal() {\n cnt += 1;\n remaining.fetch_sub(1, SeqCst);\n }\n\n let _ = q.steal_batch(&w2);\n\n if let Success(_) = q.steal_batch_and_pop(&w2) {\n cnt += 1;\n remaining.fetch_sub(1, SeqCst);\n }\n\n while w2.pop().is_some() {\n cnt += 1;\n remaining.fetch_sub(1, SeqCst);\n }\n }\n });\n }\n\n for _ in 0..STEPS {\n if let Success(_) = q.steal() {\n remaining.fetch_sub(1, SeqCst);\n }\n }\n })\n .unwrap();\n\n let rem = remaining.load(SeqCst);\n assert!(rem > 0);\n\n {\n let mut v = dropped.lock().unwrap();\n assert_eq!(v.len(), COUNT - rem);\n v.clear();\n }\n\n drop(q);\n\n {\n let mut v = dropped.lock().unwrap();\n assert_eq!(v.len(), rem);\n v.sort_unstable();\n for pair in v.windows(2) {\n assert_eq!(pair[0] + 1, pair[1]);\n }\n }\n}\n}" ]
f706b3f3edd6ea69abf947f825933a3e2cc527d0
13,592
rs
Rust
examples/custom_router/router_macro_derive/src/lib.rs
arn-the-long-beard/old_seed_archive
9aed8e64ab6ee5a2a6e9fd650eefb752fcb9144c
[ "MIT" ]
null
null
null
examples/custom_router/router_macro_derive/src/lib.rs
arn-the-long-beard/old_seed_archive
9aed8e64ab6ee5a2a6e9fd650eefb752fcb9144c
[ "MIT" ]
null
null
null
examples/custom_router/router_macro_derive/src/lib.rs
arn-the-long-beard/old_seed_archive
9aed8e64ab6ee5a2a6e9fd650eefb752fcb9144c
[ "MIT" ]
null
null
null
#[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } } extern crate convert_case; extern crate proc_macro; extern crate proc_macro_error; use crate::root::get_default_route; use crate::routing::routing_variant_snippets; use proc_macro::TokenStream; use crate::routing_modules::{module_init_snippets, modules_path, modules_snippets}; use proc_macro_error::{abort, proc_macro_error, Diagnostic, Level}; use quote::quote; use syn::{ export::TokenStream2, parse::Result, parse_macro_input, Attribute, Data, DataEnum, DeriveInput, Error, Field, Fields, Ident, Lit, LitStr, Meta, MetaNameValue, Variant, }; mod guard; mod root; mod routing; mod routing_modules; /// Derive an enum as Routing for navigation /// You can change the value of a path for a given route this way /// /// /// /// ```rust /// /// #[derive(Debug, PartialEq, Copy, Clone, AsUrl)] /// pub enum DashboardAdminRoutes { /// #[as_path = "my_stuff"] // "/my_stuff" /// Other, /// #[as_path = ""] /// Root, // "/" /// } /// /// /// fn test_url() { /// let mut query_search: IndexMap<String, String> = IndexMap::new(); /// /// query_search.insert("user".to_string(), "arn".to_string()); /// query_search.insert("role".to_string(), "baby_programmer".to_string()); /// query_search.insert("location".to_string(), "norway".to_string()); /// let url = ExampleRoutes::Admin { /// query: query_search.clone(), /// } /// .to_url(); /// let url_to_compare: Url = "/admin?user=arn&role=baby_programmer&location=norway" /// .parse() /// .unwrap(); /// assert_eq!(url, url_to_compare); /// } /// /// ``` /// #[proc_macro_error] #[proc_macro_derive(AsUrl, attributes(as_path))] pub fn derive_as_url(item: TokenStream) -> TokenStream { let DeriveInput { ident, data, .. 
} = parse_macro_input!(item as DeriveInput); let variants = match data { Data::Enum(data) => data.variants, _ => abort!(Diagnostic::new( Level::Error, "Can only derive AsPath for enums.".into() )), }; let variants = variants.iter(); let (as_snippets, parse_snippets) = routing_variant_snippets(variants.clone()); let name = ident.to_string(); TokenStream::from(quote! { impl router::Navigation for #ident { fn to_url(&self) -> Url { let url : Url = match self { #(#as_snippets),* }.parse().unwrap(); url } fn from_url(url: Url) -> std::result::Result<Self, ParseError> where Self: Sized + ParsePath { let string_url = url.to_string(); Self::parse_path(&string_url) } } impl AsPath for #ident { fn as_path(self) -> String { match self { #(#as_snippets),* } } } impl router::ParsePath for #ident { fn parse_path(path: &str) -> std::result::Result<Self, ParseError> { let next = path.trim_start_matches("/"); Err(ParseError::NoMatch) #(.or_else(|err| #parse_snippets ) )* .map_err(|err| ParseError::By(#name.to_string(), Box::new(err))) } } }) } fn get_string_from_attribute(attribute_name: &str, attr: &Attribute) -> Result<Option<LitStr>> { if !attr.path.is_ident(attribute_name) { return Ok(None); // not our attribute } match attr.parse_meta()? { Meta::NameValue(MetaNameValue { lit: Lit::Str(name), .. }) => Some(Some(name)), _ => None, } .ok_or_else(|| Error::new_spanned(attr, &format!("expected #[{} = \"...\"]", attribute_name))) } /// Rebuild the content of a variant depending of the fields present in the original enum fn build_structs(structs_tuple: (Option<&Field>, Option<&Field>, Option<&Field>)) -> TokenStream2 { match structs_tuple { (id, query, children) if id.is_some() && query.is_some() && children.is_some() => { quote! { id,query,children} } (id, query, _) if id.is_some() && query.is_some() => { quote! { id, query} } (id, query, children) if id.is_none() && query.is_some() && children.is_some() => { quote! 
{ query , children} } (id, query, children) if id.is_some() && children.is_some() && query.is_none() => { quote! { id, children } } (id, query, children) if id.is_some() && query.is_none() && children.is_none() => { quote! { id } } (id, query, children) if query.is_some() && id.is_none() && children.is_none() => { quote! { query} } (id, query, children) if query.is_none() && id.is_none() & children.is_some() => { quote! { children } } (id, query, children) if query.is_none() && id.is_none() & children.is_none() => { quote! {} } (_, _, _) => { quote! {} } } } /// Assign only the payload defined by the field in the enu, fn build_advanced(structs_tuple: (Option<&Field>, Option<&Field>, Option<&Field>)) -> TokenStream2 { match structs_tuple { (id, query, children) if id.is_some() && query.is_some() && children.is_some() => { let sub_enum = &children.clone().unwrap().ty; quote! { id : id.unwrap(),query : query.unwrap(),children : #sub_enum::parse_path(&children.unwrap()).unwrap()} } (id, query, _) if id.is_some() && query.is_some() => { quote! { id : id.unwrap(),query : query.unwrap()} } (id, query, children) if id.is_none() && query.is_some() && children.is_some() => { let sub_enum = &children.clone().unwrap().ty; quote! { query : query.unwrap(),children : #sub_enum::parse_path(&children.unwrap()).unwrap()} } (id, query, children) if id.is_some() && children.is_some() && query.is_none() => { let sub_enum = &children.clone().unwrap().ty; quote! { id : id.unwrap(),children : #sub_enum::parse_path(&children.unwrap()).unwrap()} } (id, query, children) if id.is_some() && query.is_none() && children.is_none() => { quote! { id : id.unwrap()} } (id, query, children) if query.is_some() && id.is_none() && children.is_none() => { quote! { query : query.unwrap()} } (id, query, children) if query.is_none() && id.is_none() & children.is_some() => { let sub_enum = &children.clone().unwrap().ty; quote! 
{ children :#sub_enum::parse_path(&children.unwrap().clone()).unwrap()} } (_, _, _) => { quote! {} } } } fn build_string_payload(structs_tuple: (Option<&Field>, Option<&Field>, Option<&Field>)) -> String { match structs_tuple { (id, query, children) if id.is_some() && query.is_some() && children.is_some() => { "id,query,children".to_string() } (id, query, _) if id.is_some() && query.is_some() => "id,query".to_string(), (id, query, children) if id.is_none() && query.is_some() && children.is_some() => { "query,children".to_string() } (id, query, children) if id.is_some() && children.is_some() && query.is_none() => { "id,children".to_string() } (id, query, children) if id.is_some() && query.is_none() && children.is_none() => { "id".to_string() } (id, query, children) if query.is_some() && id.is_none() && children.is_none() => { "query".to_string() } (id, query, children) if query.is_none() && id.is_none() & children.is_some() => { "children".to_string() } (id, query, children) if query.is_none() && id.is_none() & children.is_none() => { "".to_string() } (_, _, _) => "".to_string(), } } /// Define a routing config as root for your navigation. /// It will contain the default route used by the router when it cannot find the right url /// ```rust /// /// #[derive(Debug, PartialEq, Copy, Clone, Root)] /// pub enum DashboardAdminRoutes { /// #[default_route] /// NotFound, // -> /blablablalbla -> /not_found /// Root, /// } /// ``` /// #[proc_macro_error] #[proc_macro_derive(Root, attributes(default_route))] pub fn define_as_root(item: TokenStream) -> TokenStream { let DeriveInput { ident, data, .. 
} = parse_macro_input!(item as DeriveInput); let variants = match data { Data::Enum(data) => data.variants, _ => abort!(Diagnostic::new( Level::Error, "Can only derive AsPath for enums.".into() )), }; let variants = variants.iter(); let default_route = get_default_route(variants.clone()); if default_route.is_err() { abort!(Diagnostic::new( Level::Error, "Could not find default_route".into() )) } let default_variant = default_route.unwrap(); match default_variant.fields { Fields::Named(_) => abort!(Diagnostic::new( Level::Error, "Default route need to be simple".into() )), Fields::Unnamed(_) => abort!(Diagnostic::new( Level::Error, "Default route need to be simple".into() )), Fields::Unit => {} } let default_variant_ident = default_variant.ident; TokenStream::from(quote! { impl Default for #ident { fn default() -> #ident { #ident::#default_variant_ident } } }) } /// The RoutingModule makes the enum variants representing modules loaded by the routes /// By default, an enum variant snake case is equal to its module name /// /// You can rename the path /// You can specify routes that does not load module ( no init, no specific Model & Msg and no view ) /// /// The derive macro will call the init function , Model, Msg, Routes, Update, and View /// /// Todo : /// - Could add as_module /// - Could generate the code for fn update as well ? 
/// ```rust /// /// /// /// #[derive(Debug, PartialEq, Clone, RoutingModules)] /// pub enum ExampleRoutes { /// // #[as_module= "my_stuff"] // the module is name my_stuff.rs /// Other { /// id: String, /// children: Settings, /// }, /// #[guard = "logged_user => admin_guard => not_authorized_view"] /// Admin { // will load module "admin.rs" /// // will load model.admin and as well /// // equal to /// // #[model_scope = "admin => admin ::init"] will check init has correct arguments /// // #[view_scope = "admin => admin::view"] will check viewt has correct arguments /// query: IndexMap<String, String>, /// }, /// #[guard = "logged_user => user_guard => not_logged_user_view"] /// Dashboard(DashboardRoutes), // will load module "dashboard" /// Profile { // will load module "profile" /// id: String, /// }, /// #[guard = "logged_user => admin_guard => not_authorized_view"] /// #[view = " => my_stuff"] /// MyStuff, /// #[view = " => not_found"] /// #[default_route] /// NotFound, /// #[view = " => home"] /// #[as_path = ""] /// Root, /// } /// /// fn view(model: &Model) -> impl IntoNodes<Msg> { /// vec![ /// header(&model), /// if let Some(route) = &model.router.current_route { /// route.view(model) /// } else { /// home(&model.theme) /// }, /// ] /// } /// /// ``` /// /// #[proc_macro_error] #[proc_macro_derive( RoutingModules, attributes(as_path, view, guard, default_route, modules_path) )] pub fn derive_add_module_load(item: TokenStream) -> TokenStream { let add_url = derive_as_url(item.clone()); let root = define_as_root(item.clone()); let DeriveInput { ident, data, attrs, .. 
} = parse_macro_input!(item as DeriveInput); let variants = match data { Data::Enum(data) => data.variants, _ => abort!(Diagnostic::new( Level::Error, "Can only derive AsPath for enums.".into() )), }; let url_impl = TokenStream2::from(add_url); let default_route_impl = TokenStream2::from(root); let variants = variants.iter(); let modules_path = modules_path(ident.clone(), attrs.iter()); let modules_snippets = modules_snippets(variants.clone(), modules_path.clone()); let init_snippets = module_init_snippets(variants.clone(), modules_path.clone()); TokenStream::from(quote! { #url_impl #default_route_impl impl router::View<#ident, Model, Msg> for #ident { fn view(&self, scoped_state: &Model) -> Node<Msg> { match self { #(#modules_snippets),* } } } impl router::Init<#ident, Model, Msg> for #ident { fn init<'b, 'c>( &self, previous_state: &'b mut Model, orders: &'c mut impl Orders<Msg>, ) -> &'b mut Model { match self { #(#init_snippets),* } previous_state } } }) }
33.477833
124
0.546424
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn it_works() {\n assert_eq!(2 + 2, 4);\n }\n}" ]
f706e377eb1e76a14386a192293e8caf5261b0d8
978
rs
Rust
tests/ui-tests.rs
PoiScript/sqlx
fb66cfa66563e45a0f0dad3c8fd7dba8a264d346
[ "Apache-2.0", "MIT" ]
null
null
null
tests/ui-tests.rs
PoiScript/sqlx
fb66cfa66563e45a0f0dad3c8fd7dba8a264d346
[ "Apache-2.0", "MIT" ]
null
null
null
tests/ui-tests.rs
PoiScript/sqlx
fb66cfa66563e45a0f0dad3c8fd7dba8a264d346
[ "Apache-2.0", "MIT" ]
null
null
null
#[test] fn ui_tests() { let t = trybuild::TestCases::new(); if cfg!(feature = "postgres") { t.compile_fail("tests/ui/postgres/*.rs"); // UI tests for column types that require gated features if cfg!(not(feature = "chrono")) { t.compile_fail("tests/ui/postgres/gated/chrono.rs"); } if cfg!(not(feature = "uuid")) { t.compile_fail("tests/ui/postgres/gated/uuid.rs"); } if cfg!(not(feature = "ipnetwork")) { t.compile_fail("tests/ui/postgres/gated/ipnetwork.rs"); } } if cfg!(feature = "mysql") { t.compile_fail("tests/ui/mysql/*.rs"); // UI tests for column types that require gated features if cfg!(not(feature = "chrono")) { t.compile_fail("tests/ui/mysql/gated/chrono.rs"); } } if cfg!(feature = "sqlite") { t.compile_fail("tests/ui/sqlite/*.rs"); } t.compile_fail("tests/ui/*.rs"); }
26.432432
67
0.54908
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ui_tests() {\n let t = trybuild::TestCases::new();\n\n if cfg!(feature = \"postgres\") {\n t.compile_fail(\"tests/ui/postgres/*.rs\");\n\n // UI tests for column types that require gated features\n if cfg!(not(feature = \"chrono\")) {\n t.compile_fail(\"tests/ui/postgres/gated/chrono.rs\");\n }\n\n if cfg!(not(feature = \"uuid\")) {\n t.compile_fail(\"tests/ui/postgres/gated/uuid.rs\");\n }\n\n if cfg!(not(feature = \"ipnetwork\")) {\n t.compile_fail(\"tests/ui/postgres/gated/ipnetwork.rs\");\n }\n }\n\n if cfg!(feature = \"mysql\") {\n t.compile_fail(\"tests/ui/mysql/*.rs\");\n\n // UI tests for column types that require gated features\n if cfg!(not(feature = \"chrono\")) {\n t.compile_fail(\"tests/ui/mysql/gated/chrono.rs\");\n }\n }\n\n if cfg!(feature = \"sqlite\") {\n t.compile_fail(\"tests/ui/sqlite/*.rs\");\n }\n\n t.compile_fail(\"tests/ui/*.rs\");\n}\n}" ]
f70714032223829b5c7e33962e2a9d51f681ce10
3,998
rs
Rust
src/sg_client.rs
peitalin/sendgrid-rs
8324245b0b8258d4ec8772448ad1ebb37de362bc
[ "MIT" ]
null
null
null
src/sg_client.rs
peitalin/sendgrid-rs
8324245b0b8258d4ec8772448ad1ebb37de362bc
[ "MIT" ]
null
null
null
src/sg_client.rs
peitalin/sendgrid-rs
8324245b0b8258d4ec8772448ad1ebb37de362bc
[ "MIT" ]
null
null
null
use std::io::Read; use reqwest::header::{self, HeaderMap, HeaderValue}; use reqwest::Client; use url::form_urlencoded::Serializer; use errors::SendgridResult; use mail::Mail; static API_URL: &'static str = "https://api.sendgrid.com/api/mail.send.json?"; /// This is the struct that allows you to authenticate to the SendGrid API. /// It's only field is the API key which allows you to send messages. #[derive(Clone, Debug)] pub struct SGClient { api_key: String, } // Given a form value and a key, generate the correct key. fn make_form_key(form: &str, key: &str) -> String { let mut value = String::new(); value.push_str(form); value.push('['); value.push_str(key); value.push(']'); value } // Use the URL form encoder to properly generate the body used in the mail send request. fn make_post_body(mut mail_info: Mail) -> SendgridResult<String> { let body = String::new(); let mut encoder = Serializer::new(body); for to in mail_info.to.iter() { encoder.append_pair("to[]", to.address); encoder.append_pair("toname[]", to.name); } for cc in mail_info.cc.iter() { encoder.append_pair("cc[]", &cc); } for bcc in mail_info.bcc.iter() { encoder.append_pair("bcc[]", &bcc); } for (attachment, contents) in &mail_info.attachments { encoder.append_pair(&make_form_key("files", attachment), &contents); } for (id, value) in &mail_info.content { encoder.append_pair(&make_form_key("content", id), &value); } encoder.append_pair("from", &mail_info.from); encoder.append_pair("subject", &mail_info.subject); encoder.append_pair("html", &mail_info.html); encoder.append_pair("text", &mail_info.text); encoder.append_pair("fromname", &mail_info.from_name); encoder.append_pair("replyto", &mail_info.reply_to); encoder.append_pair("date", &mail_info.date); encoder.append_pair("headers", &mail_info.make_header_string()?); encoder.append_pair("x-smtpapi", &mail_info.x_smtpapi); Ok(encoder.finish()) } impl SGClient { /// Makes a new SendGrid cient with the specified API key. 
pub fn new(key: String) -> SGClient { SGClient { api_key: key } } /// Sends a messages through the SendGrid API. It takes a Mail struct as an /// argument. It returns the string response from the API as JSON. /// It sets the Content-Type to be application/x-www-form-urlencoded. pub fn send(&self, mail_info: Mail) -> SendgridResult<String> { let client = Client::new(); let mut headers = HeaderMap::new(); headers.insert( header::AUTHORIZATION, HeaderValue::from_str(&format!("Bearer {}", self.api_key.clone()))?, ); headers.insert( header::CONTENT_TYPE, HeaderValue::from_static("application/x-www-form-urlencoded"), ); headers.insert(header::USER_AGENT, HeaderValue::from_static("sendgrid-rs")); let post_body = make_post_body(mail_info)?; let mut res = client .post(API_URL) .headers(headers) .body(post_body) .send()?; let mut body = String::new(); res.read_to_string(&mut body)?; Ok(body) } } #[test] fn basic_message_body() { use mail::Destination; let m = Mail::new() .add_to(Destination { address: "test@example.com", name: "Testy mcTestFace", }) .add_from("me@example.com") .add_subject("Test") .add_text("It works"); let body = make_post_body(m); let want = "to%5B%5D=test%40example.com&toname%5B%5D=Testy+mcTestFace&from=me%40example.com&subject=Test&\ html=&text=It+works&fromname=&replyto=&date=&headers=%7B%7D&x-smtpapi="; assert_eq!(body.unwrap(), want); } #[test] fn test_proper_key() { let want = "files[test.jpg]"; let got = make_form_key("files", "test.jpg"); assert_eq!(want, got); }
31.234375
110
0.631316
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn basic_message_body() {\n use mail::Destination;\n\n let m = Mail::new()\n .add_to(Destination {\n address: \"test@example.com\",\n name: \"Testy mcTestFace\",\n })\n .add_from(\"me@example.com\")\n .add_subject(\"Test\")\n .add_text(\"It works\");\n\n let body = make_post_body(m);\n let want = \"to%5B%5D=test%40example.com&toname%5B%5D=Testy+mcTestFace&from=me%40example.com&subject=Test&\\\n html=&text=It+works&fromname=&replyto=&date=&headers=%7B%7D&x-smtpapi=\";\n assert_eq!(body.unwrap(), want);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_proper_key() {\n let want = \"files[test.jpg]\";\n let got = make_form_key(\"files\", \"test.jpg\");\n assert_eq!(want, got);\n}\n}" ]
f70715ed756dca92167299284bec4ed71a4b8605
21,532
rs
Rust
src/libstd/ptr.rs
BurntSushi/rust
30fe55066a29a14fffd2a5f41e0ab21e7056fb34
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/libstd/ptr.rs
BurntSushi/rust
30fe55066a29a14fffd2a5f41e0ab21e7056fb34
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/libstd/ptr.rs
BurntSushi/rust
30fe55066a29a14fffd2a5f41e0ab21e7056fb34
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Conveniences for working with unsafe pointers, the `*T`, and `*mut T` types. //! //! Working with unsafe pointers in Rust is fairly uncommon, //! and often limited to some narrow use cases: holding //! an unsafe pointer when safe pointers are unsuitable; //! checking for null; and converting back to safe pointers. //! As a result, there is not yet an abundance of library code //! for working with unsafe pointers, and in particular, //! since pointer math is fairly uncommon in Rust, it is not //! all that convenient. //! //! Use the [`null` function](fn.null.html) to create null pointers, //! the [`is_null`](trait.RawPtr.html#tymethod.is_null) //! and [`is_not_null`](trait.RawPtr.html#method.is_not_null) //! methods of the [`RawPtr` trait](trait.RawPtr.html) to check for null. //! The `RawPtr` trait is imported by the prelude, so `is_null` etc. //! work everywhere. //! //! # Common ways to create unsafe pointers //! //! ## 1. Coerce a reference (`&T`) or mutable reference (`&mut T`). //! //! ``` //! let my_num: int = 10; //! let my_num_ptr: *int = &my_num; //! let mut my_speed: int = 88; //! let my_speed_ptr: *mut int = &mut my_speed; //! ``` //! //! This does not take ownership of the original allocation //! and requires no resource management later, //! but you must not use the pointer after its lifetime. //! //! ## 2. Transmute an owned box (`~T`). //! //! The `transmute` function takes, by value, whatever it's given //! and returns it as whatever type is requested, as long as the //! 
types are the same size. Because `~T` and `*T` have the same //! representation they can be trivially, //! though unsafely, transformed from one type to the other. //! //! ``` //! use std::cast; //! //! unsafe { //! let my_num: ~int = ~10; //! let my_num: *int = cast::transmute(my_num); //! let my_speed: ~int = ~88; //! let my_speed: *mut int = cast::transmute(my_speed); //! //! // By taking ownership of the original `~T` though //! // we are obligated to transmute it back later to be destroyed. //! drop(cast::transmute::<_, ~int>(my_speed)); //! drop(cast::transmute::<_, ~int>(my_num)); //! } //! ``` //! //! Note that here the call to `drop` is for clarity - it indicates //! that we are done with the given value and it should be destroyed. //! //! ## 3. Get it from C. //! //! ``` //! extern crate libc; //! //! use std::mem; //! //! fn main() { //! unsafe { //! let my_num: *mut int = libc::malloc(mem::size_of::<int>() as libc::size_t) as *mut int; //! if my_num.is_null() { //! fail!("failed to allocate memory"); //! } //! libc::free(my_num as *mut libc::c_void); //! } //! } //! ``` //! //! Usually you wouldn't literally use `malloc` and `free` from Rust, //! but C APIs hand out a lot of pointers generally, so are a common source //! of unsafe pointers in Rust. use cast; use clone::Clone; #[cfg(not(test))] use cmp::Equiv; use iter::{range, Iterator}; use mem; use option::{Option, Some, None}; use intrinsics; #[cfg(not(test))] use cmp::{Eq, TotalEq, Ord}; /// Return the offset of the first null pointer in `buf`. #[inline] pub unsafe fn buf_len<T>(buf: **T) -> uint { position(buf, |i| *i == null()) } impl<T> Clone for *T { #[inline] fn clone(&self) -> *T { *self } } impl<T> Clone for *mut T { #[inline] fn clone(&self) -> *mut T { *self } } /// Return the first offset `i` such that `f(buf[i]) == true`. 
#[inline] pub unsafe fn position<T>(buf: *T, f: |&T| -> bool) -> uint { let mut i = 0; loop { if f(&(*buf.offset(i as int))) { return i; } else { i += 1; } } } /// Create an null pointer. /// /// # Example /// /// ``` /// use std::ptr; /// /// let p: *int = ptr::null(); /// assert!(p.is_null()); /// ``` #[inline] pub fn null<T>() -> *T { 0 as *T } /// Create an unsafe mutable null pointer. /// /// # Example /// /// ``` /// use std::ptr; /// /// let p: *mut int = ptr::mut_null(); /// assert!(p.is_null()); /// ``` #[inline] pub fn mut_null<T>() -> *mut T { 0 as *mut T } /// Copies data from one location to another. /// /// Copies `count` elements (not bytes) from `src` to `dst`. The source /// and destination may overlap. /// /// `copy_memory` is semantically equivalent to C's `memmove`. /// /// # Example /// /// Efficiently create a Rust vector from an unsafe buffer: /// /// ``` /// use std::ptr; /// /// unsafe fn from_buf_raw<T>(ptr: *T, elts: uint) -> Vec<T> { /// let mut dst = Vec::with_capacity(elts); /// dst.set_len(elts); /// ptr::copy_memory(dst.as_mut_ptr(), ptr, elts); /// dst /// } /// ``` /// #[inline] pub unsafe fn copy_memory<T>(dst: *mut T, src: *T, count: uint) { intrinsics::copy_memory(dst, src, count) } /// Copies data from one location to another. /// /// Copies `count` elements (not bytes) from `src` to `dst`. The source /// and destination may *not* overlap. /// /// `copy_nonoverlapping_memory` is semantically equivalent to C's `memcpy`. 
/// /// # Example /// /// A safe swap function: /// /// ``` /// use std::cast; /// use std::mem; /// use std::ptr; /// /// fn swap<T>(x: &mut T, y: &mut T) { /// unsafe { /// // Give ourselves some scratch space to work with /// let mut t: T = mem::uninit(); /// /// // Perform the swap, `&mut` pointers never alias /// ptr::copy_nonoverlapping_memory(&mut t, &*x, 1); /// ptr::copy_nonoverlapping_memory(x, &*y, 1); /// ptr::copy_nonoverlapping_memory(y, &t, 1); /// /// // y and t now point to the same thing, but we need to completely forget `tmp` /// // because it's no longer relevant. /// cast::forget(t); /// } /// } /// ``` /// /// # Safety Note /// /// If the source and destination overlap then the behavior of this /// function is undefined. #[inline] pub unsafe fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *T, count: uint) { intrinsics::copy_nonoverlapping_memory(dst, src, count) } /// Invokes memset on the specified pointer, setting `count * size_of::<T>()` /// bytes of memory starting at `dst` to `c`. #[inline] pub unsafe fn set_memory<T>(dst: *mut T, c: u8, count: uint) { intrinsics::set_memory(dst, c, count) } /// Zeroes out `count * size_of::<T>` bytes of memory at `dst` #[inline] pub unsafe fn zero_memory<T>(dst: *mut T, count: uint) { set_memory(dst, 0, count); } /// Swap the values at two mutable locations of the same type, without /// deinitialising either. They may overlap. #[inline] pub unsafe fn swap<T>(x: *mut T, y: *mut T) { // Give ourselves some scratch space to work with let mut tmp: T = mem::uninit(); let t: *mut T = &mut tmp; // Perform the swap copy_nonoverlapping_memory(t, &*x, 1); copy_memory(x, &*y, 1); // `x` and `y` may overlap copy_nonoverlapping_memory(y, &*t, 1); // y and t now point to the same thing, but we need to completely forget `tmp` // because it's no longer relevant. cast::forget(tmp); } /// Replace the value at a mutable location with a new one, returning the old /// value, without deinitialising either. 
#[inline] pub unsafe fn replace<T>(dest: *mut T, mut src: T) -> T { mem::swap(cast::transmute(dest), &mut src); // cannot overlap src } /// Reads the value from `*src` and returns it. #[inline(always)] pub unsafe fn read<T>(src: *T) -> T { let mut tmp: T = mem::uninit(); copy_nonoverlapping_memory(&mut tmp, src, 1); tmp } /// Reads the value from `*src` and nulls it out. /// This currently prevents destructors from executing. #[inline(always)] pub unsafe fn read_and_zero<T>(dest: *mut T) -> T { // Copy the data out from `dest`: let tmp = read(&*dest); // Now zero out `dest`: zero_memory(dest, 1); tmp } /// Given a **T (pointer to an array of pointers), /// iterate through each *T, up to the provided `len`, /// passing to the provided callback function pub unsafe fn array_each_with_len<T>(arr: **T, len: uint, cb: |*T|) { if arr.is_null() { fail!("ptr::array_each_with_len failure: arr input is null pointer"); } //let start_ptr = *arr; for e in range(0, len) { let n = arr.offset(e as int); cb(*n); } } /// Given a null-pointer-terminated **T (pointer to /// an array of pointers), iterate through each *T, /// passing to the provided callback function /// /// # Safety Note /// /// This will only work with a null-terminated /// pointer array. pub unsafe fn array_each<T>(arr: **T, cb: |*T|) { if arr.is_null() { fail!("ptr::array_each_with_len failure: arr input is null pointer"); } let len = buf_len(arr); array_each_with_len(arr, len, cb); } /// Extension methods for raw pointers. pub trait RawPtr<T> { /// Returns the null pointer. fn null() -> Self; /// Returns true if the pointer is equal to the null pointer. fn is_null(&self) -> bool; /// Returns true if the pointer is not equal to the null pointer. fn is_not_null(&self) -> bool { !self.is_null() } /// Returns the value of this pointer (ie, the address it points to) fn to_uint(&self) -> uint; /// Returns `None` if the pointer is null, or else returns the value wrapped /// in `Some`. 
/// /// # Safety Notes /// /// While this method is useful for null-safety, it is important to note /// that this is still an unsafe operation because the returned value could /// be pointing to invalid memory. unsafe fn to_option(&self) -> Option<&T>; /// Calculates the offset from a pointer. The offset *must* be in-bounds of /// the object, or one-byte-past-the-end. `count` is in units of T; e.g. a /// `count` of 3 represents a pointer offset of `3 * sizeof::<T>()` bytes. unsafe fn offset(self, count: int) -> Self; } impl<T> RawPtr<T> for *T { #[inline] fn null() -> *T { null() } #[inline] fn is_null(&self) -> bool { *self == RawPtr::null() } #[inline] fn to_uint(&self) -> uint { *self as uint } #[inline] unsafe fn offset(self, count: int) -> *T { intrinsics::offset(self, count) } #[inline] unsafe fn to_option(&self) -> Option<&T> { if self.is_null() { None } else { Some(cast::transmute(*self)) } } } impl<T> RawPtr<T> for *mut T { #[inline] fn null() -> *mut T { mut_null() } #[inline] fn is_null(&self) -> bool { *self == RawPtr::null() } #[inline] fn to_uint(&self) -> uint { *self as uint } #[inline] unsafe fn offset(self, count: int) -> *mut T { intrinsics::offset(self as *T, count) as *mut T } #[inline] unsafe fn to_option(&self) -> Option<&T> { if self.is_null() { None } else { Some(cast::transmute(*self)) } } } // Equality for pointers #[cfg(not(test))] impl<T> Eq for *T { #[inline] fn eq(&self, other: &*T) -> bool { *self == *other } #[inline] fn ne(&self, other: &*T) -> bool { !self.eq(other) } } #[cfg(not(test))] impl<T> TotalEq for *T {} #[cfg(not(test))] impl<T> Eq for *mut T { #[inline] fn eq(&self, other: &*mut T) -> bool { *self == *other } #[inline] fn ne(&self, other: &*mut T) -> bool { !self.eq(other) } } #[cfg(not(test))] impl<T> TotalEq for *mut T {} // Equivalence for pointers #[cfg(not(test))] impl<T> Equiv<*mut T> for *T { fn equiv(&self, other: &*mut T) -> bool { self.to_uint() == other.to_uint() } } #[cfg(not(test))] impl<T> Equiv<*T> for 
*mut T { fn equiv(&self, other: &*T) -> bool { self.to_uint() == other.to_uint() } } // Equality for extern "C" fn pointers #[cfg(not(test))] mod externfnpointers { use cast; use cmp::Eq; impl<_R> Eq for extern "C" fn() -> _R { #[inline] fn eq(&self, other: &extern "C" fn() -> _R) -> bool { let self_: *() = unsafe { cast::transmute(*self) }; let other_: *() = unsafe { cast::transmute(*other) }; self_ == other_ } #[inline] fn ne(&self, other: &extern "C" fn() -> _R) -> bool { !self.eq(other) } } macro_rules! fnptreq( ($($p:ident),*) => { impl<_R,$($p),*> Eq for extern "C" fn($($p),*) -> _R { #[inline] fn eq(&self, other: &extern "C" fn($($p),*) -> _R) -> bool { let self_: *() = unsafe { cast::transmute(*self) }; let other_: *() = unsafe { cast::transmute(*other) }; self_ == other_ } #[inline] fn ne(&self, other: &extern "C" fn($($p),*) -> _R) -> bool { !self.eq(other) } } } ) fnptreq!(A) fnptreq!(A,B) fnptreq!(A,B,C) fnptreq!(A,B,C,D) fnptreq!(A,B,C,D,E) } // Comparison for pointers #[cfg(not(test))] impl<T> Ord for *T { #[inline] fn lt(&self, other: &*T) -> bool { *self < *other } #[inline] fn le(&self, other: &*T) -> bool { *self <= *other } #[inline] fn ge(&self, other: &*T) -> bool { *self >= *other } #[inline] fn gt(&self, other: &*T) -> bool { *self > *other } } #[cfg(not(test))] impl<T> Ord for *mut T { #[inline] fn lt(&self, other: &*mut T) -> bool { *self < *other } #[inline] fn le(&self, other: &*mut T) -> bool { *self <= *other } #[inline] fn ge(&self, other: &*mut T) -> bool { *self >= *other } #[inline] fn gt(&self, other: &*mut T) -> bool { *self > *other } } #[cfg(test)] pub mod ptr_tests { use super::*; use prelude::*; use c_str::ToCStr; use cast; use libc; use str; use slice::{ImmutableVector, MutableVector}; #[test] fn test() { unsafe { struct Pair { fst: int, snd: int }; let mut p = Pair {fst: 10, snd: 20}; let pptr: *mut Pair = &mut p; let iptr: *mut int = cast::transmute(pptr); assert_eq!(*iptr, 10); *iptr = 30; assert_eq!(*iptr, 30); 
assert_eq!(p.fst, 30); *pptr = Pair {fst: 50, snd: 60}; assert_eq!(*iptr, 50); assert_eq!(p.fst, 50); assert_eq!(p.snd, 60); let v0 = ~[32000u16, 32001u16, 32002u16]; let mut v1 = ~[0u16, 0u16, 0u16]; copy_memory(v1.as_mut_ptr().offset(1), v0.as_ptr().offset(1), 1); assert!((v1[0] == 0u16 && v1[1] == 32001u16 && v1[2] == 0u16)); copy_memory(v1.as_mut_ptr(), v0.as_ptr().offset(2), 1); assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 0u16)); copy_memory(v1.as_mut_ptr().offset(2), v0.as_ptr(), 1u); assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 32000u16)); } } #[test] fn test_position() { use libc::c_char; "hello".with_c_str(|p| { unsafe { assert!(2u == position(p, |c| *c == 'l' as c_char)); assert!(4u == position(p, |c| *c == 'o' as c_char)); assert!(5u == position(p, |c| *c == 0 as c_char)); } }) } #[test] fn test_buf_len() { "hello".with_c_str(|p0| { "there".with_c_str(|p1| { "thing".with_c_str(|p2| { let v = ~[p0, p1, p2, null()]; unsafe { assert_eq!(buf_len(v.as_ptr()), 3u); } }) }) }) } #[test] fn test_is_null() { let p: *int = null(); assert!(p.is_null()); assert!(!p.is_not_null()); let q = unsafe { p.offset(1) }; assert!(!q.is_null()); assert!(q.is_not_null()); let mp: *mut int = mut_null(); assert!(mp.is_null()); assert!(!mp.is_not_null()); let mq = unsafe { mp.offset(1) }; assert!(!mq.is_null()); assert!(mq.is_not_null()); } #[test] fn test_to_option() { unsafe { let p: *int = null(); assert_eq!(p.to_option(), None); let q: *int = &2; assert_eq!(q.to_option().unwrap(), &2); let p: *mut int = mut_null(); assert_eq!(p.to_option(), None); let q: *mut int = &mut 2; assert_eq!(q.to_option().unwrap(), &2); } } #[test] fn test_ptr_addition() { unsafe { let xs = ~[5, ..16]; let mut ptr = xs.as_ptr(); let end = ptr.offset(16); while ptr < end { assert_eq!(*ptr, 5); ptr = ptr.offset(1); } let mut xs_mut = xs.clone(); let mut m_ptr = xs_mut.as_mut_ptr(); let m_end = m_ptr.offset(16); while m_ptr < m_end { *m_ptr += 5; m_ptr = m_ptr.offset(1); } 
assert_eq!(xs_mut, ~[10, ..16]); } } #[test] fn test_ptr_subtraction() { unsafe { let xs = ~[0,1,2,3,4,5,6,7,8,9]; let mut idx = 9i8; let ptr = xs.as_ptr(); while idx >= 0i8 { assert_eq!(*(ptr.offset(idx as int)), idx as int); idx = idx - 1i8; } let mut xs_mut = xs.clone(); let m_start = xs_mut.as_mut_ptr(); let mut m_ptr = m_start.offset(9); while m_ptr >= m_start { *m_ptr += *m_ptr; m_ptr = m_ptr.offset(-1); } assert_eq!(xs_mut, ~[0,2,4,6,8,10,12,14,16,18]); } } #[test] fn test_ptr_array_each_with_len() { unsafe { let one = "oneOne".to_c_str(); let two = "twoTwo".to_c_str(); let three = "threeThree".to_c_str(); let arr = ~[ one.with_ref(|buf| buf), two.with_ref(|buf| buf), three.with_ref(|buf| buf), ]; let expected_arr = [ one, two, three ]; let mut ctr = 0; let mut iteration_count = 0; array_each_with_len(arr.as_ptr(), arr.len(), |e| { let actual = str::raw::from_c_str(e); let expected = expected_arr[ctr].with_ref(|buf| { str::raw::from_c_str(buf) }); debug!( "test_ptr_array_each_with_len e: {}, a: {}", expected, actual); assert_eq!(actual, expected); ctr += 1; iteration_count += 1; }); assert_eq!(iteration_count, 3u); } } #[test] fn test_ptr_array_each() { unsafe { let one = "oneOne".to_c_str(); let two = "twoTwo".to_c_str(); let three = "threeThree".to_c_str(); let arr = ~[ one.with_ref(|buf| buf), two.with_ref(|buf| buf), three.with_ref(|buf| buf), // fake a null terminator null(), ]; let expected_arr = [ one, two, three ]; let arr_ptr = arr.as_ptr(); let mut ctr = 0; let mut iteration_count = 0; array_each(arr_ptr, |e| { let actual = str::raw::from_c_str(e); let expected = expected_arr[ctr].with_ref(|buf| { str::raw::from_c_str(buf) }); debug!( "test_ptr_array_each e: {}, a: {}", expected, actual); assert_eq!(actual, expected); ctr += 1; iteration_count += 1; }); assert_eq!(iteration_count, 3); } } #[test] #[should_fail] fn test_ptr_array_each_with_len_null_ptr() { unsafe { array_each_with_len(0 as **libc::c_char, 1, |e| { str::raw::from_c_str(e); }); } } 
#[test] #[should_fail] fn test_ptr_array_each_null_ptr() { unsafe { array_each(0 as **libc::c_char, |e| { str::raw::from_c_str(e); }); } } #[test] fn test_set_memory() { let mut xs = [0u8, ..20]; let ptr = xs.as_mut_ptr(); unsafe { set_memory(ptr, 5u8, xs.len()); } assert!(xs == [5u8, ..20]); } }
27.85511
100
0.51528
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test() {\n unsafe {\n struct Pair {\n fst: int,\n snd: int\n };\n let mut p = Pair {fst: 10, snd: 20};\n let pptr: *mut Pair = &mut p;\n let iptr: *mut int = cast::transmute(pptr);\n assert_eq!(*iptr, 10);\n *iptr = 30;\n assert_eq!(*iptr, 30);\n assert_eq!(p.fst, 30);\n\n *pptr = Pair {fst: 50, snd: 60};\n assert_eq!(*iptr, 50);\n assert_eq!(p.fst, 50);\n assert_eq!(p.snd, 60);\n\n let v0 = ~[32000u16, 32001u16, 32002u16];\n let mut v1 = ~[0u16, 0u16, 0u16];\n\n copy_memory(v1.as_mut_ptr().offset(1),\n v0.as_ptr().offset(1), 1);\n assert!((v1[0] == 0u16 && v1[1] == 32001u16 && v1[2] == 0u16));\n copy_memory(v1.as_mut_ptr(),\n v0.as_ptr().offset(2), 1);\n assert!((v1[0] == 32002u16 && v1[1] == 32001u16 &&\n v1[2] == 0u16));\n copy_memory(v1.as_mut_ptr().offset(2),\n v0.as_ptr(), 1u);\n assert!((v1[0] == 32002u16 && v1[1] == 32001u16 &&\n v1[2] == 32000u16));\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_position() {\n use libc::c_char;\n\n \"hello\".with_c_str(|p| {\n unsafe {\n assert!(2u == position(p, |c| *c == 'l' as c_char));\n assert!(4u == position(p, |c| *c == 'o' as c_char));\n assert!(5u == position(p, |c| *c == 0 as c_char));\n }\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_buf_len() {\n \"hello\".with_c_str(|p0| {\n \"there\".with_c_str(|p1| {\n \"thing\".with_c_str(|p2| {\n let v = ~[p0, p1, p2, null()];\n unsafe {\n assert_eq!(buf_len(v.as_ptr()), 3u);\n }\n })\n })\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_is_null() {\n let p: *int = null();\n assert!(p.is_null());\n assert!(!p.is_not_null());\n\n let q = unsafe { p.offset(1) };\n assert!(!q.is_null());\n assert!(q.is_not_null());\n\n let mp: *mut int = mut_null();\n assert!(mp.is_null());\n assert!(!mp.is_not_null());\n\n let mq = unsafe { mp.offset(1) };\n assert!(!mq.is_null());\n assert!(mq.is_not_null());\n }\n}", "#[cfg(test)]\nmod tests 
{\n use super::*;\n #[test]\n fn test_to_option() {\n unsafe {\n let p: *int = null();\n assert_eq!(p.to_option(), None);\n\n let q: *int = &2;\n assert_eq!(q.to_option().unwrap(), &2);\n\n let p: *mut int = mut_null();\n assert_eq!(p.to_option(), None);\n\n let q: *mut int = &mut 2;\n assert_eq!(q.to_option().unwrap(), &2);\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ptr_addition() {\n unsafe {\n let xs = ~[5, ..16];\n let mut ptr = xs.as_ptr();\n let end = ptr.offset(16);\n\n while ptr < end {\n assert_eq!(*ptr, 5);\n ptr = ptr.offset(1);\n }\n\n let mut xs_mut = xs.clone();\n let mut m_ptr = xs_mut.as_mut_ptr();\n let m_end = m_ptr.offset(16);\n\n while m_ptr < m_end {\n *m_ptr += 5;\n m_ptr = m_ptr.offset(1);\n }\n\n assert_eq!(xs_mut, ~[10, ..16]);\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ptr_subtraction() {\n unsafe {\n let xs = ~[0,1,2,3,4,5,6,7,8,9];\n let mut idx = 9i8;\n let ptr = xs.as_ptr();\n\n while idx >= 0i8 {\n assert_eq!(*(ptr.offset(idx as int)), idx as int);\n idx = idx - 1i8;\n }\n\n let mut xs_mut = xs.clone();\n let m_start = xs_mut.as_mut_ptr();\n let mut m_ptr = m_start.offset(9);\n\n while m_ptr >= m_start {\n *m_ptr += *m_ptr;\n m_ptr = m_ptr.offset(-1);\n }\n\n assert_eq!(xs_mut, ~[0,2,4,6,8,10,12,14,16,18]);\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ptr_array_each_with_len() {\n unsafe {\n let one = \"oneOne\".to_c_str();\n let two = \"twoTwo\".to_c_str();\n let three = \"threeThree\".to_c_str();\n let arr = ~[\n one.with_ref(|buf| buf),\n two.with_ref(|buf| buf),\n three.with_ref(|buf| buf),\n ];\n let expected_arr = [\n one, two, three\n ];\n\n let mut ctr = 0;\n let mut iteration_count = 0;\n array_each_with_len(arr.as_ptr(), arr.len(), |e| {\n let actual = str::raw::from_c_str(e);\n let expected = expected_arr[ctr].with_ref(|buf| {\n str::raw::from_c_str(buf)\n });\n debug!(\n \"test_ptr_array_each_with_len e: {}, a: {}\",\n 
expected, actual);\n assert_eq!(actual, expected);\n ctr += 1;\n iteration_count += 1;\n });\n assert_eq!(iteration_count, 3u);\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ptr_array_each() {\n unsafe {\n let one = \"oneOne\".to_c_str();\n let two = \"twoTwo\".to_c_str();\n let three = \"threeThree\".to_c_str();\n let arr = ~[\n one.with_ref(|buf| buf),\n two.with_ref(|buf| buf),\n three.with_ref(|buf| buf),\n // fake a null terminator\n null(),\n ];\n let expected_arr = [\n one, two, three\n ];\n\n let arr_ptr = arr.as_ptr();\n let mut ctr = 0;\n let mut iteration_count = 0;\n array_each(arr_ptr, |e| {\n let actual = str::raw::from_c_str(e);\n let expected = expected_arr[ctr].with_ref(|buf| {\n str::raw::from_c_str(buf)\n });\n debug!(\n \"test_ptr_array_each e: {}, a: {}\",\n expected, actual);\n assert_eq!(actual, expected);\n ctr += 1;\n iteration_count += 1;\n });\n assert_eq!(iteration_count, 3);\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ptr_array_each_with_len_null_ptr() {\n unsafe {\n array_each_with_len(0 as **libc::c_char, 1, |e| {\n str::raw::from_c_str(e);\n });\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ptr_array_each_null_ptr() {\n unsafe {\n array_each(0 as **libc::c_char, |e| {\n str::raw::from_c_str(e);\n });\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_set_memory() {\n let mut xs = [0u8, ..20];\n let ptr = xs.as_mut_ptr();\n unsafe { set_memory(ptr, 5u8, xs.len()); }\n assert!(xs == [5u8, ..20]);\n }\n}" ]
f707710cf25cbc9e8b44a56ce68a941334d997f8
9,511
rs
Rust
object_store/src/path/parsed.rs
jacobmarble/influxdb_iox
9868e18d0accc8d08eaa2ca87b6380ac45cf9078
[ "Apache-2.0", "MIT" ]
null
null
null
object_store/src/path/parsed.rs
jacobmarble/influxdb_iox
9868e18d0accc8d08eaa2ca87b6380ac45cf9078
[ "Apache-2.0", "MIT" ]
null
null
null
object_store/src/path/parsed.rs
jacobmarble/influxdb_iox
9868e18d0accc8d08eaa2ca87b6380ac45cf9078
[ "Apache-2.0", "MIT" ]
null
null
null
use super::{ObjectStorePath, PathPart, DELIMITER}; use itertools::Itertools; /// A path stored as a collection of 0 or more directories and 0 or 1 file name #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Default)] pub struct DirsAndFileName { pub(crate) directories: Vec<PathPart>, pub(crate) file_name: Option<PathPart>, } impl ObjectStorePath for DirsAndFileName { fn set_file_name(&mut self, part: impl Into<String>) { let part = part.into(); self.file_name = Some((&*part).into()); } fn push_dir(&mut self, part: impl Into<String>) { let part = part.into(); self.directories.push((&*part).into()); } fn push_all_dirs<'a>(&mut self, parts: impl AsRef<[&'a str]>) { self.directories .extend(parts.as_ref().iter().map(|&v| v.into())); } fn display(&self) -> String { let mut s = self .directories .iter() .map(PathPart::encoded) .join(DELIMITER); if !s.is_empty() { s.push_str(DELIMITER); } if let Some(file_name) = &self.file_name { s.push_str(file_name.encoded()); } s } } impl DirsAndFileName { pub(crate) fn prefix_matches(&self, prefix: &Self) -> bool { let diff = itertools::diff_with( self.directories.iter(), prefix.directories.iter(), |a, b| a == b, ); use itertools::Diff; match diff { None => match (self.file_name.as_ref(), prefix.file_name.as_ref()) { (Some(self_file), Some(prefix_file)) => { self_file.encoded().starts_with(prefix_file.encoded()) } (Some(_self_file), None) => true, (None, Some(_prefix_file)) => false, (None, None) => true, }, Some(Diff::Shorter(_, mut remaining_self)) => { let next_dir = remaining_self .next() .expect("must have at least one mismatch to be in this case"); match prefix.file_name.as_ref() { Some(prefix_file) => next_dir.encoded().starts_with(prefix_file.encoded()), None => true, } } Some(Diff::FirstMismatch(_, mut remaining_self, mut remaining_prefix)) => { let first_prefix = remaining_prefix .next() .expect("must have at least one mismatch to be in this case"); // There must not be any other remaining parts in the prefix 
remaining_prefix.next().is_none() // and the next item in self must start with the last item in the prefix && remaining_self .next() .expect("must be at least one value") .encoded() .starts_with(first_prefix.encoded()) } _ => false, } } /// Returns all directory and file name `PathParts` in `self` after the /// specified `prefix`. Ignores any `file_name` part of `prefix`. /// Returns `None` if `self` dosen't start with `prefix`. pub(crate) fn parts_after_prefix(&self, prefix: &Self) -> Option<Vec<PathPart>> { let mut dirs_iter = self.directories.iter(); let mut prefix_dirs_iter = prefix.directories.iter(); let mut parts = vec![]; for dir in &mut dirs_iter { let pre = prefix_dirs_iter.next(); match pre { None => { parts.push(dir.to_owned()); break; } Some(p) if p == dir => continue, Some(_) => return None, } } parts.extend(dirs_iter.cloned()); if let Some(file_name) = &self.file_name { parts.push(file_name.to_owned()); } Some(parts) } /// Add a `PathPart` to the end of the path's directories. pub(crate) fn push_part_as_dir(&mut self, part: &PathPart) { self.directories.push(part.to_owned()); } /// Remove the file name, if any. 
pub(crate) fn unset_file_name(&mut self) { self.file_name = None; } } #[cfg(test)] mod tests { use super::*; #[test] fn parts_after_prefix_behavior() { let mut existing_path = DirsAndFileName::default(); existing_path.push_all_dirs(&["apple", "bear", "cow", "dog"]); existing_path.file_name = Some("egg.json".into()); // Prefix with one directory let mut prefix = DirsAndFileName::default(); prefix.push_dir("apple"); let expected_parts: Vec<PathPart> = vec!["bear", "cow", "dog", "egg.json"] .into_iter() .map(Into::into) .collect(); let parts = existing_path.parts_after_prefix(&prefix).unwrap(); assert_eq!(parts, expected_parts); // Prefix with two directories let mut prefix = DirsAndFileName::default(); prefix.push_all_dirs(&["apple", "bear"]); let expected_parts: Vec<PathPart> = vec!["cow", "dog", "egg.json"] .into_iter() .map(Into::into) .collect(); let parts = existing_path.parts_after_prefix(&prefix).unwrap(); assert_eq!(parts, expected_parts); // Not a prefix let mut prefix = DirsAndFileName::default(); prefix.push_dir("cow"); assert!(existing_path.parts_after_prefix(&prefix).is_none()); // Prefix with a partial directory let mut prefix = DirsAndFileName::default(); prefix.push_dir("ap"); assert!(existing_path.parts_after_prefix(&prefix).is_none()); // Prefix matches but there aren't any parts after it let mut existing_path = DirsAndFileName::default(); existing_path.push_all_dirs(&["apple", "bear", "cow", "dog"]); let prefix = existing_path.clone(); let parts = existing_path.parts_after_prefix(&prefix).unwrap(); assert!(parts.is_empty()); } #[test] fn prefix_matches() { let mut haystack = DirsAndFileName::default(); haystack.push_all_dirs(&["foo/bar", "baz%2Ftest", "something"]); // self starts with self assert!( haystack.prefix_matches(&haystack), "{:?} should have started with {:?}", haystack, haystack ); // a longer prefix doesn't match let mut needle = haystack.clone(); needle.push_dir("longer now"); assert!( !haystack.prefix_matches(&needle), "{:?} 
shouldn't have started with {:?}", haystack, needle ); // one dir prefix matches let mut needle = DirsAndFileName::default(); needle.push_dir("foo/bar"); assert!( haystack.prefix_matches(&needle), "{:?} should have started with {:?}", haystack, needle ); // two dir prefix matches needle.push_dir("baz%2Ftest"); assert!( haystack.prefix_matches(&needle), "{:?} should have started with {:?}", haystack, needle ); // partial dir prefix matches let mut needle = DirsAndFileName::default(); needle.push_dir("f"); assert!( haystack.prefix_matches(&needle), "{:?} should have started with {:?}", haystack, needle ); // one dir and one partial dir matches let mut needle = DirsAndFileName::default(); needle.push_all_dirs(&["foo/bar", "baz"]); assert!( haystack.prefix_matches(&needle), "{:?} should have started with {:?}", haystack, needle ); } #[test] fn prefix_matches_with_file_name() { let mut haystack = DirsAndFileName::default(); haystack.push_all_dirs(&["foo/bar", "baz%2Ftest", "something"]); let mut needle = haystack.clone(); // All directories match and file name is a prefix haystack.set_file_name("foo.segment"); needle.set_file_name("foo"); assert!( haystack.prefix_matches(&needle), "{:?} should have started with {:?}", haystack, needle ); // All directories match but file name is not a prefix needle.set_file_name("e"); assert!( !haystack.prefix_matches(&needle), "{:?} should not have started with {:?}", haystack, needle ); // Not all directories match; file name is a prefix of the next directory; this // matches let mut needle = DirsAndFileName::default(); needle.push_all_dirs(&["foo/bar", "baz%2Ftest"]); needle.set_file_name("s"); assert!( haystack.prefix_matches(&needle), "{:?} should have started with {:?}", haystack, needle ); // Not all directories match; file name is NOT a prefix of the next directory; // no match needle.set_file_name("p"); assert!( !haystack.prefix_matches(&needle), "{:?} should not have started with {:?}", haystack, needle ); } }
32.131757
95
0.533908
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parts_after_prefix_behavior() {\n let mut existing_path = DirsAndFileName::default();\n existing_path.push_all_dirs(&[\"apple\", \"bear\", \"cow\", \"dog\"]);\n existing_path.file_name = Some(\"egg.json\".into());\n\n // Prefix with one directory\n let mut prefix = DirsAndFileName::default();\n prefix.push_dir(\"apple\");\n let expected_parts: Vec<PathPart> = vec![\"bear\", \"cow\", \"dog\", \"egg.json\"]\n .into_iter()\n .map(Into::into)\n .collect();\n let parts = existing_path.parts_after_prefix(&prefix).unwrap();\n assert_eq!(parts, expected_parts);\n\n // Prefix with two directories\n let mut prefix = DirsAndFileName::default();\n prefix.push_all_dirs(&[\"apple\", \"bear\"]);\n let expected_parts: Vec<PathPart> = vec![\"cow\", \"dog\", \"egg.json\"]\n .into_iter()\n .map(Into::into)\n .collect();\n let parts = existing_path.parts_after_prefix(&prefix).unwrap();\n assert_eq!(parts, expected_parts);\n\n // Not a prefix\n let mut prefix = DirsAndFileName::default();\n prefix.push_dir(\"cow\");\n assert!(existing_path.parts_after_prefix(&prefix).is_none());\n\n // Prefix with a partial directory\n let mut prefix = DirsAndFileName::default();\n prefix.push_dir(\"ap\");\n assert!(existing_path.parts_after_prefix(&prefix).is_none());\n\n // Prefix matches but there aren't any parts after it\n let mut existing_path = DirsAndFileName::default();\n existing_path.push_all_dirs(&[\"apple\", \"bear\", \"cow\", \"dog\"]);\n let prefix = existing_path.clone();\n let parts = existing_path.parts_after_prefix(&prefix).unwrap();\n assert!(parts.is_empty());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn prefix_matches() {\n let mut haystack = DirsAndFileName::default();\n haystack.push_all_dirs(&[\"foo/bar\", \"baz%2Ftest\", \"something\"]);\n\n // self starts with self\n assert!(\n haystack.prefix_matches(&haystack),\n \"{:?} should have started with {:?}\",\n haystack,\n haystack\n );\n\n // a 
longer prefix doesn't match\n let mut needle = haystack.clone();\n needle.push_dir(\"longer now\");\n assert!(\n !haystack.prefix_matches(&needle),\n \"{:?} shouldn't have started with {:?}\",\n haystack,\n needle\n );\n\n // one dir prefix matches\n let mut needle = DirsAndFileName::default();\n needle.push_dir(\"foo/bar\");\n assert!(\n haystack.prefix_matches(&needle),\n \"{:?} should have started with {:?}\",\n haystack,\n needle\n );\n\n // two dir prefix matches\n needle.push_dir(\"baz%2Ftest\");\n assert!(\n haystack.prefix_matches(&needle),\n \"{:?} should have started with {:?}\",\n haystack,\n needle\n );\n\n // partial dir prefix matches\n let mut needle = DirsAndFileName::default();\n needle.push_dir(\"f\");\n assert!(\n haystack.prefix_matches(&needle),\n \"{:?} should have started with {:?}\",\n haystack,\n needle\n );\n\n // one dir and one partial dir matches\n let mut needle = DirsAndFileName::default();\n needle.push_all_dirs(&[\"foo/bar\", \"baz\"]);\n assert!(\n haystack.prefix_matches(&needle),\n \"{:?} should have started with {:?}\",\n haystack,\n needle\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn prefix_matches_with_file_name() {\n let mut haystack = DirsAndFileName::default();\n haystack.push_all_dirs(&[\"foo/bar\", \"baz%2Ftest\", \"something\"]);\n\n let mut needle = haystack.clone();\n\n // All directories match and file name is a prefix\n haystack.set_file_name(\"foo.segment\");\n needle.set_file_name(\"foo\");\n\n assert!(\n haystack.prefix_matches(&needle),\n \"{:?} should have started with {:?}\",\n haystack,\n needle\n );\n\n // All directories match but file name is not a prefix\n needle.set_file_name(\"e\");\n\n assert!(\n !haystack.prefix_matches(&needle),\n \"{:?} should not have started with {:?}\",\n haystack,\n needle\n );\n\n // Not all directories match; file name is a prefix of the next directory; this\n // matches\n let mut needle = DirsAndFileName::default();\n 
needle.push_all_dirs(&[\"foo/bar\", \"baz%2Ftest\"]);\n needle.set_file_name(\"s\");\n\n assert!(\n haystack.prefix_matches(&needle),\n \"{:?} should have started with {:?}\",\n haystack,\n needle\n );\n\n // Not all directories match; file name is NOT a prefix of the next directory;\n // no match\n needle.set_file_name(\"p\");\n\n assert!(\n !haystack.prefix_matches(&needle),\n \"{:?} should not have started with {:?}\",\n haystack,\n needle\n );\n }\n}" ]
f707aca217eee1788f83340bec2c4116ec13d89b
2,799
rs
Rust
tasks/zumkeller-numbers/src/main.rs
kaiuri/rust-rosetta
67862e06956f955cdf34743a7e5c7c93e4077b64
[ "Unlicense" ]
412
2015-01-02T10:29:34.000Z
2019-09-05T06:56:04.000Z
tasks/zumkeller-numbers/src/main.rs
kaiuri/rust-rosetta
67862e06956f955cdf34743a7e5c7c93e4077b64
[ "Unlicense" ]
290
2015-01-02T20:32:42.000Z
2019-09-05T14:17:06.000Z
tasks/zumkeller-numbers/src/main.rs
kaiuri/rust-rosetta
67862e06956f955cdf34743a7e5c7c93e4077b64
[ "Unlicense" ]
142
2015-01-10T21:16:16.000Z
2019-09-05T06:56:07.000Z
use std::convert::TryInto; /// Gets all divisors of a number, including itself fn get_divisors(n: u32) -> Vec<u32> { let mut results = Vec::new(); for i in 1..(n / 2 + 1) { if n % i == 0 { results.push(i); } } results.push(n); results } /// Calculates whether the divisors can be partitioned into two disjoint /// sets that sum to the same value fn is_summable(x: i32, divisors: &[u32]) -> bool { if !divisors.is_empty() { if divisors.contains(&(x as u32)) { return true; } else if let Some((first, t)) = divisors.split_first() { return is_summable(x - *first as i32, t) || is_summable(x, t); } } false } /// Calculates whether the number is a Zumkeller number /// Zumkeller numbers are the set of numbers whose divisors can be partitioned /// into two disjoint sets that sum to the same value. Each sum must contain /// divisor values that are not in the other sum, and all of the divisors must /// be in one or the other. fn is_zumkeller_number(number: u32) -> bool { if number % 18 == 6 || number % 18 == 12 { return true; } let div = get_divisors(number); let divisor_sum: u32 = div.iter().sum(); if divisor_sum == 0 { return false; } if divisor_sum % 2 == 1 { return false; } // numbers where n is odd and the abundance is even are Zumkeller numbers let abundance = divisor_sum as i32 - 2 * number as i32; if number % 2 == 1 && abundance > 0 && abundance % 2 == 0 { return true; } let half = divisor_sum / 2; return div.contains(&half) || (div.iter().filter(|&&d| d < half).count() > 0 && is_summable(half.try_into().unwrap(), &div)); } fn main() { println!("\nFirst 220 Zumkeller numbers:"); let mut counter: u32 = 0; let mut i: u32 = 0; while counter < 220 { if is_zumkeller_number(i) { print!("{:>3}", i); counter += 1; print!("{}", if counter % 20 == 0 { "\n" } else { "," }); } i += 1; } println!("\nFirst 40 odd Zumkeller numbers:"); let mut counter: u32 = 0; let mut i: u32 = 3; while counter < 40 { if is_zumkeller_number(i) { print!("{:>5}", i); counter += 1; print!("{}", if counter % 
20 == 0 { "\n" } else { "," }); } i += 2; } } #[cfg(test)] mod tests { use super::is_zumkeller_number; #[test] fn test_is_zumkeller() { assert_eq!(is_zumkeller_number(0), false); assert_eq!(is_zumkeller_number(6), true); assert_eq!(is_zumkeller_number(20), true); assert_eq!(is_zumkeller_number(21), false); assert_eq!(is_zumkeller_number(198), true); } }
28.272727
78
0.557342
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_is_zumkeller() {\n assert_eq!(is_zumkeller_number(0), false);\n assert_eq!(is_zumkeller_number(6), true);\n assert_eq!(is_zumkeller_number(20), true);\n assert_eq!(is_zumkeller_number(21), false);\n assert_eq!(is_zumkeller_number(198), true);\n }\n}" ]
f707b18e8abe28cfcee4bc84c64e8ec7a39c3de2
8,151
rs
Rust
clap_complete/src/generator/utils.rs
isudzumi/clap
a5a56d58651281cb7a0f07baae5bfa8ca7bc8e82
[ "Apache-2.0", "MIT" ]
null
null
null
clap_complete/src/generator/utils.rs
isudzumi/clap
a5a56d58651281cb7a0f07baae5bfa8ca7bc8e82
[ "Apache-2.0", "MIT" ]
null
null
null
clap_complete/src/generator/utils.rs
isudzumi/clap
a5a56d58651281cb7a0f07baae5bfa8ca7bc8e82
[ "Apache-2.0", "MIT" ]
null
null
null
//! Helpers for writing generators use clap::{App, Arg}; /// Gets all subcommands including child subcommands in the form of `("name", "bin_name")`. /// /// Subcommand `rustup toolchain install` would be converted to /// `("install", "rustup toolchain install")`. pub fn all_subcommands(app: &App) -> Vec<(String, String)> { let mut subcmds: Vec<_> = subcommands(app); for sc_v in app.get_subcommands().map(all_subcommands) { subcmds.extend(sc_v); } subcmds } /// Finds the subcommand [`clap::App`] from the given [`clap::App`] with the given path. /// /// **NOTE:** `path` should not contain the root `bin_name`. pub fn find_subcommand_with_path<'help, 'app>( p: &'app App<'help>, path: Vec<&str>, ) -> &'app App<'help> { let mut app = p; for sc in path { app = app.find_subcommand(sc).unwrap(); } app } /// Gets subcommands of [`clap::App`] in the form of `("name", "bin_name")`. /// /// Subcommand `rustup toolchain install` would be converted to /// `("install", "rustup toolchain install")`. pub fn subcommands(p: &App) -> Vec<(String, String)> { debug!("subcommands: name={}", p.get_name()); debug!("subcommands: Has subcommands...{:?}", p.has_subcommands()); let mut subcmds = vec![]; if !p.has_subcommands() { return subcmds; } for sc in p.get_subcommands() { let sc_bin_name = sc.get_bin_name().unwrap(); debug!( "subcommands:iter: name={}, bin_name={}", sc.get_name(), sc_bin_name ); subcmds.push((sc.get_name().to_string(), sc_bin_name.to_string())); } subcmds } /// Gets all the short options, their visible aliases and flags of a [`clap::App`]. /// Includes `h` and `V` depending on the [`clap::AppSettings`]. 
pub fn shorts_and_visible_aliases(p: &App) -> Vec<char> { debug!("shorts: name={}", p.get_name()); p.get_arguments() .filter_map(|a| { if !a.is_positional() { if a.get_visible_short_aliases().is_some() && a.get_short().is_some() { let mut shorts_and_visible_aliases = a.get_visible_short_aliases().unwrap(); shorts_and_visible_aliases.push(a.get_short().unwrap()); Some(shorts_and_visible_aliases) } else if a.get_visible_short_aliases().is_none() && a.get_short().is_some() { Some(vec![a.get_short().unwrap()]) } else { None } } else { None } }) .flatten() .collect() } /// Gets all the long options, their visible aliases and flags of a [`clap::App`]. /// Includes `help` and `version` depending on the [`clap::AppSettings`]. pub fn longs_and_visible_aliases(p: &App) -> Vec<String> { debug!("longs: name={}", p.get_name()); p.get_arguments() .filter_map(|a| { if !a.is_positional() { if a.get_visible_aliases().is_some() && a.get_long().is_some() { let mut visible_aliases: Vec<_> = a .get_visible_aliases() .unwrap() .into_iter() .map(|s| s.to_string()) .collect(); visible_aliases.push(a.get_long().unwrap().to_string()); Some(visible_aliases) } else if a.get_visible_aliases().is_none() && a.get_long().is_some() { Some(vec![a.get_long().unwrap().to_string()]) } else { None } } else { None } }) .flatten() .collect() } /// Gets all the flags of a [`clap::App`](App). /// Includes `help` and `version` depending on the [`clap::AppSettings`]. 
pub fn flags<'help>(p: &App<'help>) -> Vec<Arg<'help>> { debug!("flags: name={}", p.get_name()); p.get_arguments() .filter(|a| !a.is_takes_value_set() && !a.is_positional()) .cloned() .collect() } #[cfg(test)] mod tests { use super::*; use clap::Arg; use pretty_assertions::assert_eq; fn common_app() -> App<'static> { App::new("myapp") .subcommand( App::new("test").subcommand(App::new("config")).arg( Arg::new("file") .short('f') .short_alias('c') .visible_short_alias('p') .long("file") .visible_alias("path"), ), ) .subcommand(App::new("hello")) .bin_name("my-app") } fn built() -> App<'static> { let mut app = common_app(); app._build_all(); app } fn built_with_version() -> App<'static> { let mut app = common_app().version("3.0"); app._build_all(); app } #[test] fn test_subcommands() { let app = built_with_version(); assert_eq!( subcommands(&app), vec![ ("test".to_string(), "my-app test".to_string()), ("hello".to_string(), "my-app hello".to_string()), ("help".to_string(), "my-app help".to_string()), ] ); } #[test] fn test_all_subcommands() { let app = built_with_version(); assert_eq!( all_subcommands(&app), vec![ ("test".to_string(), "my-app test".to_string()), ("hello".to_string(), "my-app hello".to_string()), ("help".to_string(), "my-app help".to_string()), ("config".to_string(), "my-app test config".to_string()), ("help".to_string(), "my-app test help".to_string()), ] ); } #[test] fn test_find_subcommand_with_path() { let app = built_with_version(); let sc_app = find_subcommand_with_path(&app, "test config".split(' ').collect()); assert_eq!(sc_app.get_name(), "config"); } #[test] fn test_flags() { let app = built_with_version(); let actual_flags = flags(&app); assert_eq!(actual_flags.len(), 2); assert_eq!(actual_flags[0].get_long(), Some("help")); assert_eq!(actual_flags[1].get_long(), Some("version")); let sc_flags = flags(find_subcommand_with_path(&app, vec!["test"])); assert_eq!(sc_flags.len(), 2); assert_eq!(sc_flags[0].get_long(), Some("file")); 
assert_eq!(sc_flags[1].get_long(), Some("help")); } #[test] fn test_flag_subcommand() { let app = built(); let actual_flags = flags(&app); assert_eq!(actual_flags.len(), 1); assert_eq!(actual_flags[0].get_long(), Some("help")); let sc_flags = flags(find_subcommand_with_path(&app, vec!["test"])); assert_eq!(sc_flags.len(), 2); assert_eq!(sc_flags[0].get_long(), Some("file")); assert_eq!(sc_flags[1].get_long(), Some("help")); } #[test] fn test_shorts() { let app = built_with_version(); let shorts = shorts_and_visible_aliases(&app); assert_eq!(shorts.len(), 2); assert_eq!(shorts[0], 'h'); assert_eq!(shorts[1], 'V'); let sc_shorts = shorts_and_visible_aliases(find_subcommand_with_path(&app, vec!["test"])); assert_eq!(sc_shorts.len(), 3); assert_eq!(sc_shorts[0], 'p'); assert_eq!(sc_shorts[1], 'f'); assert_eq!(sc_shorts[2], 'h'); } #[test] fn test_longs() { let app = built_with_version(); let longs = longs_and_visible_aliases(&app); assert_eq!(longs.len(), 2); assert_eq!(longs[0], "help"); assert_eq!(longs[1], "version"); let sc_longs = longs_and_visible_aliases(find_subcommand_with_path(&app, vec!["test"])); assert_eq!(sc_longs.len(), 3); assert_eq!(sc_longs[0], "path"); assert_eq!(sc_longs[1], "file"); assert_eq!(sc_longs[2], "help"); } }
30.414179
98
0.528033
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_subcommands() {\n let app = built_with_version();\n\n assert_eq!(\n subcommands(&app),\n vec![\n (\"test\".to_string(), \"my-app test\".to_string()),\n (\"hello\".to_string(), \"my-app hello\".to_string()),\n (\"help\".to_string(), \"my-app help\".to_string()),\n ]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_all_subcommands() {\n let app = built_with_version();\n\n assert_eq!(\n all_subcommands(&app),\n vec![\n (\"test\".to_string(), \"my-app test\".to_string()),\n (\"hello\".to_string(), \"my-app hello\".to_string()),\n (\"help\".to_string(), \"my-app help\".to_string()),\n (\"config\".to_string(), \"my-app test config\".to_string()),\n (\"help\".to_string(), \"my-app test help\".to_string()),\n ]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_find_subcommand_with_path() {\n let app = built_with_version();\n let sc_app = find_subcommand_with_path(&app, \"test config\".split(' ').collect());\n\n assert_eq!(sc_app.get_name(), \"config\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_flags() {\n let app = built_with_version();\n let actual_flags = flags(&app);\n\n assert_eq!(actual_flags.len(), 2);\n assert_eq!(actual_flags[0].get_long(), Some(\"help\"));\n assert_eq!(actual_flags[1].get_long(), Some(\"version\"));\n\n let sc_flags = flags(find_subcommand_with_path(&app, vec![\"test\"]));\n\n assert_eq!(sc_flags.len(), 2);\n assert_eq!(sc_flags[0].get_long(), Some(\"file\"));\n assert_eq!(sc_flags[1].get_long(), Some(\"help\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_flag_subcommand() {\n let app = built();\n let actual_flags = flags(&app);\n\n assert_eq!(actual_flags.len(), 1);\n assert_eq!(actual_flags[0].get_long(), Some(\"help\"));\n\n let sc_flags = flags(find_subcommand_with_path(&app, vec![\"test\"]));\n\n assert_eq!(sc_flags.len(), 2);\n assert_eq!(sc_flags[0].get_long(), 
Some(\"file\"));\n assert_eq!(sc_flags[1].get_long(), Some(\"help\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_shorts() {\n let app = built_with_version();\n let shorts = shorts_and_visible_aliases(&app);\n\n assert_eq!(shorts.len(), 2);\n assert_eq!(shorts[0], 'h');\n assert_eq!(shorts[1], 'V');\n\n let sc_shorts = shorts_and_visible_aliases(find_subcommand_with_path(&app, vec![\"test\"]));\n\n assert_eq!(sc_shorts.len(), 3);\n assert_eq!(sc_shorts[0], 'p');\n assert_eq!(sc_shorts[1], 'f');\n assert_eq!(sc_shorts[2], 'h');\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_longs() {\n let app = built_with_version();\n let longs = longs_and_visible_aliases(&app);\n\n assert_eq!(longs.len(), 2);\n assert_eq!(longs[0], \"help\");\n assert_eq!(longs[1], \"version\");\n\n let sc_longs = longs_and_visible_aliases(find_subcommand_with_path(&app, vec![\"test\"]));\n\n assert_eq!(sc_longs.len(), 3);\n assert_eq!(sc_longs[0], \"path\");\n assert_eq!(sc_longs[1], \"file\");\n assert_eq!(sc_longs[2], \"help\");\n }\n}" ]
f707b436301947711a2fd0cc285420341657f46f
2,394
rs
Rust
stack_priority_queue/src/solution/s0385_mini_parser.rs
kaifoon/leetcode
e251ef6cdb78cd4800a39a0fcc313b32125af51d
[ "Apache-2.0" ]
null
null
null
stack_priority_queue/src/solution/s0385_mini_parser.rs
kaifoon/leetcode
e251ef6cdb78cd4800a39a0fcc313b32125af51d
[ "Apache-2.0" ]
null
null
null
stack_priority_queue/src/solution/s0385_mini_parser.rs
kaifoon/leetcode
e251ef6cdb78cd4800a39a0fcc313b32125af51d
[ "Apache-2.0" ]
null
null
null
#![allow(unused)] pub struct Solution {} #[derive(Debug, PartialEq, Eq)] pub enum NestedInteger { Int(i32), List(Vec<NestedInteger>), } impl Solution { // Time O(N), Space O(N) Iterative Solution pub fn deserialize(s: String) -> NestedInteger { if !&s.starts_with("[") { return NestedInteger::Int(s.parse::<i32>().unwrap()); } let mut stack: Vec<NestedInteger> = vec![]; let mut digit_str: String = String::new(); for c in s.chars() { if c == '[' { stack.push(NestedInteger::List(vec![])); } else if c == '-' || c.is_digit(10) { digit_str.push(c); } else if c == ',' { if !digit_str.is_empty() { if let Some(v) = stack.last_mut() { if let NestedInteger::List(n) = v { n.push(NestedInteger::Int(digit_str.parse::<i32>().unwrap())); } } digit_str.truncate(0); } } else { if !digit_str.is_empty() { if let Some(v) = stack.last_mut() { if let NestedInteger::List(n) = v { n.push(NestedInteger::Int(digit_str.parse::<i32>().unwrap())); } } digit_str.truncate(0); } let n = stack.pop().unwrap(); if stack.is_empty() { return n; } else if let Some(v) = stack.last_mut() { if let NestedInteger::List(nst) = v { nst.push(n); } } } } NestedInteger::Int(-1) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_385() { assert_eq!( Solution::deserialize("324".to_string()), NestedInteger::Int(324) ); assert_eq!( Solution::deserialize("[123,[456,[789]]]".to_string()), NestedInteger::List(vec![ NestedInteger::Int(123), NestedInteger::List(vec![ NestedInteger::Int(456), NestedInteger::List(vec![NestedInteger::Int(789)]) ]) ]) ); } }
30.692308
90
0.419799
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_385() {\n assert_eq!(\n Solution::deserialize(\"324\".to_string()),\n NestedInteger::Int(324)\n );\n assert_eq!(\n Solution::deserialize(\"[123,[456,[789]]]\".to_string()),\n NestedInteger::List(vec![\n NestedInteger::Int(123),\n NestedInteger::List(vec![\n NestedInteger::Int(456),\n NestedInteger::List(vec![NestedInteger::Int(789)])\n ])\n ])\n );\n }\n}" ]
f707e72674925135019f3c2889f561b4debf3d29
12,186
rs
Rust
cryptapath/src/targets/prince.rs
Simula-UiB/CRHS
8f3dd34c8b99680188d9314c6897e0c790f5358f
[ "MIT" ]
5
2019-11-22T09:22:21.000Z
2021-04-09T12:56:52.000Z
cryptapath/src/targets/prince.rs
Simula-UiB/CRHS
8f3dd34c8b99680188d9314c6897e0c790f5358f
[ "MIT" ]
null
null
null
cryptapath/src/targets/prince.rs
Simula-UiB/CRHS
8f3dd34c8b99680188d9314c6897e0c790f5358f
[ "MIT" ]
4
2020-04-30T13:04:34.000Z
2021-04-11T01:26:27.000Z
use crate::sbox::Sbox; use crate::targets::Cipher; use crate::{bit, bit::Bit, bit::*}; use std::cell::RefCell; pub struct Prince { n_rounds: usize, message_length: usize, key_length: usize, constants: Vec<Vec<Bit>>, inv_table: Vec<u8>, m_prime: Vec<String>, whitening: bool, sbox: RefCell<Sbox>, } macro_rules! binary_matrix { [$([$head:expr;$row:expr;$tail:expr]),*] => { { let mut mat = Vec::new(); $( let mut tmp = "".to_string(); tmp.push_str(&str::repeat("0",$head)); tmp.push_str($row); tmp.push_str(&str::repeat("0",$tail)); mat.push(tmp); )* mat } }; } impl Prince { pub fn new(n_rounds: usize, whitening: bool) -> Self { assert!( n_rounds % 2 == 0, "to preserve the structure of prince, the number of round should be even" ); assert!(n_rounds <= 12); let table = vec![ 0xb, 0xf, 0x3, 0x2, 0xa, 0xc, 0x9, 0x1, 0x6, 0x7, 0x8, 0x0, 0xe, 0x5, 0xd, 0x4, ]; let inv_table = vec![ 0xb, 0x7, 0x3, 0x2, 0xf, 0xd, 0x8, 0x9, 0xa, 0x6, 0x4, 0x0, 0x5, 0xe, 0xc, 0x1, ]; let message_length = 64; let key_length = if whitening { 128 } else { 64 }; let constants = vec![ bit::bits_from_hex_string("0000000000000000"), bit::bits_from_hex_string("13198a2e03707344"), bit::bits_from_hex_string("a4093822299f31d0"), bit::bits_from_hex_string("082efa98ec4e6c89"), bit::bits_from_hex_string("452821e638d01377"), bit::bits_from_hex_string("be5466cf34e90c6c"), bit::bits_from_hex_string("7ef84f78fd955cb1"), bit::bits_from_hex_string("85840851f1ac43aa"), bit::bits_from_hex_string("c882d32f25323c54"), bit::bits_from_hex_string("64a51195e0e3610d"), bit::bits_from_hex_string("d3b5a399ca0c2399"), bit::bits_from_hex_string("c0ac29b7c97c50dd"), ]; let m_prime = binary_matrix![ //M0 [0;"0000100010001000";48], [0;"0100000001000100";48], [0;"0010001000000010";48], [0;"0001000100010000";48], [0;"1000100010000000";48], [0;"0000010001000100";48], [0;"0010000000100010";48], [0;"0001000100000001";48], [0;"1000100000001000";48], [0;"0100010001000000";48], [0;"0000001000100010";48], [0;"0001000000010001";48], 
[0;"1000000010001000";48], [0;"0100010000000100";48], [0;"0010001000100000";48], [0;"0000000100010001";48], //M1 [16;"1000100010000000";32], [16;"0000010001000100";32], [16;"0010000000100010";32], [16;"0001000100000001";32], [16;"1000100000001000";32], [16;"0100010001000000";32], [16;"0000001000100010";32], [16;"0001000000010001";32], [16;"1000000010001000";32], [16;"0100010000000100";32], [16;"0010001000100000";32], [16;"0000000100010001";32], [16;"0000100010001000";32], [16;"0100000001000100";32], [16;"0010001000000010";32], [16;"0001000100010000";32], //M1 [32;"1000100010000000";16], [32;"0000010001000100";16], [32;"0010000000100010";16], [32;"0001000100000001";16], [32;"1000100000001000";16], [32;"0100010001000000";16], [32;"0000001000100010";16], [32;"0001000000010001";16], [32;"1000000010001000";16], [32;"0100010000000100";16], [32;"0010001000100000";16], [32;"0000000100010001";16], [32;"0000100010001000";16], [32;"0100000001000100";16], [32;"0010001000000010";16], [32;"0001000100010000";16], //M0 [48;"0000100010001000";0], [48;"0100000001000100";0], [48;"0010001000000010";0], [48;"0001000100010000";0], [48;"1000100010000000";0], [48;"0000010001000100";0], [48;"0010000000100010";0], [48;"0001000100000001";0], [48;"1000100000001000";0], [48;"0100010001000000";0], [48;"0000001000100010";0], [48;"0001000000010001";0], [48;"1000000010001000";0], [48;"0100010000000100";0], [48;"0010001000100000";0], [48;"0000000100010001";0] ]; Prince { n_rounds, message_length, key_length, constants, inv_table, m_prime, whitening, sbox: RefCell::new(Sbox::new(4, 4, table, message_length + key_length)), } } fn add_round_key(&self, in_bits: Vec<Bit>, round_key: Vec<Bit>) -> Vec<Bit> { assert!(in_bits.len() == self.message_length); assert!(round_key.len() == self.message_length); bit_vector_xoring(in_bits, round_key) } fn add_constant(&self, in_bits: Vec<Bit>, round_index: usize) -> Vec<Bit> { assert!(in_bits.len() == self.message_length); bit_vector_xoring(in_bits, 
self.constants[round_index].clone()) } fn sbox_layer(&self, in_bits: Vec<Bit>) -> Vec<Bit> { assert!(in_bits.len() == self.message_length); let mut out_bits = Vec::with_capacity(self.message_length); for i in 0..16 { out_bits.append( &mut self .sbox .borrow() .apply(in_bits[i * 4..(i + 1) * 4].to_vec()), ); } out_bits } fn m_layer(&self, in_bits: Vec<Bit>) -> Vec<Bit> { let tmp = multiply_with_gf2_matrix(&self.m_prime, &in_bits); let mut out_bits = Vec::with_capacity(self.message_length); for row in 0..4 { for column in 0..4 { for bit in 0..4 { out_bits.push(tmp[bit + ((column * 5 + row * 4) % 16) * 4].clone()) } } } out_bits } fn m_layer_inv(&self, in_bits: Vec<Bit>) -> Vec<Bit> { let mut out_bits = Vec::with_capacity(self.message_length); for row in 0..4 { for column in 0..4 { for bit in 0..4 { out_bits.push(in_bits[bit + ((16 - column * 3 + row * 4) % 16) * 4].clone()) } } } multiply_with_gf2_matrix(&self.m_prime, &out_bits) } fn m_prime_layer(&self, in_bits: Vec<Bit>) -> Vec<Bit> { multiply_with_gf2_matrix(&self.m_prime, &in_bits) } fn make_round_keys(&self, key: Vec<Bit>) -> Vec<Vec<Bit>> { assert!(key.len() == self.key_length); let (k0, k1, k0_prime) = match self.key_length { 128 => { let k0 = key.iter().cloned().take(64).collect::<Vec<Bit>>(); let k1 = key.iter().cloned().skip(64).take(64).collect::<Vec<Bit>>(); let mut k0_prime = vec![k0[63].clone()]; k0_prime.append(&mut k0.iter().cloned().take(63).collect()); k0_prime[63] ^= k0[0].clone(); (k0, k1, k0_prime) } 64 => (key.clone(), key.clone(), key), _ => panic!("size of key should be 64 or 128"), }; let mut round_keys = Vec::new(); round_keys.push(k0); round_keys.push(k1); round_keys.push(k0_prime); round_keys } } fn multiply_with_gf2_matrix(matrix: &[String], in_bits: &[Bit]) -> Vec<Bit> { let mut out_bits = Vec::with_capacity(in_bits.len()); for row in matrix { let r = row.chars().collect::<Vec<char>>(); let mut tmp = bit!(false); for column in 0..64 { match r[column] { '1' => tmp ^= 
in_bits[column].clone(), '0' => (), _ => panic!("non binary character in binary string"), }; } out_bits.push(tmp) } out_bits } impl Cipher for Prince { fn encrypt(&self, in_bits: Vec<Bit>, key_bits: Vec<Bit>) -> Vec<Bit> { let round_keys = self.make_round_keys(key_bits); let mut out_bits = in_bits.clone(); if self.whitening { out_bits = self.add_round_key(out_bits, round_keys[0].clone()); } //Prince-core out_bits = self.add_constant(self.add_round_key(out_bits, round_keys[1].clone()), 0); for round in 1..self.n_rounds / 2 { out_bits = self.add_round_key( self.add_constant(self.m_layer(self.sbox_layer(out_bits)), round), round_keys[1].clone(), ); } out_bits = self.m_prime_layer(self.sbox_layer(out_bits)); self.sbox.replace(Sbox::replace_existing_sbox( 4, 4, self.inv_table.clone(), self.sbox.clone().into_inner(), )); out_bits = self.sbox_layer(out_bits); // Following the recommendation from the paper the reduced rounds are keeping the middle // symetry in an inside-out fashion // If I have 4 rounds, I will add the constants RC0, RC1, RC10 and RC11 for the encryption for (i, _) in (self.n_rounds / 2..self.n_rounds - 1).enumerate() { out_bits = self.sbox_layer(self.m_layer_inv(self.add_constant( self.add_round_key(out_bits, round_keys[1].clone()), 12 - (self.n_rounds / 2) + i, ))); } out_bits = self.add_round_key(self.add_constant(out_bits, 11), round_keys[1].clone()); if self.whitening { out_bits = self.add_round_key(out_bits, round_keys[2].clone()) } // We put back the original S-Box for future encryption using the same cipher self.sbox.replace(Sbox::replace_existing_sbox( 4, 4, vec![ 0xb, 0xf, 0x3, 0x2, 0xa, 0xc, 0x9, 0x1, 0x6, 0x7, 0x8, 0x0, 0xe, 0x5, 0xd, 0x4, ], self.sbox.clone().into_inner(), )); out_bits } fn message_length(&self) -> usize { self.message_length } fn key_length(&self) -> usize { self.key_length } fn n_rounds(&self) -> usize { self.n_rounds } fn sbox(&self) -> Sbox { self.sbox.borrow().clone() } } // from https://eprint.iacr.org/2012/529.pdf 
#[cfg(test)] mod test { use crate::bit; use crate::targets::{prince::Prince, Cipher}; #[test] fn validate_encrypt() { let prince = Prince::new(12, true); let message = bit::bits_from_hex_string("0000000000000000"); let key = bit::bits_from_hex_string("00000000000000000000000000000000"); let ciphertext = prince.encrypt(message, key); assert_eq!("818665aa0d02dfda", bit::bits_to_hex_string(ciphertext)); let prince = Prince::new(12, true); let message = bit::bits_from_hex_string("ffffffffffffffff"); let key = bit::bits_from_hex_string("00000000000000000000000000000000"); let ciphertext = prince.encrypt(message, key); assert_eq!("604ae6ca03c20ada", bit::bits_to_hex_string(ciphertext)); let prince = Prince::new(12, true); let message = bit::bits_from_hex_string("0000000000000000"); let key = bit::bits_from_hex_string("ffffffffffffffff0000000000000000"); let ciphertext = prince.encrypt(message, key); assert_eq!("9fb51935fc3df524", bit::bits_to_hex_string(ciphertext)); let prince = Prince::new(12, true); let message = bit::bits_from_hex_string("0000000000000000"); let key = bit::bits_from_hex_string("0000000000000000ffffffffffffffff"); let ciphertext = prince.encrypt(message, key); assert_eq!("78a54cbe737bb7ef", bit::bits_to_hex_string(ciphertext)); let prince = Prince::new(12, true); let message = bit::bits_from_hex_string("0123456789abcdef"); let key = bit::bits_from_hex_string("0000000000000000fedcba9876543210"); let ciphertext = prince.encrypt(message, key); assert_eq!("ae25ad3ca8fa9ccf", bit::bits_to_hex_string(ciphertext)); } }
35.631579
98
0.556951
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn validate_encrypt() {\n let prince = Prince::new(12, true);\n let message = bit::bits_from_hex_string(\"0000000000000000\");\n let key = bit::bits_from_hex_string(\"00000000000000000000000000000000\");\n let ciphertext = prince.encrypt(message, key);\n assert_eq!(\"818665aa0d02dfda\", bit::bits_to_hex_string(ciphertext));\n\n let prince = Prince::new(12, true);\n let message = bit::bits_from_hex_string(\"ffffffffffffffff\");\n let key = bit::bits_from_hex_string(\"00000000000000000000000000000000\");\n let ciphertext = prince.encrypt(message, key);\n assert_eq!(\"604ae6ca03c20ada\", bit::bits_to_hex_string(ciphertext));\n\n let prince = Prince::new(12, true);\n let message = bit::bits_from_hex_string(\"0000000000000000\");\n let key = bit::bits_from_hex_string(\"ffffffffffffffff0000000000000000\");\n let ciphertext = prince.encrypt(message, key);\n assert_eq!(\"9fb51935fc3df524\", bit::bits_to_hex_string(ciphertext));\n\n let prince = Prince::new(12, true);\n let message = bit::bits_from_hex_string(\"0000000000000000\");\n let key = bit::bits_from_hex_string(\"0000000000000000ffffffffffffffff\");\n let ciphertext = prince.encrypt(message, key);\n assert_eq!(\"78a54cbe737bb7ef\", bit::bits_to_hex_string(ciphertext));\n\n let prince = Prince::new(12, true);\n let message = bit::bits_from_hex_string(\"0123456789abcdef\");\n let key = bit::bits_from_hex_string(\"0000000000000000fedcba9876543210\");\n let ciphertext = prince.encrypt(message, key);\n assert_eq!(\"ae25ad3ca8fa9ccf\", bit::bits_to_hex_string(ciphertext));\n }\n}" ]
f707e8d4914510e15c0d2541e8f9306dc66da188
63,256
rs
Rust
edgelet/edgelet-docker/tests/runtime.rs
marodev/iotedge
065bf32973fba25f89327b8756308593d778b3b7
[ "MIT" ]
null
null
null
edgelet/edgelet-docker/tests/runtime.rs
marodev/iotedge
065bf32973fba25f89327b8756308593d778b3b7
[ "MIT" ]
null
null
null
edgelet/edgelet-docker/tests/runtime.rs
marodev/iotedge
065bf32973fba25f89327b8756308593d778b3b7
[ "MIT" ]
null
null
null
// Copyright (c) Microsoft. All rights reserved. #![deny(rust_2018_idioms, warnings)] #![deny(clippy::all, clippy::pedantic)] #![allow(clippy::default_trait_access, clippy::too_many_lines)] use std::collections::{BTreeMap, HashMap}; use std::str; use std::sync::{Arc, RwLock}; use std::time::Duration; use failure::Fail; use futures::future; use futures::prelude::*; use hyper::{Body, Method, Request, Response, StatusCode}; use maplit::btreemap; use serde_json::{self, json}; use tempfile::NamedTempFile; use typed_headers::{mime, ContentLength, ContentType, HeaderMapExt}; use url::form_urlencoded::parse as parse_query; use docker::models::{ AuthConfig, ContainerCreateBody, ContainerHostConfig, ContainerNetworkSettings, ContainerSummary, HostConfig, HostConfigPortBindings, ImageDeleteResponseItem, NetworkConfig, }; use edgelet_core::{ ImagePullPolicy, LogOptions, LogTail, MakeModuleRuntime, Module, ModuleRegistry, ModuleRuntime, ModuleSpec, RegistryOperation, RuntimeOperation, }; use edgelet_docker::{DockerConfig, DockerModuleRuntime, Settings}; use edgelet_docker::{Error, ErrorKind}; use edgelet_test_utils::web::{ make_req_dispatcher, HttpMethod, RequestHandler, RequestPath, ResponseFuture, }; use edgelet_test_utils::{routes, run_tcp_server}; use hyper::Error as HyperError; const IMAGE_NAME: &str = "nginx:latest"; const INVALID_IMAGE_NAME: &str = "invalidname:latest"; const INVALID_IMAGE_HOST: &str = "invalidhost.com/nginx:latest"; fn make_settings(moby_runtime: &str) -> Settings { use std::io::Write; lazy_static::lazy_static! 
{ static ref ENV_LOCK: std::sync::Mutex<()> = Default::default(); } let _env_lock = ENV_LOCK.lock().expect("env lock poisoned"); let mut config_file = NamedTempFile::new().expect("could not create tempfile for config"); config_file .write_all( r#" hostname = "zoo" homedir = "/var/lib/aziot/edged" [agent] name = "edgeAgent" type = "docker" [agent.config] image = "microsoft/azureiotedge-agent:1.0" [connect] workload_uri = "unix:///var/lib/iotedge/workload.sock" management_uri = "unix:///var/lib/iotedge/mgmt.sock" [listen] workload_uri = "unix:///var/lib/iotedge/workload.sock" management_uri = "unix:///var/lib/iotedge/mgmt.sock" "# .as_bytes(), ) .expect("could not write to config file"); config_file .write_all(moby_runtime.as_bytes()) .expect("could not write to config file"); std::env::set_var("AZIOT_EDGED_CONFIG", config_file.path()); Settings::new().unwrap() } fn make_get_networks_handler( on_get: impl Fn() -> String + Clone + Send + 'static, ) -> impl Fn(Request<Body>) -> ResponseFuture + Clone { move |_| { let response = on_get(); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) as ResponseFuture } } fn make_create_network_handler( on_post: impl Fn(Request<Body>) + Clone + Send + 'static, ) -> impl Fn(Request<Body>) -> ResponseFuture + Clone { move |req| { on_post(req); let response = json!({ "Id": "12345", "Warnings": "" }) .to_string(); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) as ResponseFuture } } fn not_found_handler(_: Request<Body>) -> ResponseFuture { let response = Response::builder() 
.status(StatusCode::NOT_FOUND) .body(Body::default()) .unwrap(); Box::new(future::ok(response)) } fn make_network_handler( on_get: impl Fn() -> String + Clone + Send + 'static, on_post: impl Fn(Request<Body>) + Clone + Send + 'static, ) -> impl Fn(Request<Body>) -> Box<dyn Future<Item = Response<Body>, Error = HyperError> + Send> + Clone { let dispatch_table = routes!( GET "/networks" => make_get_networks_handler(on_get), POST "/networks/create" => make_create_network_handler(on_post), ); make_req_dispatcher(dispatch_table, Box::new(not_found_handler)) } fn default_get_networks_handler() -> impl Fn(Request<Body>) -> ResponseFuture + Clone { make_get_networks_handler(|| json!([]).to_string()) } fn default_create_network_handler() -> impl Fn(Request<Body>) -> ResponseFuture + Clone { make_create_network_handler(|_| ()) } fn default_network_handler( ) -> impl Fn(Request<Body>) -> Box<dyn Future<Item = Response<Body>, Error = HyperError> + Send> + Clone { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), ); make_req_dispatcher(dispatch_table, Box::new(not_found_handler)) } #[allow(clippy::needless_pass_by_value)] fn invalid_image_name_pull_handler(req: Request<Body>) -> ResponseFuture { // verify that path is /images/create and that the "fromImage" query // parameter has the image name we expect assert_eq!(req.uri().path(), "/images/create"); let query_map: HashMap<String, String> = parse_query(req.uri().query().unwrap().as_bytes()) .into_owned() .collect(); assert!(query_map.contains_key("fromImage")); assert_eq!( query_map.get("fromImage").map(AsRef::as_ref), Some(INVALID_IMAGE_NAME) ); let response = format!( r#"{{ "message": "manifest for {} not found" }} "#, INVALID_IMAGE_NAME ); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() 
.typed_insert(&ContentType(mime::APPLICATION_JSON)); *response.status_mut() = hyper::StatusCode::NOT_FOUND; Box::new(future::ok(response)) } #[test] fn image_pull_with_invalid_image_name_fails() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/images/create" => invalid_image_name_pull_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| { let auth = AuthConfig::new() .with_username("u1".to_string()) .with_password("bleh".to_string()) .with_email("u1@bleh.com".to_string()) .with_serveraddress("svr1".to_string()); let config = DockerConfig::new( INVALID_IMAGE_NAME.to_string(), ContainerCreateBody::new(), None, Some(auth), ) .unwrap(); runtime.pull(&config) }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); // Assert let err = runtime .block_on(task) .expect_err("Expected runtime pull method to fail due to invalid image name."); match (err.kind(), err.cause().and_then(Fail::downcast_ref)) { ( edgelet_docker::ErrorKind::RegistryOperation( edgelet_core::RegistryOperation::PullImage(name), ), Some(edgelet_docker::ErrorKind::NotFound(message)), ) if name == INVALID_IMAGE_NAME => { assert_eq!( &format!("manifest for {} not found", INVALID_IMAGE_NAME), message ); } _ => panic!( "Specific docker runtime message is expected for invalid image name. 
Got {:?}", err.kind() ), } } #[allow(clippy::needless_pass_by_value)] fn invalid_image_host_pull_handler(req: Request<Body>) -> ResponseFuture { // verify that path is /images/create and that the "fromImage" query // parameter has the image name we expect assert_eq!(req.uri().path(), "/images/create"); let query_map: HashMap<String, String> = parse_query(req.uri().query().unwrap().as_bytes()) .into_owned() .collect(); assert!(query_map.contains_key("fromImage")); assert_eq!( query_map.get("fromImage").map(AsRef::as_ref), Some(INVALID_IMAGE_HOST) ); let response = format!( r#" {{ "message":"Get https://invalidhost.com: dial tcp: lookup {} on X.X.X.X: no such host" }} "#, INVALID_IMAGE_HOST ); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); *response.status_mut() = hyper::StatusCode::INTERNAL_SERVER_ERROR; Box::new(future::ok(response)) } #[test] fn image_pull_with_invalid_image_host_fails() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/images/create" => invalid_image_host_pull_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| { let auth = AuthConfig::new() .with_username("u1".to_string()) .with_password("bleh".to_string()) .with_email("u1@bleh.com".to_string()) .with_serveraddress("svr1".to_string()); let config = DockerConfig::new( INVALID_IMAGE_HOST.to_string(), ContainerCreateBody::new(), None, Some(auth), ) .unwrap(); runtime.pull(&config) }); let 
mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); // Assert let err = runtime .block_on(task) .expect_err("Expected runtime pull method to fail due to invalid image host."); match (err.kind(), err.cause().and_then(Fail::downcast_ref)) { ( edgelet_docker::ErrorKind::RegistryOperation( edgelet_core::RegistryOperation::PullImage(name), ), Some(edgelet_docker::ErrorKind::FormattedDockerRuntime(message)), ) if name == INVALID_IMAGE_HOST => { assert_eq!( &format!( "Get https://invalidhost.com: dial tcp: lookup {} on X.X.X.X: no such host", INVALID_IMAGE_HOST ), message ); } _ => panic!( "Specific docker runtime message is expected for invalid image host. Got {:?}", err.kind() ), } } #[allow(clippy::needless_pass_by_value)] fn image_pull_with_invalid_creds_handler(req: Request<Body>) -> ResponseFuture { // verify that path is /images/create and that the "fromImage" query // parameter has the image name we expect assert_eq!(req.uri().path(), "/images/create"); let query_map: HashMap<String, String> = parse_query(req.uri().query().unwrap().as_bytes()) .into_owned() .collect(); assert!(query_map.contains_key("fromImage")); assert_eq!(query_map.get("fromImage"), Some(&IMAGE_NAME.to_string())); // verify registry creds let auth_str = req .headers() .get_all("X-Registry-Auth") .into_iter() .map(|bytes| base64::decode_config(bytes, base64::URL_SAFE).unwrap()) .map(|raw| str::from_utf8(&raw).unwrap().to_owned()) .collect::<String>(); let auth_config: AuthConfig = serde_json::from_str(&auth_str).unwrap(); assert_eq!(auth_config.username(), Some("us1")); assert_eq!(auth_config.password(), Some("ac?ac~aaac???")); assert_eq!(auth_config.email(), Some("u1@bleh.com")); assert_eq!(auth_config.serveraddress(), Some("svr1")); let response = format!( r#" {{ "message":"Get {}: unauthorized: authentication required" }} "#, IMAGE_NAME ); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() 
.typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); *response.status_mut() = hyper::StatusCode::INTERNAL_SERVER_ERROR; Box::new(future::ok(response)) } #[test] fn image_pull_with_invalid_creds_fails() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/images/create" => image_pull_with_invalid_creds_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| { // password is written to guarantee base64 encoding has '-' and/or '_' let auth = AuthConfig::new() .with_username("us1".to_string()) .with_password("ac?ac~aaac???".to_string()) .with_email("u1@bleh.com".to_string()) .with_serveraddress("svr1".to_string()); let config = DockerConfig::new( IMAGE_NAME.to_string(), ContainerCreateBody::new(), None, Some(auth), ) .unwrap(); runtime.pull(&config) }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); // Assert let err = runtime .block_on(task) .expect_err("Expected runtime pull method to fail due to unauthentication."); match (err.kind(), err.cause().and_then(Fail::downcast_ref)) { ( edgelet_docker::ErrorKind::RegistryOperation( edgelet_core::RegistryOperation::PullImage(name), ), Some(edgelet_docker::ErrorKind::FormattedDockerRuntime(message)), ) if name == IMAGE_NAME => { assert_eq!( &format!( "Get {}: unauthorized: authentication required", &IMAGE_NAME.to_string() ), message ); } _ => panic!( "Specific docker runtime message is expected for unauthentication. 
Got {:?}", err.kind() ), } } #[allow(clippy::needless_pass_by_value)] fn image_pull_handler(req: Request<Body>) -> ResponseFuture { // verify that path is /images/create and that the "fromImage" query // parameter has the image name we expect assert_eq!(req.uri().path(), "/images/create"); let query_map: HashMap<String, String> = parse_query(req.uri().query().unwrap().as_bytes()) .into_owned() .collect(); assert!(query_map.contains_key("fromImage")); assert_eq!(query_map.get("fromImage"), Some(&IMAGE_NAME.to_string())); let response = r#" { "Id": "img1", "Warnings": [] } "#; let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) } #[test] fn image_pull_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/images/create" => image_pull_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| { let auth = AuthConfig::new() .with_username("u1".to_string()) .with_password("bleh".to_string()) .with_email("u1@bleh.com".to_string()) .with_serveraddress("svr1".to_string()); let config = DockerConfig::new( IMAGE_NAME.to_string(), ContainerCreateBody::new(), None, Some(auth), ) .unwrap(); runtime.pull(&config) }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[allow(clippy::needless_pass_by_value)] fn image_pull_with_creds_handler(req: Request<Body>) -> 
ResponseFuture { // verify that path is /images/create and that the "fromImage" query // parameter has the image name we expect assert_eq!(req.uri().path(), "/images/create"); let query_map: HashMap<String, String> = parse_query(req.uri().query().unwrap().as_bytes()) .into_owned() .collect(); assert!(query_map.contains_key("fromImage")); assert_eq!(query_map.get("fromImage"), Some(&IMAGE_NAME.to_string())); // verify registry creds let auth_str = req .headers() .get_all("X-Registry-Auth") .into_iter() .map(|bytes| base64::decode_config(bytes, base64::URL_SAFE).unwrap()) .map(|raw| str::from_utf8(&raw).unwrap().to_owned()) .collect::<String>(); let auth_config: AuthConfig = serde_json::from_str(&auth_str).unwrap(); assert_eq!(auth_config.username(), Some("u1")); assert_eq!(auth_config.password(), Some("bleh")); assert_eq!(auth_config.email(), Some("u1@bleh.com")); assert_eq!(auth_config.serveraddress(), Some("svr1")); let response = r#" { "Id": "img1", "Warnings": [] } "#; let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) } #[test] fn image_pull_with_creds_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/images/create" => image_pull_with_creds_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| { let auth = AuthConfig::new() .with_username("u1".to_string()) .with_password("bleh".to_string()) 
.with_email("u1@bleh.com".to_string()) .with_serveraddress("svr1".to_string()); let config = DockerConfig::new( IMAGE_NAME.to_string(), ContainerCreateBody::new(), None, Some(auth), ) .unwrap(); runtime.pull(&config) }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[allow(clippy::needless_pass_by_value)] fn image_remove_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::DELETE); assert_eq!(req.uri().path(), &format!("/images/{}", IMAGE_NAME)); let response = serde_json::to_string(&vec![ ImageDeleteResponseItem::new().with_deleted(IMAGE_NAME.to_string()) ]) .unwrap(); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) } #[test] fn image_remove_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), DELETE format!("/images/{}", IMAGE_NAME) => image_remove_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| ModuleRegistry::remove(&runtime, IMAGE_NAME)); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } fn container_create_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::POST); assert_eq!(req.uri().path(), "/containers/create"); let response = json!({ "Id": "12345", "Warnings": [] }) .to_string(); let 
response_len = response.len(); Box::new( req.into_body() .concat2() .and_then(|body| { let create_options: ContainerCreateBody = serde_json::from_slice(body.as_ref()).unwrap(); assert_eq!("nginx:latest", create_options.image().unwrap()); for &v in &["/do/the/custom/command", "with these args"] { assert!(create_options.cmd().unwrap().contains(&v.to_string())); } for &v in &["/also/do/the/entrypoint", "and this"] { assert!(create_options .entrypoint() .unwrap() .contains(&v.to_string())); } for &v in &["k1=v1", "k2=v2", "k3=v3", "k4=v4", "k5=v5"] { assert!(create_options.env().unwrap().contains(&v.to_string())); } let port_bindings = create_options .host_config() .unwrap() .port_bindings() .unwrap(); assert_eq!( "8080", port_bindings .get("80/tcp") .unwrap() .iter() .next() .unwrap() .host_port() .unwrap() ); assert_eq!( "11022", port_bindings .get("22/tcp") .unwrap() .iter() .next() .unwrap() .host_port() .unwrap() ); let volumes = create_options.volumes().unwrap(); let mut expected = ::std::collections::BTreeMap::new(); expected.insert("test1".to_string(), json!({})); assert_eq!(*volumes, expected); Ok(()) }) .map(move |_| { let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); response }), ) } #[test] fn container_create_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/containers/create" => container_create_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| { let mut env = 
BTreeMap::new(); env.insert("k1".to_string(), "v1".to_string()); env.insert("k2".to_string(), "v2".to_string()); env.insert("k3".to_string(), "v3".to_string()); // add some create options let mut port_bindings = BTreeMap::new(); port_bindings.insert( "22/tcp".to_string(), vec![HostConfigPortBindings::new().with_host_port("11022".to_string())], ); port_bindings.insert( "80/tcp".to_string(), vec![HostConfigPortBindings::new().with_host_port("8080".to_string())], ); let memory: i64 = 3_221_225_472; let mut volumes = ::std::collections::BTreeMap::new(); volumes.insert("test1".to_string(), json!({})); let create_options = ContainerCreateBody::new() .with_host_config( HostConfig::new() .with_port_bindings(port_bindings) .with_memory(memory), ) .with_cmd(vec![ "/do/the/custom/command".to_string(), "with these args".to_string(), ]) .with_entrypoint(vec![ "/also/do/the/entrypoint".to_string(), "and this".to_string(), ]) .with_env(vec!["k4=v4".to_string(), "k5=v5".to_string()]) .with_volumes(volumes); let module_config = ModuleSpec::new( "m1".to_string(), "docker".to_string(), DockerConfig::new("nginx:latest".to_string(), create_options, None, None).unwrap(), env, ImagePullPolicy::default(), ) .unwrap(); runtime.create(module_config) }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[allow(clippy::needless_pass_by_value)] fn container_start_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::POST); assert_eq!(req.uri().path(), "/containers/m1/start"); Box::new(future::ok(Response::new(Body::empty()))) } #[test] fn container_start_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/containers/m1/start" => container_start_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); 
let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.start("m1")); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[allow(clippy::needless_pass_by_value)] fn container_stop_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::POST); assert_eq!(req.uri().path(), "/containers/m1/stop"); Box::new(future::ok(Response::new(Body::empty()))) } #[test] fn container_stop_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/containers/m1/stop" => container_stop_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.stop("m1", None)); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[allow(clippy::needless_pass_by_value)] fn container_stop_with_timeout_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::POST); assert_eq!(req.uri().path(), "/containers/m1/stop"); assert_eq!(req.uri().query().unwrap(), "t=600"); Box::new(future::ok(Response::new(Body::empty()))) } #[test] fn container_stop_with_timeout_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), POST "/containers/m1/stop" => container_stop_with_timeout_handler, 
); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.stop("m1", Some(Duration::from_secs(600)))); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[allow(clippy::needless_pass_by_value)] fn container_remove_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::DELETE); assert_eq!(req.uri().path(), "/containers/m1"); Box::new(future::ok(Response::new(Body::empty()))) } #[test] fn container_remove_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), DELETE "/containers/m1" => container_remove_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| ModuleRuntime::remove(&runtime, "m1")); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[allow(clippy::needless_pass_by_value)] fn container_list_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::GET); assert_eq!(req.uri().path(), "/containers/json"); let query_map: HashMap<String, String> = parse_query(req.uri().query().unwrap().as_bytes()) .into_owned() .collect(); assert!(query_map.contains_key("filters")); assert_eq!( query_map.get("filters"), Some( 
&json!({ "label": vec!["net.azure-devices.edge.owner=Microsoft.Azure.Devices.Edge.Agent"] }) .to_string() ) ); let mut labels = HashMap::new(); labels.insert("l1".to_string(), "v1".to_string()); labels.insert("l2".to_string(), "v2".to_string()); labels.insert("l3".to_string(), "v3".to_string()); let modules = vec![ ContainerSummary::new( "m1".to_string(), vec!["/m1".to_string()], "nginx:latest".to_string(), "img1".to_string(), "".to_string(), 10, vec![], 10, 10, labels.clone(), "".to_string(), "".to_string(), ContainerHostConfig::new(""), ContainerNetworkSettings::new(HashMap::new()), vec![], ), ContainerSummary::new( "m2".to_string(), vec!["/m2".to_string()], "ubuntu:latest".to_string(), "img2".to_string(), "".to_string(), 10, vec![], 10, 10, labels.clone(), "".to_string(), "".to_string(), ContainerHostConfig::new(""), ContainerNetworkSettings::new(HashMap::new()), vec![], ), ContainerSummary::new( "m3".to_string(), vec!["/m3".to_string()], "mongo:latest".to_string(), "img3".to_string(), "".to_string(), 10, vec![], 10, 10, labels, "".to_string(), "".to_string(), ContainerHostConfig::new(""), ContainerNetworkSettings::new(HashMap::new()), vec![], ), ]; let response = serde_json::to_string(&modules).unwrap(); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) } #[test] fn container_list_succeeds() { let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), GET "/containers/json" => container_list_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = 
"azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.list()); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); let modules = runtime.block_on(task).unwrap(); assert_eq!(3, modules.len()); assert_eq!("m1", modules[0].name()); assert_eq!("m2", modules[1].name()); assert_eq!("m3", modules[2].name()); assert_eq!("img1", modules[0].config().image_id().unwrap()); assert_eq!("img2", modules[1].config().image_id().unwrap()); assert_eq!("img3", modules[2].config().image_id().unwrap()); assert_eq!("nginx:latest", modules[0].config().image()); assert_eq!("ubuntu:latest", modules[1].config().image()); assert_eq!("mongo:latest", modules[2].config().image()); for module in modules { for i in 0..3 { assert_eq!( module .config() .create_options() .labels() .unwrap() .get(&format!("l{}", i + 1)), Some(&format!("v{}", i + 1)) ); } } } #[allow(clippy::needless_pass_by_value)] fn container_logs_handler(req: Request<Body>) -> ResponseFuture { assert_eq!(req.method(), &Method::GET); assert_eq!(req.uri().path(), "/containers/mod1/logs"); let query_map: HashMap<String, String> = parse_query(req.uri().query().unwrap().as_bytes()) .into_owned() .collect(); assert!(query_map.contains_key("stdout")); assert!(query_map.contains_key("stderr")); assert!(query_map.contains_key("follow")); assert!(query_map.contains_key("tail")); assert_eq!("true", query_map["follow"]); assert_eq!("all", query_map["tail"]); assert_eq!("100000", query_map["since"]); let body = vec![ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x52, 0x6f, 0x73, 0x65, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x72, 0x65, 0x64, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x76, 0x69, 0x6f, 0x6c, 0x65, 0x74, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x62, 0x6c, 0x75, 0x65, ]; Box::new(future::ok(Response::new(body.into()))) } #[test] fn container_logs_succeeds() { let dispatch_table = routes!( GET "/networks" => 
default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), GET "/containers/mod1/logs" => container_logs_handler, ); let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| { let options = LogOptions::new() .with_follow(true) .with_tail(LogTail::All) .with_since(100_000) .with_until(200_000); runtime.logs("mod1", &options) }); let expected_body = [ 0x01_u8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x52, 0x6f, 0x73, 0x65, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x72, 0x65, 0x64, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x76, 0x69, 0x6f, 0x6c, 0x65, 0x74, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x62, 0x6c, 0x75, 0x65, ]; let assert = task.and_then(Stream::concat2).and_then(|b| { assert_eq!(&expected_body[..], b.as_ref()); Ok(()) }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(assert).unwrap(); } #[test] fn image_remove_with_white_space_name_fails() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let image_name = " "; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| ModuleRegistry::remove(&runtime, image_name)) .then(|res| match res { Ok(_) => Err("Expected error but got a result.".to_string()), Err(err) => match err.kind() { ErrorKind::RegistryOperation(RegistryOperation::RemoveImage(s)) if s == image_name => { Ok(()) } kind => panic!( "Expected `RegistryOperation(RemoveImage)` error but got {:?}.", kind ), }, }); let mut runtime = 
tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn create_fails_for_non_docker_type() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = "not_docker"; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| { let module_config = ModuleSpec::new( "m1".to_string(), name.to_string(), DockerConfig::new( "nginx:latest".to_string(), ContainerCreateBody::new(), None, None, ) .unwrap(), BTreeMap::new(), ImagePullPolicy::default(), ) .unwrap(); runtime.create(module_config) }) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::InvalidModuleType(s) if s == name => Ok::<_, Error>(()), kind => panic!("Expected `InvalidModuleType` error but got {:?}.", kind), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn start_fails_for_empty_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = ""; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.start(name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::StartModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(StartModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); 
runtime.block_on(task).unwrap(); } #[test] fn start_fails_for_white_space_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = " "; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.start(name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::StartModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(StartModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn stop_fails_for_empty_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = ""; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.stop(name, None)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::StopModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(StopModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn stop_fails_for_white_space_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" 
network = "azure-iot-edge" "#, port )); let name = " "; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.stop(name, None)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::StopModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(StopModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn restart_fails_for_empty_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = ""; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.restart(name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::RestartModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(RestartModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn restart_fails_for_white_space_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = " "; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.restart(name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { 
ErrorKind::RuntimeOperation(RuntimeOperation::RestartModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(RestartModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn remove_fails_for_empty_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = ""; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| ModuleRuntime::remove(&runtime, name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::RemoveModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(RemoveModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn remove_fails_for_white_space_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = " "; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| ModuleRuntime::remove(&runtime, name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::RemoveModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(RemoveModule)` error but got {:?}.", kind ), }, }); let mut runtime = 
tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn get_fails_for_empty_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = ""; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.get(name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::GetModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(GetModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn get_fails_for_white_space_id() { let (server, port) = run_tcp_server("127.0.0.1", default_network_handler()); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let name = " "; let task = DockerModuleRuntime::make_runtime(settings) .and_then(|runtime| runtime.get(name)) .then(|result| match result { Ok(_) => panic!("Expected test to fail but it didn't!"), Err(err) => match err.kind() { ErrorKind::RuntimeOperation(RuntimeOperation::GetModule(s)) if s == name => { Ok::<_, Error>(()) } kind => panic!( "Expected `RuntimeOperation(GetModule)` error but got {:?}.", kind ), }, }); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); } #[test] fn runtime_init_network_does_not_exist_create() { let list_got_called_lock = Arc::new(RwLock::new(false)); let list_got_called_lock_cloned = list_got_called_lock.clone(); let 
create_got_called_lock = Arc::new(RwLock::new(false)); let create_got_called_lock_cloned = create_got_called_lock.clone(); let network_handler = make_network_handler( move || { let mut list_got_called_w = list_got_called_lock.write().unwrap(); *list_got_called_w = true; json!([]).to_string() }, move |_| { let mut create_got_called_w = create_got_called_lock.write().unwrap(); *create_got_called_w = true; }, ); let (server, port) = run_tcp_server("127.0.0.1", network_handler); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); //act let task = DockerModuleRuntime::make_runtime(settings); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); //assert assert_eq!(true, *list_got_called_lock_cloned.read().unwrap()); assert_eq!(true, *create_got_called_lock_cloned.read().unwrap()); } #[test] fn network_ipv6_create() { let list_got_called_lock = Arc::new(RwLock::new(false)); let list_got_called_lock_cloned = list_got_called_lock.clone(); let create_got_called_lock = Arc::new(RwLock::new(false)); let create_got_called_lock_cloned = create_got_called_lock.clone(); let network_handler = make_network_handler( move || { let mut list_got_called_w = list_got_called_lock.write().unwrap(); *list_got_called_w = true; json!([]).to_string() }, move |req| { let mut create_got_called_w = create_got_called_lock.write().unwrap(); *create_got_called_w = true; let task = req .into_body() .concat2() .map(|body| { let network: NetworkConfig = serde_json::from_slice(&body).unwrap(); assert_eq!("my-network", network.name().as_str()); let ipam_config = network.IPAM().unwrap().config().unwrap(); let ipam_config_0 = ipam_config.get(0).unwrap(); assert_eq!(ipam_config_0["Gateway"], "172.18.0.1"); assert_eq!(ipam_config_0["Subnet"], "172.18.0.0/16"); assert_eq!(ipam_config_0["IPRange"], 
"172.18.0.0/16"); let ipam_config_1 = ipam_config.get(1).unwrap(); assert_eq!(ipam_config_1["Gateway"], "172.20.0.1"); assert_eq!(ipam_config_1["Subnet"], "172.20.0.0/16"); assert_eq!(ipam_config_1["IPRange"], "172.20.0.0/24"); }) .map_err(|err| panic!("{:?}", err)); tokio::spawn(task).into_future().wait().unwrap(); }, ); let (server, port) = run_tcp_server("127.0.0.1", network_handler); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" [moby_runtime.network] name = "my-network" ipv6 = true [[moby_runtime.network.ipam.config]] gateway = "172.18.0.1" subnet = "172.18.0.0/16" ip_range = "172.18.0.0/16" [[moby_runtime.network.ipam.config]] gateway = "172.20.0.1" subnet = "172.20.0.0/16" ip_range = "172.20.0.0/24" "#, port )); //act let task = DockerModuleRuntime::make_runtime(settings); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); //assert assert_eq!(true, *list_got_called_lock_cloned.read().unwrap()); assert_eq!(true, *create_got_called_lock_cloned.read().unwrap()); } #[test] fn runtime_init_network_exist_do_not_create() { let list_got_called_lock = Arc::new(RwLock::new(false)); let list_got_called_lock_cloned = list_got_called_lock.clone(); let create_got_called_lock = Arc::new(RwLock::new(false)); let create_got_called_lock_cloned = create_got_called_lock.clone(); let network_handler = make_network_handler( move || { let mut list_got_called_w = list_got_called_lock.write().unwrap(); *list_got_called_w = true; json!([ { "Name": "azure-iot-edge", "Id": "8e3209d08ed5e73d1c9c8e7580ddad232b6dceb5bf0c6d74cadbed75422eef0e", "Created": "0001-01-01T00:00:00Z", "Scope": "local", "Driver": "bridge", "EnableIPv6": false, "Internal": false, "Attachable": false, "Ingress": false, "IPAM": { "Driver": "bridge", "Config": [] }, "Containers": {}, "Options": {} } ]) .to_string() }, move |_| { let mut 
create_got_called_w = create_got_called_lock.write().unwrap(); *create_got_called_w = true; }, ); let (server, port) = run_tcp_server("127.0.0.1", network_handler); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); //act let task = DockerModuleRuntime::make_runtime(settings); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); runtime.block_on(task).unwrap(); //assert assert_eq!(true, *list_got_called_lock_cloned.read().unwrap()); assert_eq!(false, *create_got_called_lock_cloned.read().unwrap()); } #[test] fn runtime_system_info_succeeds() { let system_info_got_called_lock = Arc::new(RwLock::new(false)); let system_info_got_called_lock_cloned = system_info_got_called_lock.clone(); let on_system_info = move |req: Request<Body>| { let mut system_info_got_called_w = system_info_got_called_lock.write().unwrap(); *system_info_got_called_w = true; assert_eq!(req.uri().path(), "/info"); let response = json!( { "OSType": "linux", "Architecture": "x86_64", } ) .to_string(); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) as ResponseFuture }; let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), GET "/info" => on_system_info, ); //act let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| 
runtime.system_info()); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); let system_info = runtime.block_on(task).unwrap(); //assert assert_eq!(true, *system_info_got_called_lock_cloned.read().unwrap()); assert_eq!("linux", system_info.os_type); assert_eq!("x86_64", system_info.architecture); } #[test] fn runtime_system_info_none_returns_unkown() { let system_info_got_called_lock = Arc::new(RwLock::new(false)); let system_info_got_called_lock_cloned = system_info_got_called_lock.clone(); let on_system_info = move |req: Request<Body>| { let mut system_info_got_called_w = system_info_got_called_lock.write().unwrap(); *system_info_got_called_w = true; assert_eq!(req.uri().path(), "/info"); let response = json!({}).to_string(); let response_len = response.len(); let mut response = Response::new(response.into()); response .headers_mut() .typed_insert(&ContentLength(response_len as u64)); response .headers_mut() .typed_insert(&ContentType(mime::APPLICATION_JSON)); Box::new(future::ok(response)) as ResponseFuture }; let dispatch_table = routes!( GET "/networks" => default_get_networks_handler(), POST "/networks/create" => default_create_network_handler(), GET "/info" => on_system_info, ); //act let (server, port) = run_tcp_server( "127.0.0.1", make_req_dispatcher(dispatch_table, Box::new(not_found_handler)), ); let server = server.map_err(|err| panic!(err)); let settings = make_settings(&format!( r#" [moby_runtime] uri = "http://localhost:{}" network = "azure-iot-edge" "#, port )); let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.system_info()); let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); runtime.spawn(server); let system_info = runtime.block_on(task).unwrap(); //assert assert_eq!(true, *system_info_got_called_lock_cloned.read().unwrap()); assert_eq!("Unknown", system_info.os_type); assert_eq!("Unknown", system_info.architecture); }
31.330362
104
0.581336
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn image_pull_with_invalid_image_name_fails() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/images/create\" => invalid_image_name_pull_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| {\n let auth = AuthConfig::new()\n .with_username(\"u1\".to_string())\n .with_password(\"bleh\".to_string())\n .with_email(\"u1@bleh.com\".to_string())\n .with_serveraddress(\"svr1\".to_string());\n let config = DockerConfig::new(\n INVALID_IMAGE_NAME.to_string(),\n ContainerCreateBody::new(),\n None,\n Some(auth),\n )\n .unwrap();\n\n runtime.pull(&config)\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n\n // Assert\n let err = runtime\n .block_on(task)\n .expect_err(\"Expected runtime pull method to fail due to invalid image name.\");\n\n match (err.kind(), err.cause().and_then(Fail::downcast_ref)) {\n (\n edgelet_docker::ErrorKind::RegistryOperation(\n edgelet_core::RegistryOperation::PullImage(name),\n ),\n Some(edgelet_docker::ErrorKind::NotFound(message)),\n ) if name == INVALID_IMAGE_NAME => {\n assert_eq!(\n &format!(\"manifest for {} not found\", INVALID_IMAGE_NAME),\n message\n );\n }\n\n _ => panic!(\n \"Specific docker runtime message is expected for invalid image name. 
Got {:?}\",\n err.kind()\n ),\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn image_pull_with_invalid_image_host_fails() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/images/create\" => invalid_image_host_pull_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| {\n let auth = AuthConfig::new()\n .with_username(\"u1\".to_string())\n .with_password(\"bleh\".to_string())\n .with_email(\"u1@bleh.com\".to_string())\n .with_serveraddress(\"svr1\".to_string());\n let config = DockerConfig::new(\n INVALID_IMAGE_HOST.to_string(),\n ContainerCreateBody::new(),\n None,\n Some(auth),\n )\n .unwrap();\n\n runtime.pull(&config)\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n\n // Assert\n let err = runtime\n .block_on(task)\n .expect_err(\"Expected runtime pull method to fail due to invalid image host.\");\n\n match (err.kind(), err.cause().and_then(Fail::downcast_ref)) {\n (\n edgelet_docker::ErrorKind::RegistryOperation(\n edgelet_core::RegistryOperation::PullImage(name),\n ),\n Some(edgelet_docker::ErrorKind::FormattedDockerRuntime(message)),\n ) if name == INVALID_IMAGE_HOST => {\n assert_eq!(\n &format!(\n \"Get https://invalidhost.com: dial tcp: lookup {} on X.X.X.X: no such host\",\n INVALID_IMAGE_HOST\n ),\n message\n );\n }\n\n _ => panic!(\n \"Specific docker runtime message is expected for invalid image host. 
Got {:?}\",\n err.kind()\n ),\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn image_pull_with_invalid_creds_fails() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/images/create\" => image_pull_with_invalid_creds_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| {\n // password is written to guarantee base64 encoding has '-' and/or '_'\n let auth = AuthConfig::new()\n .with_username(\"us1\".to_string())\n .with_password(\"ac?ac~aaac???\".to_string())\n .with_email(\"u1@bleh.com\".to_string())\n .with_serveraddress(\"svr1\".to_string());\n let config = DockerConfig::new(\n IMAGE_NAME.to_string(),\n ContainerCreateBody::new(),\n None,\n Some(auth),\n )\n .unwrap();\n\n runtime.pull(&config)\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n\n // Assert\n let err = runtime\n .block_on(task)\n .expect_err(\"Expected runtime pull method to fail due to unauthentication.\");\n\n match (err.kind(), err.cause().and_then(Fail::downcast_ref)) {\n (\n edgelet_docker::ErrorKind::RegistryOperation(\n edgelet_core::RegistryOperation::PullImage(name),\n ),\n Some(edgelet_docker::ErrorKind::FormattedDockerRuntime(message)),\n ) if name == IMAGE_NAME => {\n assert_eq!(\n &format!(\n \"Get {}: unauthorized: authentication required\",\n &IMAGE_NAME.to_string()\n ),\n message\n );\n }\n\n _ => panic!(\n \"Specific docker runtime message is expected for unauthentication. 
Got {:?}\",\n err.kind()\n ),\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn image_pull_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/images/create\" => image_pull_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| {\n let auth = AuthConfig::new()\n .with_username(\"u1\".to_string())\n .with_password(\"bleh\".to_string())\n .with_email(\"u1@bleh.com\".to_string())\n .with_serveraddress(\"svr1\".to_string());\n let config = DockerConfig::new(\n IMAGE_NAME.to_string(),\n ContainerCreateBody::new(),\n None,\n Some(auth),\n )\n .unwrap();\n\n runtime.pull(&config)\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn image_pull_with_creds_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/images/create\" => image_pull_with_creds_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| {\n let auth = AuthConfig::new()\n 
.with_username(\"u1\".to_string())\n .with_password(\"bleh\".to_string())\n .with_email(\"u1@bleh.com\".to_string())\n .with_serveraddress(\"svr1\".to_string());\n let config = DockerConfig::new(\n IMAGE_NAME.to_string(),\n ContainerCreateBody::new(),\n None,\n Some(auth),\n )\n .unwrap();\n\n runtime.pull(&config)\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn image_remove_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n DELETE format!(\"/images/{}\", IMAGE_NAME) => image_remove_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| ModuleRegistry::remove(&runtime, IMAGE_NAME));\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn container_create_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/containers/create\" => container_create_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = 
\"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| {\n let mut env = BTreeMap::new();\n env.insert(\"k1\".to_string(), \"v1\".to_string());\n env.insert(\"k2\".to_string(), \"v2\".to_string());\n env.insert(\"k3\".to_string(), \"v3\".to_string());\n\n // add some create options\n let mut port_bindings = BTreeMap::new();\n port_bindings.insert(\n \"22/tcp\".to_string(),\n vec![HostConfigPortBindings::new().with_host_port(\"11022\".to_string())],\n );\n port_bindings.insert(\n \"80/tcp\".to_string(),\n vec![HostConfigPortBindings::new().with_host_port(\"8080\".to_string())],\n );\n let memory: i64 = 3_221_225_472;\n let mut volumes = ::std::collections::BTreeMap::new();\n volumes.insert(\"test1\".to_string(), json!({}));\n let create_options = ContainerCreateBody::new()\n .with_host_config(\n HostConfig::new()\n .with_port_bindings(port_bindings)\n .with_memory(memory),\n )\n .with_cmd(vec![\n \"/do/the/custom/command\".to_string(),\n \"with these args\".to_string(),\n ])\n .with_entrypoint(vec![\n \"/also/do/the/entrypoint\".to_string(),\n \"and this\".to_string(),\n ])\n .with_env(vec![\"k4=v4\".to_string(), \"k5=v5\".to_string()])\n .with_volumes(volumes);\n\n let module_config = ModuleSpec::new(\n \"m1\".to_string(),\n \"docker\".to_string(),\n DockerConfig::new(\"nginx:latest\".to_string(), create_options, None, None).unwrap(),\n env,\n ImagePullPolicy::default(),\n )\n .unwrap();\n\n runtime.create(module_config)\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn container_start_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/containers/m1/start\" => container_start_handler,\n );\n\n let (server, port) = 
run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.start(\"m1\"));\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn container_stop_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/containers/m1/stop\" => container_stop_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task =\n DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.stop(\"m1\", None));\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn container_stop_with_timeout_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n POST \"/containers/m1/stop\" => container_stop_with_timeout_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n 
r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.stop(\"m1\", Some(Duration::from_secs(600))));\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn container_remove_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n DELETE \"/containers/m1\" => container_remove_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| ModuleRuntime::remove(&runtime, \"m1\"));\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn container_list_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n GET \"/containers/json\" => container_list_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| 
runtime.list());\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n let modules = runtime.block_on(task).unwrap();\n\n assert_eq!(3, modules.len());\n\n assert_eq!(\"m1\", modules[0].name());\n assert_eq!(\"m2\", modules[1].name());\n assert_eq!(\"m3\", modules[2].name());\n\n assert_eq!(\"img1\", modules[0].config().image_id().unwrap());\n assert_eq!(\"img2\", modules[1].config().image_id().unwrap());\n assert_eq!(\"img3\", modules[2].config().image_id().unwrap());\n\n assert_eq!(\"nginx:latest\", modules[0].config().image());\n assert_eq!(\"ubuntu:latest\", modules[1].config().image());\n assert_eq!(\"mongo:latest\", modules[2].config().image());\n\n for module in modules {\n for i in 0..3 {\n assert_eq!(\n module\n .config()\n .create_options()\n .labels()\n .unwrap()\n .get(&format!(\"l{}\", i + 1)),\n Some(&format!(\"v{}\", i + 1))\n );\n }\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn container_logs_succeeds() {\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n GET \"/containers/mod1/logs\" => container_logs_handler,\n );\n\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task = DockerModuleRuntime::make_runtime(settings).and_then(|runtime| {\n let options = LogOptions::new()\n .with_follow(true)\n .with_tail(LogTail::All)\n .with_since(100_000)\n .with_until(200_000);\n\n runtime.logs(\"mod1\", &options)\n });\n\n let expected_body = [\n 0x01_u8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x52, 0x6f, 0x73, 0x65, 0x73, 0x20,\n 0x61, 0x72, 0x65, 0x20, 0x72, 0x65, 0x64, 0x02, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x10,\n 0x76, 0x69, 0x6f, 0x6c, 0x65, 0x74, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x62, 0x6c, 0x75,\n 0x65,\n ];\n\n let assert = task.and_then(Stream::concat2).and_then(|b| {\n assert_eq!(&expected_body[..], b.as_ref());\n Ok(())\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(assert).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn image_remove_with_white_space_name_fails() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let image_name = \" \";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| ModuleRegistry::remove(&runtime, image_name))\n .then(|res| match res {\n Ok(_) => Err(\"Expected error but got a result.\".to_string()),\n Err(err) => match err.kind() {\n ErrorKind::RegistryOperation(RegistryOperation::RemoveImage(s))\n if s == image_name =>\n {\n Ok(())\n }\n kind => panic!(\n \"Expected `RegistryOperation(RemoveImage)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn create_fails_for_non_docker_type() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \"not_docker\";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| {\n let module_config = ModuleSpec::new(\n \"m1\".to_string(),\n name.to_string(),\n 
DockerConfig::new(\n \"nginx:latest\".to_string(),\n ContainerCreateBody::new(),\n None,\n None,\n )\n .unwrap(),\n BTreeMap::new(),\n ImagePullPolicy::default(),\n )\n .unwrap();\n\n runtime.create(module_config)\n })\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::InvalidModuleType(s) if s == name => Ok::<_, Error>(()),\n kind => panic!(\"Expected `InvalidModuleType` error but got {:?}.\", kind),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn start_fails_for_empty_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \"\";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.start(name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::StartModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(StartModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn start_fails_for_white_space_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n 
));\n\n let name = \" \";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.start(name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::StartModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(StartModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn stop_fails_for_empty_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \"\";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.stop(name, None))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::StopModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(StopModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn stop_fails_for_white_space_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \" \";\n\n let task 
= DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.stop(name, None))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::StopModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(StopModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn restart_fails_for_empty_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \"\";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.restart(name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::RestartModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(RestartModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn restart_fails_for_white_space_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \" \";\n\n let task = 
DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.restart(name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::RestartModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(RestartModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn remove_fails_for_empty_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \"\";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| ModuleRuntime::remove(&runtime, name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::RemoveModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(RemoveModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn remove_fails_for_white_space_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \" \";\n\n let task = 
DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| ModuleRuntime::remove(&runtime, name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::RemoveModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(RemoveModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn get_fails_for_empty_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \"\";\n\n let task = DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.get(name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::GetModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(GetModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn get_fails_for_white_space_id() {\n let (server, port) = run_tcp_server(\"127.0.0.1\", default_network_handler());\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let name = \" \";\n\n let task = 
DockerModuleRuntime::make_runtime(settings)\n .and_then(|runtime| runtime.get(name))\n .then(|result| match result {\n Ok(_) => panic!(\"Expected test to fail but it didn't!\"),\n Err(err) => match err.kind() {\n ErrorKind::RuntimeOperation(RuntimeOperation::GetModule(s)) if s == name => {\n Ok::<_, Error>(())\n }\n kind => panic!(\n \"Expected `RuntimeOperation(GetModule)` error but got {:?}.\",\n kind\n ),\n },\n });\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn runtime_init_network_does_not_exist_create() {\n let list_got_called_lock = Arc::new(RwLock::new(false));\n let list_got_called_lock_cloned = list_got_called_lock.clone();\n\n let create_got_called_lock = Arc::new(RwLock::new(false));\n let create_got_called_lock_cloned = create_got_called_lock.clone();\n\n let network_handler = make_network_handler(\n move || {\n let mut list_got_called_w = list_got_called_lock.write().unwrap();\n *list_got_called_w = true;\n\n json!([]).to_string()\n },\n move |_| {\n let mut create_got_called_w = create_got_called_lock.write().unwrap();\n *create_got_called_w = true;\n },\n );\n\n let (server, port) = run_tcp_server(\"127.0.0.1\", network_handler);\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n //act\n let task = DockerModuleRuntime::make_runtime(settings);\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n\n //assert\n assert_eq!(true, *list_got_called_lock_cloned.read().unwrap());\n assert_eq!(true, *create_got_called_lock_cloned.read().unwrap());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn network_ipv6_create() {\n let list_got_called_lock = 
Arc::new(RwLock::new(false));\n let list_got_called_lock_cloned = list_got_called_lock.clone();\n\n let create_got_called_lock = Arc::new(RwLock::new(false));\n let create_got_called_lock_cloned = create_got_called_lock.clone();\n\n let network_handler = make_network_handler(\n move || {\n let mut list_got_called_w = list_got_called_lock.write().unwrap();\n *list_got_called_w = true;\n\n json!([]).to_string()\n },\n move |req| {\n let mut create_got_called_w = create_got_called_lock.write().unwrap();\n *create_got_called_w = true;\n\n let task = req\n .into_body()\n .concat2()\n .map(|body| {\n let network: NetworkConfig = serde_json::from_slice(&body).unwrap();\n assert_eq!(\"my-network\", network.name().as_str());\n let ipam_config = network.IPAM().unwrap().config().unwrap();\n\n let ipam_config_0 = ipam_config.get(0).unwrap();\n assert_eq!(ipam_config_0[\"Gateway\"], \"172.18.0.1\");\n assert_eq!(ipam_config_0[\"Subnet\"], \"172.18.0.0/16\");\n assert_eq!(ipam_config_0[\"IPRange\"], \"172.18.0.0/16\");\n\n let ipam_config_1 = ipam_config.get(1).unwrap();\n assert_eq!(ipam_config_1[\"Gateway\"], \"172.20.0.1\");\n assert_eq!(ipam_config_1[\"Subnet\"], \"172.20.0.0/16\");\n assert_eq!(ipam_config_1[\"IPRange\"], \"172.20.0.0/24\");\n })\n .map_err(|err| panic!(\"{:?}\", err));\n\n tokio::spawn(task).into_future().wait().unwrap();\n },\n );\n\n let (server, port) = run_tcp_server(\"127.0.0.1\", network_handler);\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\n\n[moby_runtime.network]\nname = \"my-network\"\nipv6 = true\n\n[[moby_runtime.network.ipam.config]]\ngateway = \"172.18.0.1\"\nsubnet = \"172.18.0.0/16\"\nip_range = \"172.18.0.0/16\"\n\n[[moby_runtime.network.ipam.config]]\ngateway = \"172.20.0.1\"\nsubnet = \"172.20.0.0/16\"\nip_range = \"172.20.0.0/24\"\n\"#,\n port\n ));\n\n //act\n let task = DockerModuleRuntime::make_runtime(settings);\n\n let mut runtime 
= tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n\n //assert\n assert_eq!(true, *list_got_called_lock_cloned.read().unwrap());\n assert_eq!(true, *create_got_called_lock_cloned.read().unwrap());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn runtime_init_network_exist_do_not_create() {\n let list_got_called_lock = Arc::new(RwLock::new(false));\n let list_got_called_lock_cloned = list_got_called_lock.clone();\n\n let create_got_called_lock = Arc::new(RwLock::new(false));\n let create_got_called_lock_cloned = create_got_called_lock.clone();\n\n let network_handler = make_network_handler(\n move || {\n let mut list_got_called_w = list_got_called_lock.write().unwrap();\n *list_got_called_w = true;\n\n json!([\n {\n \"Name\": \"azure-iot-edge\",\n \"Id\": \"8e3209d08ed5e73d1c9c8e7580ddad232b6dceb5bf0c6d74cadbed75422eef0e\",\n \"Created\": \"0001-01-01T00:00:00Z\",\n \"Scope\": \"local\",\n \"Driver\": \"bridge\",\n \"EnableIPv6\": false,\n \"Internal\": false,\n \"Attachable\": false,\n \"Ingress\": false,\n \"IPAM\": {\n \"Driver\": \"bridge\",\n \"Config\": []\n },\n \"Containers\": {},\n \"Options\": {}\n }\n ])\n .to_string()\n },\n move |_| {\n let mut create_got_called_w = create_got_called_lock.write().unwrap();\n *create_got_called_w = true;\n },\n );\n\n let (server, port) = run_tcp_server(\"127.0.0.1\", network_handler);\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n //act\n let task = DockerModuleRuntime::make_runtime(settings);\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n runtime.block_on(task).unwrap();\n\n //assert\n assert_eq!(true, *list_got_called_lock_cloned.read().unwrap());\n assert_eq!(false, *create_got_called_lock_cloned.read().unwrap());\n}\n}", 
"#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn runtime_system_info_succeeds() {\n let system_info_got_called_lock = Arc::new(RwLock::new(false));\n let system_info_got_called_lock_cloned = system_info_got_called_lock.clone();\n\n let on_system_info = move |req: Request<Body>| {\n let mut system_info_got_called_w = system_info_got_called_lock.write().unwrap();\n *system_info_got_called_w = true;\n\n assert_eq!(req.uri().path(), \"/info\");\n\n let response = json!(\n {\n \"OSType\": \"linux\",\n \"Architecture\": \"x86_64\",\n }\n )\n .to_string();\n let response_len = response.len();\n\n let mut response = Response::new(response.into());\n response\n .headers_mut()\n .typed_insert(&ContentLength(response_len as u64));\n response\n .headers_mut()\n .typed_insert(&ContentType(mime::APPLICATION_JSON));\n\n Box::new(future::ok(response)) as ResponseFuture\n };\n\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n GET \"/info\" => on_system_info,\n );\n\n //act\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task =\n DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.system_info());\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n let system_info = runtime.block_on(task).unwrap();\n\n //assert\n assert_eq!(true, *system_info_got_called_lock_cloned.read().unwrap());\n assert_eq!(\"linux\", system_info.os_type);\n assert_eq!(\"x86_64\", system_info.architecture);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn runtime_system_info_none_returns_unkown() {\n let system_info_got_called_lock = 
Arc::new(RwLock::new(false));\n let system_info_got_called_lock_cloned = system_info_got_called_lock.clone();\n\n let on_system_info = move |req: Request<Body>| {\n let mut system_info_got_called_w = system_info_got_called_lock.write().unwrap();\n *system_info_got_called_w = true;\n\n assert_eq!(req.uri().path(), \"/info\");\n\n let response = json!({}).to_string();\n let response_len = response.len();\n\n let mut response = Response::new(response.into());\n response\n .headers_mut()\n .typed_insert(&ContentLength(response_len as u64));\n response\n .headers_mut()\n .typed_insert(&ContentType(mime::APPLICATION_JSON));\n\n Box::new(future::ok(response)) as ResponseFuture\n };\n\n let dispatch_table = routes!(\n GET \"/networks\" => default_get_networks_handler(),\n POST \"/networks/create\" => default_create_network_handler(),\n GET \"/info\" => on_system_info,\n );\n\n //act\n let (server, port) = run_tcp_server(\n \"127.0.0.1\",\n make_req_dispatcher(dispatch_table, Box::new(not_found_handler)),\n );\n let server = server.map_err(|err| panic!(err));\n\n let settings = make_settings(&format!(\n r#\"\n[moby_runtime]\nuri = \"http://localhost:{}\"\nnetwork = \"azure-iot-edge\"\n\"#,\n port\n ));\n\n let task =\n DockerModuleRuntime::make_runtime(settings).and_then(|runtime| runtime.system_info());\n\n let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap();\n runtime.spawn(server);\n let system_info = runtime.block_on(task).unwrap();\n\n //assert\n assert_eq!(true, *system_info_got_called_lock_cloned.read().unwrap());\n assert_eq!(\"Unknown\", system_info.os_type);\n assert_eq!(\"Unknown\", system_info.architecture);\n}\n}" ]
f707f4a58f8724c79e5c628b2905e85910fe694a
9,909
rs
Rust
cli/worker.rs
crabmusket/deno
520bdb6c31dd08b6f4e52de5116fd23d6d57fdda
[ "MIT" ]
null
null
null
cli/worker.rs
crabmusket/deno
520bdb6c31dd08b6f4e52de5116fd23d6d57fdda
[ "MIT" ]
null
null
null
cli/worker.rs
crabmusket/deno
520bdb6c31dd08b6f4e52de5116fd23d6d57fdda
[ "MIT" ]
null
null
null
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license. use crate::fmt_errors::JSError; use crate::state::ThreadSafeState; use crate::tokio_util; use deno; use deno::ErrBox; use deno::ModuleSpecifier; use deno::StartupData; use futures::Async; use futures::Future; use std::sync::Arc; use std::sync::Mutex; /// Wraps deno::Isolate to provide source maps, ops for the CLI, and /// high-level module loading #[derive(Clone)] pub struct Worker { isolate: Arc<Mutex<deno::Isolate>>, pub state: ThreadSafeState, } impl Worker { pub fn new( _name: String, startup_data: StartupData, state: ThreadSafeState, ) -> Worker { let isolate = Arc::new(Mutex::new(deno::Isolate::new(startup_data, false))); { let mut i = isolate.lock().unwrap(); let state_ = state.clone(); i.set_dispatch(move |op_id, control_buf, zero_copy_buf| { state_.dispatch(op_id, control_buf, zero_copy_buf) }); let state_ = state.clone(); i.set_js_error_create(move |v8_exception| { JSError::from_v8_exception(v8_exception, &state_.ts_compiler) }) } Self { isolate, state } } /// Same as execute2() but the filename defaults to "<anonymous>". pub fn execute(&mut self, js_source: &str) -> Result<(), ErrBox> { self.execute2("<anonymous>", js_source) } /// Executes the provided JavaScript source code. The js_filename argument is /// provided only for debugging purposes. pub fn execute2( &mut self, js_filename: &str, js_source: &str, ) -> Result<(), ErrBox> { let mut isolate = self.isolate.lock().unwrap(); isolate.execute(js_filename, js_source) } /// Executes the provided JavaScript module. 
pub fn execute_mod_async( &mut self, module_specifier: &ModuleSpecifier, is_prefetch: bool, ) -> impl Future<Item = (), Error = ErrBox> { let worker = self.clone(); let loader = self.state.clone(); let isolate = self.isolate.clone(); let modules = self.state.modules.clone(); let recursive_load = deno::RecursiveLoad::new( &module_specifier.to_string(), loader, isolate, modules, ); recursive_load.and_then(move |id| -> Result<(), ErrBox> { worker.state.progress.done(); if is_prefetch { Ok(()) } else { let mut isolate = worker.isolate.lock().unwrap(); isolate.mod_evaluate(id) } }) } /// Executes the provided JavaScript module. pub fn execute_mod( &mut self, module_specifier: &ModuleSpecifier, is_prefetch: bool, ) -> Result<(), ErrBox> { tokio_util::block_on(self.execute_mod_async(module_specifier, is_prefetch)) } } impl Future for Worker { type Item = (); type Error = ErrBox; fn poll(&mut self) -> Result<Async<()>, ErrBox> { let mut isolate = self.isolate.lock().unwrap(); isolate.poll() } } #[cfg(test)] mod tests { use super::*; use crate::flags; use crate::ops::op_selector_std; use crate::progress::Progress; use crate::resources; use crate::startup_data; use crate::state::ThreadSafeState; use crate::tokio_util; use futures::future::lazy; use std::sync::atomic::Ordering; #[test] fn execute_mod_esm_imports_a() { let module_specifier = ModuleSpecifier::resolve_url_or_path("tests/esm_imports_a.js").unwrap(); let argv = vec![String::from("./deno"), module_specifier.to_string()]; let state = ThreadSafeState::new( flags::DenoFlags::default(), argv, op_selector_std, Progress::new(), true, ) .unwrap(); let state_ = state.clone(); tokio_util::run(lazy(move || { let mut worker = Worker::new("TEST".to_string(), StartupData::None, state); let result = worker.execute_mod(&module_specifier, false); if let Err(err) = result { eprintln!("execute_mod err {:?}", err); } tokio_util::panic_on_error(worker) })); let metrics = &state_.metrics; 
assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 2); // Check that we didn't start the compiler. assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 0); } #[test] fn execute_mod_circular() { let module_specifier = ModuleSpecifier::resolve_url_or_path("tests/circular1.js").unwrap(); let argv = vec![String::from("./deno"), module_specifier.to_string()]; let state = ThreadSafeState::new( flags::DenoFlags::default(), argv, op_selector_std, Progress::new(), true, ) .unwrap(); let state_ = state.clone(); tokio_util::run(lazy(move || { let mut worker = Worker::new("TEST".to_string(), StartupData::None, state); let result = worker.execute_mod(&module_specifier, false); if let Err(err) = result { eprintln!("execute_mod err {:?}", err); } tokio_util::panic_on_error(worker) })); let metrics = &state_.metrics; assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 2); // Check that we didn't start the compiler. assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 0); } #[test] fn execute_006_url_imports() { let module_specifier = ModuleSpecifier::resolve_url_or_path("tests/006_url_imports.ts").unwrap(); let argv = vec![String::from("deno"), module_specifier.to_string()]; let mut flags = flags::DenoFlags::default(); flags.reload = true; let state = ThreadSafeState::new(flags, argv, op_selector_std, Progress::new(), true) .unwrap(); let state_ = state.clone(); tokio_util::run(lazy(move || { let mut worker = Worker::new( "TEST".to_string(), startup_data::deno_isolate_init(), state, ); worker.execute("denoMain()").unwrap(); let result = worker.execute_mod(&module_specifier, false); if let Err(err) = result { eprintln!("execute_mod err {:?}", err); } tokio_util::panic_on_error(worker) })); let metrics = &state_.metrics; assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 3); // Check that we've only invoked the compiler once. 
assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 1); } fn create_test_worker() -> Worker { let state = ThreadSafeState::mock(vec![ String::from("./deno"), String::from("hello.js"), ]); let mut worker = Worker::new("TEST".to_string(), startup_data::deno_isolate_init(), state); worker.execute("denoMain()").unwrap(); worker.execute("workerMain()").unwrap(); worker } #[test] fn test_worker_messages() { tokio_util::init(|| { let mut worker = create_test_worker(); let source = r#" onmessage = function(e) { console.log("msg from main script", e.data); if (e.data == "exit") { delete window.onmessage; return; } else { console.assert(e.data === "hi"); } postMessage([1, 2, 3]); console.log("after postMessage"); } "#; worker.execute(source).unwrap(); let resource = worker.state.resource.clone(); let resource_ = resource.clone(); tokio::spawn(lazy(move || { worker.then(move |r| -> Result<(), ()> { resource_.close(); r.unwrap(); Ok(()) }) })); let msg = json!("hi").to_string().into_boxed_str().into_boxed_bytes(); let r = resources::post_message_to_worker(resource.rid, msg).wait(); assert!(r.is_ok()); let maybe_msg = resources::get_message_from_worker(resource.rid) .wait() .unwrap(); assert!(maybe_msg.is_some()); // Check if message received is [1, 2, 3] in json assert_eq!(*maybe_msg.unwrap(), *b"[1,2,3]"); let msg = json!("exit") .to_string() .into_boxed_str() .into_boxed_bytes(); let r = resources::post_message_to_worker(resource.rid, msg).wait(); assert!(r.is_ok()); }) } #[test] fn removed_from_resource_table_on_close() { tokio_util::init(|| { let mut worker = create_test_worker(); worker .execute("onmessage = () => { delete window.onmessage; }") .unwrap(); let resource = worker.state.resource.clone(); let rid = resource.rid; let worker_future = worker .then(move |r| -> Result<(), ()> { resource.close(); println!("workers.rs after resource close"); r.unwrap(); Ok(()) }) .shared(); let worker_future_ = worker_future.clone(); tokio::spawn(lazy(move || 
worker_future_.then(|_| Ok(())))); assert_eq!(resources::get_type(rid), Some("worker".to_string())); let msg = json!("hi").to_string().into_boxed_str().into_boxed_bytes(); let r = resources::post_message_to_worker(rid, msg).wait(); assert!(r.is_ok()); debug!("rid {:?}", rid); worker_future.wait().unwrap(); assert_eq!(resources::get_type(rid), None); }) } #[test] fn execute_mod_resolve_error() { tokio_util::init(|| { // "foo" is not a valid module specifier so this should return an error. let mut worker = create_test_worker(); let module_specifier = ModuleSpecifier::resolve_url_or_path("does-not-exist").unwrap(); let result = worker.execute_mod_async(&module_specifier, false).wait(); assert!(result.is_err()); }) } #[test] fn execute_mod_002_hello() { tokio_util::init(|| { // This assumes cwd is project root (an assumption made throughout the // tests). let mut worker = create_test_worker(); let module_specifier = ModuleSpecifier::resolve_url_or_path("./tests/002_hello.ts").unwrap(); let result = worker.execute_mod_async(&module_specifier, false).wait(); assert!(result.is_ok()); }) } }
29.756757
80
0.617015
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn execute_mod_esm_imports_a() {\n let module_specifier =\n ModuleSpecifier::resolve_url_or_path(\"tests/esm_imports_a.js\").unwrap();\n let argv = vec![String::from(\"./deno\"), module_specifier.to_string()];\n let state = ThreadSafeState::new(\n flags::DenoFlags::default(),\n argv,\n op_selector_std,\n Progress::new(),\n true,\n )\n .unwrap();\n let state_ = state.clone();\n tokio_util::run(lazy(move || {\n let mut worker =\n Worker::new(\"TEST\".to_string(), StartupData::None, state);\n let result = worker.execute_mod(&module_specifier, false);\n if let Err(err) = result {\n eprintln!(\"execute_mod err {:?}\", err);\n }\n tokio_util::panic_on_error(worker)\n }));\n\n let metrics = &state_.metrics;\n assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 2);\n // Check that we didn't start the compiler.\n assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn execute_mod_circular() {\n let module_specifier =\n ModuleSpecifier::resolve_url_or_path(\"tests/circular1.js\").unwrap();\n let argv = vec![String::from(\"./deno\"), module_specifier.to_string()];\n let state = ThreadSafeState::new(\n flags::DenoFlags::default(),\n argv,\n op_selector_std,\n Progress::new(),\n true,\n )\n .unwrap();\n let state_ = state.clone();\n tokio_util::run(lazy(move || {\n let mut worker =\n Worker::new(\"TEST\".to_string(), StartupData::None, state);\n let result = worker.execute_mod(&module_specifier, false);\n if let Err(err) = result {\n eprintln!(\"execute_mod err {:?}\", err);\n }\n tokio_util::panic_on_error(worker)\n }));\n\n let metrics = &state_.metrics;\n assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 2);\n // Check that we didn't start the compiler.\n assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn execute_006_url_imports() {\n let module_specifier 
=\n ModuleSpecifier::resolve_url_or_path(\"tests/006_url_imports.ts\").unwrap();\n let argv = vec![String::from(\"deno\"), module_specifier.to_string()];\n let mut flags = flags::DenoFlags::default();\n flags.reload = true;\n let state =\n ThreadSafeState::new(flags, argv, op_selector_std, Progress::new(), true)\n .unwrap();\n let state_ = state.clone();\n tokio_util::run(lazy(move || {\n let mut worker = Worker::new(\n \"TEST\".to_string(),\n startup_data::deno_isolate_init(),\n state,\n );\n worker.execute(\"denoMain()\").unwrap();\n let result = worker.execute_mod(&module_specifier, false);\n if let Err(err) = result {\n eprintln!(\"execute_mod err {:?}\", err);\n }\n tokio_util::panic_on_error(worker)\n }));\n\n let metrics = &state_.metrics;\n assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 3);\n // Check that we've only invoked the compiler once.\n assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 1);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_worker_messages() {\n tokio_util::init(|| {\n let mut worker = create_test_worker();\n let source = r#\"\n onmessage = function(e) {\n console.log(\"msg from main script\", e.data);\n if (e.data == \"exit\") {\n delete window.onmessage;\n return;\n } else {\n console.assert(e.data === \"hi\");\n }\n postMessage([1, 2, 3]);\n console.log(\"after postMessage\");\n }\n \"#;\n worker.execute(source).unwrap();\n\n let resource = worker.state.resource.clone();\n let resource_ = resource.clone();\n\n tokio::spawn(lazy(move || {\n worker.then(move |r| -> Result<(), ()> {\n resource_.close();\n r.unwrap();\n Ok(())\n })\n }));\n\n let msg = json!(\"hi\").to_string().into_boxed_str().into_boxed_bytes();\n\n let r = resources::post_message_to_worker(resource.rid, msg).wait();\n assert!(r.is_ok());\n\n let maybe_msg = resources::get_message_from_worker(resource.rid)\n .wait()\n .unwrap();\n assert!(maybe_msg.is_some());\n // Check if message received is [1, 2, 3] in json\n 
assert_eq!(*maybe_msg.unwrap(), *b\"[1,2,3]\");\n\n let msg = json!(\"exit\")\n .to_string()\n .into_boxed_str()\n .into_boxed_bytes();\n let r = resources::post_message_to_worker(resource.rid, msg).wait();\n assert!(r.is_ok());\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn removed_from_resource_table_on_close() {\n tokio_util::init(|| {\n let mut worker = create_test_worker();\n worker\n .execute(\"onmessage = () => { delete window.onmessage; }\")\n .unwrap();\n\n let resource = worker.state.resource.clone();\n let rid = resource.rid;\n\n let worker_future = worker\n .then(move |r| -> Result<(), ()> {\n resource.close();\n println!(\"workers.rs after resource close\");\n r.unwrap();\n Ok(())\n })\n .shared();\n\n let worker_future_ = worker_future.clone();\n tokio::spawn(lazy(move || worker_future_.then(|_| Ok(()))));\n\n assert_eq!(resources::get_type(rid), Some(\"worker\".to_string()));\n\n let msg = json!(\"hi\").to_string().into_boxed_str().into_boxed_bytes();\n let r = resources::post_message_to_worker(rid, msg).wait();\n assert!(r.is_ok());\n debug!(\"rid {:?}\", rid);\n\n worker_future.wait().unwrap();\n assert_eq!(resources::get_type(rid), None);\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn execute_mod_resolve_error() {\n tokio_util::init(|| {\n // \"foo\" is not a valid module specifier so this should return an error.\n let mut worker = create_test_worker();\n let module_specifier =\n ModuleSpecifier::resolve_url_or_path(\"does-not-exist\").unwrap();\n let result = worker.execute_mod_async(&module_specifier, false).wait();\n assert!(result.is_err());\n })\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn execute_mod_002_hello() {\n tokio_util::init(|| {\n // This assumes cwd is project root (an assumption made throughout the\n // tests).\n let mut worker = create_test_worker();\n let module_specifier =\n ModuleSpecifier::resolve_url_or_path(\"./tests/002_hello.ts\").unwrap();\n let 
result = worker.execute_mod_async(&module_specifier, false).wait();\n assert!(result.is_ok());\n })\n }\n}" ]
f7083653bd7c225a40aadf2840d1999f0867962b
598
rs
Rust
src/program/program.rs
ethanfaust/rust-intcode
29d2eeb23f12493f648369cd47723ac7cfd82335
[ "MIT" ]
null
null
null
src/program/program.rs
ethanfaust/rust-intcode
29d2eeb23f12493f648369cd47723ac7cfd82335
[ "MIT" ]
null
null
null
src/program/program.rs
ethanfaust/rust-intcode
29d2eeb23f12493f648369cd47723ac7cfd82335
[ "MIT" ]
null
null
null
#[derive(Debug, PartialEq, Clone)] pub struct IntCodeProgram { pub program: Vec<i64> } impl IntCodeProgram { pub fn from_string(input: String) -> IntCodeProgram { let memory: Vec<i64> = input.split(',') .map(|x| x.parse::<i64>().unwrap()) .collect(); return IntCodeProgram { program: memory }; } } #[cfg(test)] mod tests { use super::*; #[test] fn test_from_string() { let program = IntCodeProgram::from_string(String::from("1,0,0,0,99")); assert_eq!(vec![1,0,0,0,99], program.program); } }
22.148148
78
0.560201
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_from_string() {\n let program = IntCodeProgram::from_string(String::from(\"1,0,0,0,99\"));\n assert_eq!(vec![1,0,0,0,99], program.program);\n }\n}" ]
f70841561a62cd1565585d5cc5d3311af647fcdb
4,561
rs
Rust
src/util.rs
delthas/ellidri
e57ad32d28678d0295fa26f4477a3cad65d69eee
[ "ISC" ]
null
null
null
src/util.rs
delthas/ellidri
e57ad32d28678d0295fa26f4477a3cad65d69eee
[ "ISC" ]
null
null
null
src/util.rs
delthas/ellidri
e57ad32d28678d0295fa26f4477a3cad65d69eee
[ "ISC" ]
null
null
null
use rand_chacha::rand_core::{RngCore, SeedableRng}; use rand_chacha::ChaChaRng; use std::cell::RefCell; use std::time; thread_local! { static RNG: RefCell<ChaChaRng> = RefCell::new(ChaChaRng::seed_from_u64(time::SystemTime::now().duration_since(time::UNIX_EPOCH).unwrap().as_secs())); } pub type Masks<'a> = std::str::Split<'a, char>; pub struct MaskSet { raw: String, } impl MaskSet { pub fn new() -> Self { MaskSet { raw: String::new() } } pub fn is_match(&self, s: &str) -> bool { self.raw.split(',').any(|mask| match_mask(mask, s)) } /// Returns whether mask has been inserted. pub fn insert(&mut self, mask: &str) -> bool { if self.raw.split(',').any(|m| m == mask) { return false; } if !self.raw.is_empty() { self.raw.push(','); } self.raw.push_str(mask); true } /// Returns whether mask has been removed. pub fn remove(&mut self, mask: &str) -> bool { if let Some(removed) = self.raw.split(',').find(|m| *m == mask) { let start = removed.as_ptr() as usize - self.raw.as_ptr() as usize; let mut end = start + removed.len(); if end < self.raw.len() { end += 1; } self.raw.replace_range(start..end, ""); return true; } false } pub fn masks(&self) -> Masks<'_> { self.raw.split(',') } } // Taken from <https://golang.org/src/path/match.go?s=1084:1142#L28> pub fn match_mask(mut mask: &str, mut s: &str) -> bool { 'pattern: while !mask.is_empty() { let (star, chunk) = scan_chunk(&mut mask); if star && chunk.is_empty() { return true; } let (rest, ok) = match_chunk(chunk, s); if ok && (rest.is_empty() || !mask.is_empty()) { s = rest; continue; } if star { for i in 0..s.len() { let (rest, ok) = match_chunk(chunk, &s[i + 1..]); if ok { if mask.is_empty() && !rest.is_empty() { continue; } s = rest; continue 'pattern; } } } return false; } s.is_empty() } fn scan_chunk<'a>(mask: &mut &'a str) -> (bool, &'a str) { let initial_len = mask.len(); *mask = mask.trim_start_matches('*'); let star = mask.len() < initial_len; let i = mask.find('*').unwrap_or(mask.len()); let chunk = &mask[..i]; *mask 
= &mask[i..]; (star, chunk) } fn match_chunk<'a>(chunk: &str, mut s: &'a str) -> (&'a str, bool) { for fc in chunk.chars() { let mut it = s.chars(); let fs = match it.next() { Some(fs) => fs, None => return ("", false), }; if fc != '?' && fc != fs { return ("", false); } s = it.as_str(); } (s, true) } pub fn new_message_id() -> String { let mut bytes = [0x0; 24]; RNG.with(|rng| { rng.borrow_mut().fill_bytes(&mut bytes); }); let mut encoded = [0x0; 24 * 4 / 3]; base64::encode_config_slice(&bytes, base64::STANDARD_NO_PAD, &mut encoded); std::str::from_utf8(&encoded).unwrap().to_owned() } pub fn time_precise() -> String { chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Millis, true) } pub fn time_str() -> String { chrono::Local::now().to_rfc2822() } pub fn time() -> u64 { match time::SystemTime::now().duration_since(time::UNIX_EPOCH) { Ok(unix_time) => unix_time.as_secs(), Err(_) => { log::error!("Computer clock set before 01/01/1970?"); 0 } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_mask_match() { let cases = [ ("abc", "abc", true), ("*", "abc", true), ("*c", "abc", true), ("a*", "a", true), ("a*", "abc", true), ("a*/b", "abc/b", true), ("a*b?c*x", "abxbbxdbxebxczzx", true), ("a*b?c*x", "abxbbxdbxebxczzy", false), ("a?b", "a☺b", true), ("a???b", "a☺b", false), ("*x", "xxx", true), ]; for (mask, s, is_match) in &cases { assert_eq!( match_mask(mask, s), *is_match, "match_mask({:?}, {:?})", mask, s ); } } } // mod tests
24.788043
153
0.471826
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_mask_match() {\n let cases = [\n (\"abc\", \"abc\", true),\n (\"*\", \"abc\", true),\n (\"*c\", \"abc\", true),\n (\"a*\", \"a\", true),\n (\"a*\", \"abc\", true),\n (\"a*/b\", \"abc/b\", true),\n (\"a*b?c*x\", \"abxbbxdbxebxczzx\", true),\n (\"a*b?c*x\", \"abxbbxdbxebxczzy\", false),\n (\"a?b\", \"a☺b\", true),\n (\"a???b\", \"a☺b\", false),\n (\"*x\", \"xxx\", true),\n ];\n\n for (mask, s, is_match) in &cases {\n assert_eq!(\n match_mask(mask, s),\n *is_match,\n \"match_mask({:?}, {:?})\",\n mask,\n s\n );\n }\n }\n}" ]
f708516e5823b9b5b0d7cc7366f38aeee25ba002
8,462
rs
Rust
storage/tests/storage_for_shell.rs
mrjoe7/tezedge
ed4c33ca4ea575e5b8f238ce16575fbf3f9e1001
[ "MIT" ]
1
2019-11-29T18:25:07.000Z
2019-11-29T18:25:07.000Z
storage/tests/storage_for_shell.rs
mrjoe7/tezedge
ed4c33ca4ea575e5b8f238ce16575fbf3f9e1001
[ "MIT" ]
null
null
null
storage/tests/storage_for_shell.rs
mrjoe7/tezedge
ed4c33ca4ea575e5b8f238ce16575fbf3f9e1001
[ "MIT" ]
null
null
null
// Copyright (c) SimpleStaking and Tezedge Contributors // SPDX-License-Identifier: MIT use std::env; use std::path::{Path, PathBuf}; use failure::Error; use slog::{Drain, Level, Logger}; use crypto::hash::{chain_id_from_block_hash, ContextHash, HashType}; use storage::*; use storage::tests_common::TmpStorage; use tezos_api::environment::TezosEnvironmentConfiguration; use tezos_api::ffi::{ApplyBlockResponse, CommitGenesisResult, GenesisChain, ProtocolOverrides}; use tezos_messages::p2p::binary_message::BinaryMessage; use tezos_messages::p2p::encoding::prelude::*; #[test] fn test_storage() -> Result<(), Error> { // logger let log = create_logger(); // storage let context_dir = PathBuf::from("__storage_for_shell"); let tmp_storage_dir = test_storage_dir_path("__storage_for_shell"); let tmp_storage = TmpStorage::create(tmp_storage_dir.clone())?; let mut block_storage = BlockStorage::new(tmp_storage.storage()); let mut block_meta_storage = BlockMetaStorage::new(tmp_storage.storage()); let mut operations_meta_storage = OperationsMetaStorage::new(tmp_storage.storage()); // tezos env - sample let tezos_env = TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2019-08-06T15:18:56Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesiscde8db4cX94".to_string(), protocol: "PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV".to_string(), }, bootstrap_lookup_addresses: vec![ "bootstrap.zeronet.fun".to_string(), "bootzero.tzbeta.net".to_string() ], version: "TEZOS_ZERONET_2019-08-06T15:18:56Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![], voted_protocol_overrides: vec![], }, enable_testchain: true, }; // initialize empty storage let init_data = resolve_storage_init_chain_data(&tezos_env, &tmp_storage_dir, &context_dir, log.clone()); assert!(init_data.is_ok()); let init_data = init_data.unwrap(); assert_eq!(init_data.genesis_block_header_hash, HashType::BlockHash.string_to_bytes(&tezos_env.genesis.block)?); 
assert_eq!(init_data.chain_id, chain_id_from_block_hash(&HashType::BlockHash.string_to_bytes(&tezos_env.genesis.block)?)); // load current head (non) let current_head = block_meta_storage.load_current_head(); assert!(current_head.is_ok()); assert!(current_head.unwrap().is_none()); // genesis is aleady stored with context hash zero let genesis = block_storage.get(&init_data.genesis_block_header_hash)?; assert!(genesis.is_none()); // simulate commit genesis in two steps let new_context_hash: ContextHash = HashType::ContextHash.string_to_bytes("CoV16kW8WgL51SpcftQKdeqc94D6ekghMgPMmEn7TSZzFA697PeE")?; let _ = initialize_storage_with_genesis_block( &mut block_storage, &init_data, &tezos_env, &new_context_hash, log.clone(), )?; let commit_genesis_result = CommitGenesisResult { block_header_proto_json: "{block_header_proto_json}".to_string(), block_header_proto_metadata_json: "{block_header_proto_metadata_json}".to_string(), operations_proto_metadata_json: "{operations_proto_metadata_json}".to_string(), }; let _ = store_commit_genesis_result( &mut block_storage, &mut block_meta_storage, &mut operations_meta_storage, &init_data, commit_genesis_result.clone(), )?; // genesis is stored with replaced context hash let genesis = block_storage.get(&init_data.genesis_block_header_hash)?.expect("Genesis was not stored!"); assert_eq!(new_context_hash, genesis.header.context().clone()); // genesis is stored with replaced context hash let genesis = block_storage.get_by_context_hash(&new_context_hash)?.expect("Genesis was not assigned to context_hash!"); assert_eq!(new_context_hash, genesis.header.context().clone()); // check genesis jsons let (_, data) = block_storage.get_with_json_data(&init_data.genesis_block_header_hash)?.expect("No json data was saved"); assert_eq!(data.block_header_proto_json(), &commit_genesis_result.block_header_proto_json); assert_eq!(data.block_header_proto_metadata_json(), &commit_genesis_result.block_header_proto_metadata_json); 
assert_eq!(data.operations_proto_metadata_json(), &commit_genesis_result.operations_proto_metadata_json); // simulate apply block let block = make_test_block_header()?; block_storage.put_block_header(&block)?; block_meta_storage.put_block_header(&block, &init_data.chain_id)?; let mut metadata = block_meta_storage.get(&block.hash)?.expect("No metadata was saved"); assert!(!metadata.is_applied()); // save apply result let apply_result = ApplyBlockResponse { last_allowed_fork_level: 5, max_operations_ttl: 6, context_hash: HashType::ContextHash.string_to_bytes("CoVmAcMV64uAQo8XvfLr9VDuz7HVZLT4cgK1w1qYmTjQNbGwQwDd")?.clone(), block_header_proto_json: "{block_header_proto_json}".to_string(), block_header_proto_metadata_json: "{block_header_proto_metadata_json}".to_string(), operations_proto_metadata_json: "{operations_proto_metadata_json}".to_string(), validation_result_message: "applied".to_string(), forking_testchain: false, forking_testchain_data: None, }; let (block_json_data, block_additional_data) = store_applied_block_result( &mut block_storage, &mut block_meta_storage, &block.hash, apply_result.clone(), &mut metadata, )?; // check if data stored assert!(metadata.is_applied()); let metadata = block_meta_storage.get(&block.hash)?.expect("No metadata was found"); assert!(metadata.is_applied()); // check additional let (_, data) = block_storage.get_with_additional_data(&block.hash)?.expect("No additional data was saved"); assert_eq!(data.max_operations_ttl(), apply_result.max_operations_ttl as u16); assert_eq!(data.last_allowed_fork_level(), apply_result.last_allowed_fork_level); assert_eq!(block_additional_data.max_operations_ttl(), apply_result.max_operations_ttl as u16); assert_eq!(block_additional_data.last_allowed_fork_level(), apply_result.last_allowed_fork_level); // check json let (_, data) = block_storage.get_with_json_data(&block.hash)?.expect("No json data was saved"); assert_eq!(data.block_header_proto_json(), &apply_result.block_header_proto_json); 
assert_eq!(data.block_header_proto_metadata_json(), &apply_result.block_header_proto_metadata_json); assert_eq!(data.operations_proto_metadata_json(), &apply_result.operations_proto_metadata_json); assert_eq!(block_json_data.block_header_proto_json(), &apply_result.block_header_proto_json); assert_eq!(block_json_data.block_header_proto_metadata_json(), &apply_result.block_header_proto_metadata_json); assert_eq!(block_json_data.operations_proto_metadata_json(), &apply_result.operations_proto_metadata_json); // load current head - should be changeg let current_head = block_meta_storage.load_current_head()?.expect("Current header should be set"); assert_eq!(current_head, block.hash); Ok(()) } fn make_test_block_header() -> Result<BlockHeaderWithHash, Error> { let message_bytes = hex::decode("00006d6e0102dd00defaf70c53e180ea148b349a6feb4795610b2abc7b07fe91ce50a90814000000005c1276780432bc1d3a28df9a67b363aa1638f807214bb8987e5f9c0abcbd69531facffd1c80000001100000001000000000800000000000c15ef15a6f54021cb353780e2847fb9c546f1d72c1dc17c3db510f45553ce501ce1de000000000003c762c7df00a856b8bfcaf0676f069f825ca75f37f2bee9fe55ba109cec3d1d041d8c03519626c0c0faa557e778cb09d2e0c729e8556ed6a7a518c84982d1f2682bc6aa753f")?; let block_header = BlockHeaderWithHash::new(BlockHeader::from_bytes(message_bytes)?)?; Ok(block_header) } pub fn test_storage_dir_path(dir_name: &str) -> PathBuf { let out_dir = env::var("OUT_DIR").expect("OUT_DIR is not defined"); let path = Path::new(out_dir.as_str()) .join(Path::new(dir_name)) .to_path_buf(); path } fn create_logger() -> Logger { let drain = slog_async::Async::new(slog_term::FullFormat::new(slog_term::TermDecorator::new().build()).build().fuse()).build().filter_level(Level::Info).fuse(); Logger::root(drain, slog::o!()) }
47.80791
453
0.74415
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_storage() -> Result<(), Error> {\n // logger\n let log = create_logger();\n\n // storage\n let context_dir = PathBuf::from(\"__storage_for_shell\");\n let tmp_storage_dir = test_storage_dir_path(\"__storage_for_shell\");\n let tmp_storage = TmpStorage::create(tmp_storage_dir.clone())?;\n let mut block_storage = BlockStorage::new(tmp_storage.storage());\n let mut block_meta_storage = BlockMetaStorage::new(tmp_storage.storage());\n let mut operations_meta_storage = OperationsMetaStorage::new(tmp_storage.storage());\n\n // tezos env - sample\n let tezos_env = TezosEnvironmentConfiguration {\n genesis: GenesisChain {\n time: \"2019-08-06T15:18:56Z\".to_string(),\n block: \"BLockGenesisGenesisGenesisGenesisGenesiscde8db4cX94\".to_string(),\n protocol: \"PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV\".to_string(),\n },\n bootstrap_lookup_addresses: vec![\n \"bootstrap.zeronet.fun\".to_string(),\n \"bootzero.tzbeta.net\".to_string()\n ],\n version: \"TEZOS_ZERONET_2019-08-06T15:18:56Z\".to_string(),\n protocol_overrides: ProtocolOverrides {\n forced_protocol_upgrades: vec![],\n voted_protocol_overrides: vec![],\n },\n enable_testchain: true,\n };\n\n // initialize empty storage\n let init_data = resolve_storage_init_chain_data(&tezos_env, &tmp_storage_dir, &context_dir, log.clone());\n assert!(init_data.is_ok());\n\n let init_data = init_data.unwrap();\n assert_eq!(init_data.genesis_block_header_hash, HashType::BlockHash.string_to_bytes(&tezos_env.genesis.block)?);\n assert_eq!(init_data.chain_id, chain_id_from_block_hash(&HashType::BlockHash.string_to_bytes(&tezos_env.genesis.block)?));\n\n // load current head (non)\n let current_head = block_meta_storage.load_current_head();\n assert!(current_head.is_ok());\n assert!(current_head.unwrap().is_none());\n\n // genesis is aleady stored with context hash zero\n let genesis = block_storage.get(&init_data.genesis_block_header_hash)?;\n 
assert!(genesis.is_none());\n\n // simulate commit genesis in two steps\n let new_context_hash: ContextHash = HashType::ContextHash.string_to_bytes(\"CoV16kW8WgL51SpcftQKdeqc94D6ekghMgPMmEn7TSZzFA697PeE\")?;\n let _ = initialize_storage_with_genesis_block(\n &mut block_storage,\n &init_data,\n &tezos_env,\n &new_context_hash,\n log.clone(),\n )?;\n\n let commit_genesis_result = CommitGenesisResult {\n block_header_proto_json: \"{block_header_proto_json}\".to_string(),\n block_header_proto_metadata_json: \"{block_header_proto_metadata_json}\".to_string(),\n operations_proto_metadata_json: \"{operations_proto_metadata_json}\".to_string(),\n };\n let _ = store_commit_genesis_result(\n &mut block_storage,\n &mut block_meta_storage,\n &mut operations_meta_storage,\n &init_data,\n commit_genesis_result.clone(),\n )?;\n\n // genesis is stored with replaced context hash\n let genesis = block_storage.get(&init_data.genesis_block_header_hash)?.expect(\"Genesis was not stored!\");\n assert_eq!(new_context_hash, genesis.header.context().clone());\n\n // genesis is stored with replaced context hash\n let genesis = block_storage.get_by_context_hash(&new_context_hash)?.expect(\"Genesis was not assigned to context_hash!\");\n assert_eq!(new_context_hash, genesis.header.context().clone());\n\n // check genesis jsons\n let (_, data) = block_storage.get_with_json_data(&init_data.genesis_block_header_hash)?.expect(\"No json data was saved\");\n assert_eq!(data.block_header_proto_json(), &commit_genesis_result.block_header_proto_json);\n assert_eq!(data.block_header_proto_metadata_json(), &commit_genesis_result.block_header_proto_metadata_json);\n assert_eq!(data.operations_proto_metadata_json(), &commit_genesis_result.operations_proto_metadata_json);\n\n // simulate apply block\n let block = make_test_block_header()?;\n block_storage.put_block_header(&block)?;\n block_meta_storage.put_block_header(&block, &init_data.chain_id)?;\n let mut metadata = 
block_meta_storage.get(&block.hash)?.expect(\"No metadata was saved\");\n assert!(!metadata.is_applied());\n\n // save apply result\n let apply_result = ApplyBlockResponse {\n last_allowed_fork_level: 5,\n max_operations_ttl: 6,\n context_hash: HashType::ContextHash.string_to_bytes(\"CoVmAcMV64uAQo8XvfLr9VDuz7HVZLT4cgK1w1qYmTjQNbGwQwDd\")?.clone(),\n block_header_proto_json: \"{block_header_proto_json}\".to_string(),\n block_header_proto_metadata_json: \"{block_header_proto_metadata_json}\".to_string(),\n operations_proto_metadata_json: \"{operations_proto_metadata_json}\".to_string(),\n validation_result_message: \"applied\".to_string(),\n forking_testchain: false,\n forking_testchain_data: None,\n };\n let (block_json_data, block_additional_data) = store_applied_block_result(\n &mut block_storage,\n &mut block_meta_storage,\n &block.hash,\n apply_result.clone(),\n &mut metadata,\n )?;\n\n // check if data stored\n assert!(metadata.is_applied());\n let metadata = block_meta_storage.get(&block.hash)?.expect(\"No metadata was found\");\n assert!(metadata.is_applied());\n\n // check additional\n let (_, data) = block_storage.get_with_additional_data(&block.hash)?.expect(\"No additional data was saved\");\n assert_eq!(data.max_operations_ttl(), apply_result.max_operations_ttl as u16);\n assert_eq!(data.last_allowed_fork_level(), apply_result.last_allowed_fork_level);\n assert_eq!(block_additional_data.max_operations_ttl(), apply_result.max_operations_ttl as u16);\n assert_eq!(block_additional_data.last_allowed_fork_level(), apply_result.last_allowed_fork_level);\n\n // check json\n let (_, data) = block_storage.get_with_json_data(&block.hash)?.expect(\"No json data was saved\");\n assert_eq!(data.block_header_proto_json(), &apply_result.block_header_proto_json);\n assert_eq!(data.block_header_proto_metadata_json(), &apply_result.block_header_proto_metadata_json);\n assert_eq!(data.operations_proto_metadata_json(), &apply_result.operations_proto_metadata_json);\n 
assert_eq!(block_json_data.block_header_proto_json(), &apply_result.block_header_proto_json);\n assert_eq!(block_json_data.block_header_proto_metadata_json(), &apply_result.block_header_proto_metadata_json);\n assert_eq!(block_json_data.operations_proto_metadata_json(), &apply_result.operations_proto_metadata_json);\n\n // load current head - should be changeg\n let current_head = block_meta_storage.load_current_head()?.expect(\"Current header should be set\");\n assert_eq!(current_head, block.hash);\n\n Ok(())\n}\n}" ]
f70888f47b4cec862b5adaae56dc2ade256a044e
2,540
rs
Rust
src/utils/num.rs
Turbo87/rust-igc
bb679dc942cb014c32bbe83b85efd05e19567001
[ "Apache-2.0" ]
1
2018-11-27T21:19:08.000Z
2018-11-27T21:19:08.000Z
src/utils/num.rs
Turbo87/rust-igc
bb679dc942cb014c32bbe83b85efd05e19567001
[ "Apache-2.0" ]
4
2018-11-06T15:46:27.000Z
2019-09-23T06:13:28.000Z
src/utils/num.rs
Turbo87/rust-igc
bb679dc942cb014c32bbe83b85efd05e19567001
[ "Apache-2.0" ]
null
null
null
use std::str::{FromStr, from_utf8_unchecked}; pub fn parse_int<T: FromStr>(bytes: &[u8]) -> Option<T> { // `unsafe` here should be okay because `from_str()` converts back // to `&[u8]` and only cares about ASCII digits let chars = unsafe { from_utf8_unchecked(bytes) }; <(T)>::from_str(chars).ok() } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_int() { assert_eq!(parse_int::<u16>(b"0"), Some(0)); assert_eq!(parse_int::<u8>(b"12"), Some(12)); assert_eq!(parse_int::<u16>(b"2018"), Some(2018)); assert_eq!(parse_int::<u32>(b"2018"), Some(2018)); assert_eq!(parse_int::<u32>(b"01d8"), None); assert_eq!(parse_int::<u32>(b"-018"), None); assert_eq!(parse_int::<i16>(b"0"), Some(0)); assert_eq!(parse_int::<i16>(b"-12"), Some(-12)); assert_eq!(parse_int::<i16>(b"2018"), Some(2018)); assert_eq!(parse_int::<i32>(b"-018"), Some(-18)); assert_eq!(parse_int::<i32>(b"-0d18"), None); } proptest! { #[test] fn test_parse_int_with_u8(v: u8) { let input = format!("{}", v); prop_assert_eq!(parse_int::<u8>(input.as_bytes()), Some(v)); } #[test] fn test_parse_int_with_u16(v: u16) { let input = format!("{}", v); prop_assert_eq!(parse_int::<u16>(input.as_bytes()), Some(v)); } #[test] fn test_parse_int_with_u32(v: u32) { let input = format!("{}", v); prop_assert_eq!(parse_int::<u32>(input.as_bytes()), Some(v)); } #[test] fn test_parse_int_with_u64(v: u64) { let input = format!("{}", v); prop_assert_eq!(parse_int::<u64>(input.as_bytes()), Some(v)); } #[test] fn test_parse_int_with_i8(v: i8) { let input = format!("{}", v); prop_assert_eq!(parse_int::<i8>(input.as_bytes()), Some(v)); } #[test] fn test_parse_int_with_i16(v: i16) { let input = format!("{}", v); prop_assert_eq!(parse_int::<i16>(input.as_bytes()), Some(v)); } #[test] fn test_parse_int_with_i32(v: i32) { let input = format!("{}", v); prop_assert_eq!(parse_int::<i32>(input.as_bytes()), Some(v)); } #[test] fn test_parse_int_with_i64(v: i64) { let input = format!("{}", v); 
prop_assert_eq!(parse_int::<i64>(input.as_bytes()), Some(v)); } } }
32.151899
73
0.52126
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_parse_int() {\n assert_eq!(parse_int::<u16>(b\"0\"), Some(0));\n assert_eq!(parse_int::<u8>(b\"12\"), Some(12));\n assert_eq!(parse_int::<u16>(b\"2018\"), Some(2018));\n assert_eq!(parse_int::<u32>(b\"2018\"), Some(2018));\n assert_eq!(parse_int::<u32>(b\"01d8\"), None);\n assert_eq!(parse_int::<u32>(b\"-018\"), None);\n assert_eq!(parse_int::<i16>(b\"0\"), Some(0));\n assert_eq!(parse_int::<i16>(b\"-12\"), Some(-12));\n assert_eq!(parse_int::<i16>(b\"2018\"), Some(2018));\n assert_eq!(parse_int::<i32>(b\"-018\"), Some(-18));\n assert_eq!(parse_int::<i32>(b\"-0d18\"), None);\n }\n}" ]
f708a15894e34d83c789b3076b58915d1f3de980
4,358
rs
Rust
17/main.rs
tacgomes/advent-of-code-2018
591e4346f9dd7cecf24bc215e8a06f31404405fb
[ "MIT" ]
null
null
null
17/main.rs
tacgomes/advent-of-code-2018
591e4346f9dd7cecf24bc215e8a06f31404405fb
[ "MIT" ]
null
null
null
17/main.rs
tacgomes/advent-of-code-2018
591e4346f9dd7cecf24bc215e8a06f31404405fb
[ "MIT" ]
null
null
null
use regex::Regex; use std::env; use std::fs; use std::path::Path; use std::process; #[macro_use] extern crate lazy_static; use itertools::Itertools; lazy_static! { static ref RE1: Regex = Regex::new(r"x=(\d+), y=(\d+)..(\d+)").unwrap(); static ref RE2: Regex = Regex::new(r"y=(\d+), x=(\d+)..(\d+)").unwrap(); } type Area = (usize, usize, usize, usize); const SPRING_X: usize = 500; const SPRING_Y: usize = 0; #[derive(Eq, PartialEq, Clone)] enum Cell { Spring, Sand, Clay, Flowing, Still, } fn parse_input(file_name: impl AsRef<Path>) -> Vec<Area> { fs::read_to_string(file_name) .unwrap() .lines() .map(|l| { if let Some(cap) = RE1.captures(l) { let x = cap[1].parse::<usize>().unwrap(); let y1 = cap[2].parse::<usize>().unwrap(); let y2 = cap[3].parse::<usize>().unwrap(); (x, x, y1, y2) } else if let Some(cap) = RE2.captures(l) { let y = cap[1].parse::<usize>().unwrap(); let x1 = cap[2].parse::<usize>().unwrap(); let x2 = cap[3].parse::<usize>().unwrap(); (x1, x2, y, y) } else { panic!("Malformatted input line: {}", l); } }) .collect() } fn drip(row: usize, col: usize, xdir: isize, grid: &mut [Vec<Cell>]) -> usize { if grid[row][col] == Cell::Sand { grid[row][col] = Cell::Flowing; } if row == grid.len() - 1 || grid[row][col] == Cell::Clay { return col; } if grid[row + 1][col] == Cell::Sand { drip(row + 1, col, 0, grid); } if grid[row + 1][col] == Cell::Clay || grid[row + 1][col] == Cell::Still { if xdir != 0 { return drip(row, (col as isize + xdir) as usize, xdir, grid); } else { let left_col = drip(row, col - 1, -1, grid); let right_col = drip(row, col + 1, 1, grid); if grid[row][left_col] == Cell::Clay && grid[row][right_col] == Cell::Clay { for c in left_col + 1..right_col { grid[row][c] = Cell::Still; } } } } col } fn display(grid: &[Vec<Cell>]) { for row in grid.iter() { for col in row.iter() { match col { Cell::Spring => print!("+"), Cell::Sand => print!("."), Cell::Clay => print!("#"), Cell::Flowing => print!("|"), Cell::Still => print!("~"), }; } println!(); } 
println!(); } fn count_cells(cell: Cell, min_y: usize, grid: &[Vec<Cell>]) -> usize { grid.iter() .skip(min_y) .flat_map(|x| x.iter()) .filter(|&col| *col == cell) .count() } fn solve(clay_areas: &[Area]) -> (usize, usize) { let min_x = clay_areas.iter().map(|t| t.0).min().unwrap(); let max_x = clay_areas.iter().map(|t| t.1).max().unwrap(); let min_y = clay_areas.iter().map(|t| t.2).min().unwrap(); let max_y = clay_areas.iter().map(|t| t.3).max().unwrap(); let mut grid = vec![vec![Cell::Sand; max_x - min_x + 2]; max_y + 1]; grid[SPRING_Y][SPRING_X - min_x + 1] = Cell::Spring; for area in clay_areas { let (x1, x2, y1, y2) = *area; for (x, y) in (x1..=x2).cartesian_product(y1..=y2) { grid[y][x - min_x + 1] = Cell::Clay; } } drip(SPRING_Y + 1, SPRING_X - min_x + 1, 0, &mut grid); display(&grid); let flowing = count_cells(Cell::Flowing, min_y, &grid); let still = count_cells(Cell::Still, min_y, &grid); (flowing + still, still) } fn main() { if env::args().count() != 2 { eprintln!("USAGE: {} FILE", env::args().next().unwrap()); process::exit(1); } let clay_areas = parse_input(env::args().nth(1).unwrap()); let (part1, part2) = solve(&clay_areas); println!("Part 1: {}", part1); println!("Part 2: {}", part2); } #[cfg(test)] mod tests { use super::*; #[test] fn test_puzzle_example() { let clay_areas = parse_input("example.txt"); assert_eq!(solve(&clay_areas), (57, 29)); } #[test] fn test_puzzle_input() { let clay_areas = parse_input("input.txt"); assert_eq!(solve(&clay_areas), (31471, 24169)); } }
27.408805
88
0.494493
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_puzzle_example() {\n let clay_areas = parse_input(\"example.txt\");\n assert_eq!(solve(&clay_areas), (57, 29));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_puzzle_input() {\n let clay_areas = parse_input(\"input.txt\");\n assert_eq!(solve(&clay_areas), (31471, 24169));\n }\n}" ]
f708bf8c94ae0358ed54b1319097371effc8e2be
48,811
rs
Rust
polars/polars-lazy/src/tests/queries.rs
JanKaul/polars
d7bc52e1d9b984e521094bc926e214b8fef417c8
[ "MIT" ]
null
null
null
polars/polars-lazy/src/tests/queries.rs
JanKaul/polars
d7bc52e1d9b984e521094bc926e214b8fef417c8
[ "MIT" ]
null
null
null
polars/polars-lazy/src/tests/queries.rs
JanKaul/polars
d7bc52e1d9b984e521094bc926e214b8fef417c8
[ "MIT" ]
null
null
null
use super::*; use polars_arrow::prelude::QuantileInterpolOptions; use polars_core::series::ops::NullBehavior; #[test] fn test_lazy_with_column() { let df = get_df() .lazy() .with_column(lit(10).alias("foo")) .collect() .unwrap(); println!("{:?}", df); assert_eq!(df.width(), 6); assert!(df.column("foo").is_ok()); let df = get_df() .lazy() .with_column(lit(10).alias("foo")) .select([col("foo"), col("sepal.width")]) .collect() .unwrap(); println!("{:?}", df); } #[test] fn test_lazy_exec() { let df = get_df(); let new = df .clone() .lazy() .select([col("sepal.width"), col("variety")]) .sort("sepal.width", false) .collect(); println!("{:?}", new); let new = df .lazy() .filter(not(col("sepal.width").lt(lit(3.5)))) .collect() .unwrap(); let check = new.column("sepal.width").unwrap().f64().unwrap().gt(3.4); assert!(check.all()) } #[test] fn test_lazy_alias() { let df = get_df(); let new = df .lazy() .select([col("sepal.width").alias("petals"), col("sepal.width")]) .collect() .unwrap(); assert_eq!(new.get_column_names(), &["petals", "sepal.width"]); } #[test] fn test_lazy_melt() { let df = get_df(); let out = df .lazy() .melt( vec!["petal.width".to_string(), "petal.length".to_string()], vec!["sepal.length".to_string(), "sepal.width".to_string()], ) .filter(col("variable").eq(lit("sepal.length"))) .select([col("variable"), col("petal.width"), col("value")]) .collect() .unwrap(); assert_eq!(out.shape(), (7, 3)); } #[test] fn test_lazy_drop_nulls() { let df = df! { "foo" => &[Some(1), None, Some(3)], "bar" => &[Some(1), Some(2), None] } .unwrap(); let new = df.lazy().drop_nulls(None).collect().unwrap(); let out = df! 
{ "foo" => &[Some(1)], "bar" => &[Some(1)] } .unwrap(); assert!(new.frame_equal(&out)); } #[test] fn test_lazy_udf() { let df = get_df(); let new = df .lazy() .select([col("sepal.width").map(|s| Ok(s * 200.0), GetOutput::same_type())]) .collect() .unwrap(); assert_eq!( new.column("sepal.width").unwrap().f64().unwrap().get(0), Some(700.0) ); } #[test] fn test_lazy_is_null() { let df = get_df(); let new = df .clone() .lazy() .filter(col("sepal.width").is_null()) .collect() .unwrap(); assert_eq!(new.height(), 0); let new = df .clone() .lazy() .filter(col("sepal.width").is_not_null()) .collect() .unwrap(); assert_eq!(new.height(), df.height()); let new = df .lazy() .groupby([col("variety")]) .agg([col("sepal.width").min()]) .collect() .unwrap(); println!("{:?}", new); assert_eq!(new.shape(), (1, 2)); } #[test] fn test_lazy_pushdown_through_agg() { // An aggregation changes the schema names, check if the pushdown succeeds. let df = get_df(); let new = df .lazy() .groupby([col("variety")]) .agg([ col("sepal.length").min(), col("petal.length").min().alias("foo"), ]) .select([col("foo")]) // second selection is to test if optimizer can handle that .select([col("foo").alias("bar")]) .collect() .unwrap(); println!("{:?}", new); } #[test] #[cfg(feature = "temporal")] fn test_lazy_agg() { let s0 = DateChunked::parse_from_str_slice( "date", &[ "2020-08-21", "2020-08-21", "2020-08-22", "2020-08-23", "2020-08-22", ], "%Y-%m-%d", ) .into_series(); let s1 = Series::new("temp", [20, 10, 7, 9, 1].as_ref()); let s2 = Series::new("rain", [0.2, 0.1, 0.3, 0.1, 0.01].as_ref()); let df = DataFrame::new(vec![s0, s1, s2]).unwrap(); let lf = df .lazy() .groupby([col("date")]) .agg([ col("rain").min().alias("min"), col("rain").sum().alias("sum"), col("rain") .quantile(0.5, QuantileInterpolOptions::default()) .alias("median_rain"), ]) .sort("date", false); let new = lf.collect().unwrap(); dbg!(new); } #[test] fn test_lazy_shift() { let df = get_df(); let new = df .lazy() 
.select([col("sepal.width").alias("foo").shift(2)]) .collect() .unwrap(); assert_eq!(new.column("foo").unwrap().f64().unwrap().get(0), None); } #[test] fn test_shift_and_fill() -> Result<()> { let out = df![ "a" => [1, 2, 3] ]? .lazy() .select([col("a").shift_and_fill(-1, lit(5))]) .collect()?; let out = out.column("a")?; assert_eq!(Vec::from(out.i32()?), &[Some(2), Some(3), Some(5)]); Ok(()) } #[test] fn test_lazy_ternary_and_predicates() { let df = get_df(); // test if this runs. This failed because is_not_null changes the schema name, so we // really need to check the root column let ldf = df .clone() .lazy() .with_column(lit(3).alias("foo")) .filter(col("foo").is_not_null()); let _new = ldf.collect().unwrap(); let ldf = df .lazy() .with_column( when(col("sepal.length").lt(lit(5.0))) .then( lit(3), // is another type on purpose to check type coercion ) .otherwise(col("sepal.width")) .alias("foo"), ) .filter(col("foo").gt(lit(3.0))); let new = ldf.collect().unwrap(); dbg!(new); } #[test] fn test_lazy_binary_ops() { let df = df!("a" => &[1, 2, 3, 4, 5, ]).unwrap(); let new = df .lazy() .select([col("a").eq(lit(2)).alias("foo")]) .collect() .unwrap(); assert_eq!(new.column("foo").unwrap().sum::<i32>(), Some(1)); } #[test] fn test_lazy_query_1() { // test on aggregation pushdown // and a filter that is not in the projection let df_a = load_df(); let df_b = df_a.clone(); df_a.lazy() .left_join(df_b.lazy(), col("b"), col("b")) .filter(col("a").lt(lit(2))) .groupby([col("b")]) .agg([col("b").first(), col("c").first()]) .select([col("b"), col("c")]) .collect() .unwrap(); } #[test] fn test_lazy_query_2() { let df = load_df(); let ldf = df .lazy() .with_column( col("a") .map(|s| Ok(s * 2), GetOutput::same_type()) .alias("foo"), ) .filter(col("a").lt(lit(2))) .select([col("b"), col("a")]); let new = ldf.collect().unwrap(); assert_eq!(new.shape(), (1, 2)); } #[test] fn test_lazy_query_3() { // query checks if schema of scanning is not changed by aggregation let _ = 
scan_foods_csv() .groupby([col("calories")]) .agg([col("fats_g").max()]) .collect() .unwrap(); } #[test] fn test_lazy_query_4() { let df = df! { "uid" => [0, 0, 0, 1, 1, 1], "day" => [1, 2, 3, 1, 2, 3], "cumcases" => [10, 12, 15, 25, 30, 41] } .unwrap(); let base_df = df.lazy(); let out = base_df .clone() .groupby([col("uid")]) .agg([ col("day").list().alias("day"), col("cumcases") .apply(|s: Series| Ok(&s - &(s.shift(1))), GetOutput::same_type()) .alias("diff_cases"), ]) .explode([col("day"), col("diff_cases")]) .join( base_df, [col("uid"), col("day")], [col("uid"), col("day")], JoinType::Inner, ) .collect() .unwrap(); assert_eq!( Vec::from(out.column("diff_cases").unwrap().i32().unwrap()), &[None, Some(2), Some(3), None, Some(5), Some(11)] ); } #[test] fn test_lazy_query_5() { // if this one fails, the list builder probably does not handle offsets let df = df! { "uid" => [0, 0, 0, 1, 1, 1], "day" => [1, 2, 4, 1, 2, 3], "cumcases" => [10, 12, 15, 25, 30, 41] } .unwrap(); let out = df .lazy() .groupby([col("uid")]) .agg([col("day").head(Some(2))]) .collect() .unwrap(); dbg!(&out); let s = out .select_at_idx(1) .unwrap() .list() .unwrap() .get(0) .unwrap(); assert_eq!(s.len(), 2); let s = out .select_at_idx(1) .unwrap() .list() .unwrap() .get(0) .unwrap(); assert_eq!(s.len(), 2); } #[test] #[cfg(feature = "is_in")] fn test_lazy_query_8() -> Result<()> { // https://github.com/pola-rs/polars/issues/842 let df = df![ "A" => [1, 2, 3], "B" => [1, 2, 3], "C" => [1, 2, 3], "D" => [1, 2, 3], "E" => [1, 2, 3] ]?; let mut selection = vec![]; for c in &["A", "B", "C", "D", "E"] { let e = when(col(c).is_in(col("E"))) .then(col("A")) .otherwise(Null {}.lit()) .alias(c); selection.push(e); } let out = df .lazy() .select(selection) .filter(col("D").gt(lit(1))) .collect()?; assert_eq!(out.shape(), (2, 5)); Ok(()) } #[test] fn test_lazy_query_9() -> Result<()> { // https://github.com/pola-rs/polars/issues/958 let cities = df![ "Cities.City"=> ["Moscow", "Berlin", "Paris","Hamburg", 
"Lyon", "Novosibirsk"], "Cities.Population"=> [11.92, 3.645, 2.161, 1.841, 0.513, 1.511], "Cities.Country"=> ["Russia", "Germany", "France", "Germany", "France", "Russia"] ]?; let sales = df![ "Sales.City"=> ["Moscow", "Berlin", "Paris", "Moscow", "Berlin", "Paris", "Moscow", "Berlin", "Paris"], "Sales.Item"=> ["Item A", "Item A","Item A", "Item B", "Item B","Item B", "Item C", "Item C","Item C"], "Sales.Amount"=> [200, 180, 100, 3, 30, 20, 90, 130, 125] ]?; let out = sales .lazy() .join( cities.lazy(), [col("Sales.City")], [col("Cities.City")], JoinType::Inner, ) .groupby([col("Cities.Country")]) .agg([col("Sales.Amount").sum().alias("sum")]) .sort("sum", false) .collect()?; let vals = out .column("sum")? .i32()? .into_no_null_iter() .collect::<Vec<_>>(); assert_eq!(vals, &[245, 293, 340]); Ok(()) } #[test] #[cfg(all( feature = "temporal", feature = "dtype-datetime", feature = "dtype-date" ))] fn test_lazy_query_10() { use polars_core::export::chrono::Duration as ChronoDuration; let date = NaiveDate::from_ymd(2021, 3, 5); let x: Series = DatetimeChunked::from_naive_datetime( "x", [ NaiveDateTime::new(date, NaiveTime::from_hms(12, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(13, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(14, 0, 0)), ], TimeUnit::Nanoseconds, ) .into(); let y: Series = DatetimeChunked::from_naive_datetime( "y", [ NaiveDateTime::new(date, NaiveTime::from_hms(11, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(11, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(11, 0, 0)), ], TimeUnit::Nanoseconds, ) .into(); let df = DataFrame::new(vec![x, y]).unwrap(); let out = df .lazy() .select(&[(col("x") - col("y")).alias("z")]) .collect() .unwrap(); let z: Series = DurationChunked::from_duration( "z", [ ChronoDuration::hours(1), ChronoDuration::hours(2), ChronoDuration::hours(3), ], TimeUnit::Nanoseconds, ) .into(); assert!(out.column("z").unwrap().series_equal(&z)); let x: Series = DatetimeChunked::from_naive_datetime( "x", [ 
NaiveDateTime::new(date, NaiveTime::from_hms(2, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(3, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(4, 0, 0)), ], TimeUnit::Milliseconds, ) .into(); let y: Series = DatetimeChunked::from_naive_datetime( "y", [ NaiveDateTime::new(date, NaiveTime::from_hms(1, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(1, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(1, 0, 0)), ], TimeUnit::Nanoseconds, ) .into(); let df = DataFrame::new(vec![x, y]).unwrap(); let out = df .lazy() .select(&[(col("x") - col("y")).alias("z")]) .collect() .unwrap(); assert!(out .column("z") .unwrap() .series_equal(&z.cast(&DataType::Duration(TimeUnit::Milliseconds)).unwrap())); } #[test] #[cfg(all( feature = "temporal", feature = "dtype-date", feature = "dtype-datetime" ))] fn test_lazy_query_7() { let date = NaiveDate::from_ymd(2021, 3, 5); let dates = [ NaiveDateTime::new(date, NaiveTime::from_hms(12, 0, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(12, 1, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(12, 2, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(12, 3, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(12, 4, 0)), NaiveDateTime::new(date, NaiveTime::from_hms(12, 5, 0)), ]; let data = vec![Some(1.), Some(2.), Some(3.), Some(4.), None, None]; let df = DataFrame::new(vec![ DatetimeChunked::from_naive_datetime("date", dates, TimeUnit::Nanoseconds).into(), Series::new("data", data), ]) .unwrap(); // this tests if predicate pushdown not interferes with the shift data. 
let out = df .lazy() .with_column(col("data").shift(-1).alias("output")) .with_column(col("output").shift(2).alias("shifted")) .filter(col("date").gt(lit(NaiveDateTime::new(date, NaiveTime::from_hms(12, 2, 0))))) .collect() .unwrap(); let a = out.column("shifted").unwrap().sum::<f64>().unwrap() - 7.0; assert!(a < 0.01 && a > -0.01); } #[test] fn test_lazy_shift_and_fill_all() { let data = &[1, 2, 3]; let df = DataFrame::new(vec![Series::new("data", data)]).unwrap(); let out = df .lazy() .with_column(col("data").shift(1).fill_null(lit(0)).alias("output")) .collect() .unwrap(); assert_eq!( Vec::from(out.column("output").unwrap().i32().unwrap()), vec![Some(0), Some(1), Some(2)] ); } #[test] fn test_lazy_shift_operation_no_filter() { // check if predicate pushdown optimization does not fail let df = df! { "a" => &[1, 2, 3], "b" => &[1, 2, 3] } .unwrap(); df.lazy() .with_column(col("b").shift(1).alias("output")) .collect() .unwrap(); } #[test] fn test_simplify_expr() { // Test if expression containing literals is simplified let df = get_df(); let plan = df .lazy() .select(&[lit(1.0f32) + lit(1.0f32) + col("sepal.width")]) .logical_plan; let mut expr_arena = Arena::new(); let mut lp_arena = Arena::new(); let rules: &mut [Box<dyn OptimizationRule>] = &mut [Box::new(SimplifyExprRule {})]; let optimizer = StackOptimizer {}; let mut lp_top = to_alp(plan, &mut expr_arena, &mut lp_arena); lp_top = optimizer.optimize_loop(rules, &mut expr_arena, &mut lp_arena, lp_top); let plan = node_to_lp(lp_top, &mut expr_arena, &mut lp_arena); assert!( matches!(plan, LogicalPlan::Projection{ expr, ..} if matches!(&expr[0], Expr::BinaryExpr{left, ..} if **left == Expr::Literal(LiteralValue::Float32(2.0)))) ); } #[test] fn test_lazy_wildcard() { let df = load_df(); let new = df.clone().lazy().select([col("*")]).collect().unwrap(); assert_eq!(new.shape(), (5, 3)); let new = df .lazy() .groupby([col("b")]) .agg([col("*").sum(), col("*").first()]) .collect() .unwrap(); assert_eq!(new.shape(), 
(3, 5)); // Should exclude b from wildcard aggregations. } #[test] fn test_lazy_reverse() { let df = load_df(); assert!(df .clone() .lazy() .reverse() .collect() .unwrap() .frame_equal_missing(&df.reverse())) } #[test] fn test_lazy_predicate_pushdown_binary_expr() { let df = load_df(); df.lazy() .filter(col("a").eq(col("b"))) .select([col("c")]) .collect() .unwrap(); } #[test] fn test_lazy_update_column() { let df = load_df(); df.lazy().with_column(col("a") / lit(10)).collect().unwrap(); } #[test] fn test_lazy_fill_null() { let df = df! { "a" => &[None, Some(2.0)], "b" => &[Some(1.0), None] } .unwrap(); let out = df.lazy().fill_null(lit(10.0)).collect().unwrap(); let correct = df! { "a" => &[Some(10.0), Some(2.0)], "b" => &[Some(1.0), Some(10.0)] } .unwrap(); assert!(out.frame_equal(&correct)); assert_eq!(out.get_column_names(), vec!["a", "b"]) } #[test] fn test_lazy_double_projection() { let df = df! { "foo" => &[1, 2, 3] } .unwrap(); df.lazy() .select([col("foo").alias("bar")]) .select([col("bar")]) .collect() .unwrap(); } #[test] fn test_type_coercion() { let df = df! { "foo" => &[1, 2, 3], "bar" => &[1.0, 2.0, 3.0] } .unwrap(); let lp = df.lazy().select([col("foo") * col("bar")]).logical_plan; let mut expr_arena = Arena::new(); let mut lp_arena = Arena::new(); let rules: &mut [Box<dyn OptimizationRule>] = &mut [Box::new(TypeCoercionRule {})]; let optimizer = StackOptimizer {}; let mut lp_top = to_alp(lp, &mut expr_arena, &mut lp_arena); lp_top = optimizer.optimize_loop(rules, &mut expr_arena, &mut lp_arena, lp_top); let lp = node_to_lp(lp_top, &mut expr_arena, &mut lp_arena); if let LogicalPlan::Projection { expr, .. } = lp { if let Expr::BinaryExpr { left, right, .. } = &expr[0] { assert!(matches!(&**left, Expr::Cast { .. })); // bar is already float, does not have to be coerced assert!(matches!(&**right, Expr::Column { .. })); } else { panic!() } }; } #[test] fn test_lazy_partition_agg() { let df = df! 
{ "foo" => &[1, 1, 2, 2, 3], "bar" => &[1.0, 1.0, 2.0, 2.0, 3.0] } .unwrap(); let out = df .lazy() .groupby([col("foo")]) .agg([col("bar").mean()]) .sort("foo", false) .collect() .unwrap(); assert_eq!( Vec::from(out.column("bar").unwrap().f64().unwrap()), &[Some(1.0), Some(2.0), Some(3.0)] ); let out = scan_foods_csv() .groupby([col("category")]) .agg([col("calories").list()]) .sort("category", false) .collect() .unwrap(); let cat_agg_list = out.select_at_idx(1).unwrap(); let fruit_series = cat_agg_list.list().unwrap().get(0).unwrap(); let fruit_list = fruit_series.i64().unwrap(); assert_eq!( Vec::from(fruit_list), &[ Some(60), Some(30), Some(50), Some(30), Some(60), Some(130), Some(50), ] ) } #[test] fn test_lazy_groupby_apply() { let df = fruits_cars(); df.lazy() .groupby([col("fruits")]) .agg([col("cars").apply( |s: Series| Ok(Series::new("", &[s.len() as u32])), GetOutput::same_type(), )]) .collect() .unwrap(); } #[test] fn test_lazy_shift_and_fill() { let df = df! { "A" => &[1, 2, 3, 4, 5], "B" => &[5, 4, 3, 2, 1] } .unwrap(); let out = df .clone() .lazy() .with_column(col("A").shift_and_fill(2, col("B").mean())) .collect() .unwrap(); assert_eq!(out.column("A").unwrap().null_count(), 0); // shift from the other side let out = df .clone() .lazy() .with_column(col("A").shift_and_fill(-2, col("B").mean())) .collect() .unwrap(); assert_eq!(out.column("A").unwrap().null_count(), 0); let out = df .lazy() .shift_and_fill(-1, col("B").std()) .collect() .unwrap(); assert_eq!(out.column("A").unwrap().null_count(), 0); } #[test] fn test_lazy_groupby() { let df = df! { "a" => &[Some(1.0), None, Some(3.0), Some(4.0), Some(5.0)], "groups" => &["a", "a", "b", "c", "c"] } .unwrap(); let out = df .lazy() .groupby([col("groups")]) .agg([col("a").mean()]) .sort("a", false) .collect() .unwrap(); assert_eq!(out.column("a").unwrap().f64().unwrap().get(0), Some(1.0)); } #[test] fn test_lazy_tail() { let df = df! 
{ "A" => &[1, 2, 3, 4, 5], "B" => &[5, 4, 3, 2, 1] } .unwrap(); let _out = df.lazy().tail(3).collect().unwrap(); } #[test] fn test_lazy_groupby_sort() { let df = df! { "a" => ["a", "b", "a", "b", "b", "c"], "b" => [1, 2, 3, 4, 5, 6] } .unwrap(); let out = df .clone() .lazy() .groupby([col("a")]) .agg([col("b").sort(false).first()]) .collect() .unwrap() .sort(["a"], false) .unwrap(); assert_eq!( Vec::from(out.column("b").unwrap().i32().unwrap()), [Some(1), Some(2), Some(6)] ); let out = df .lazy() .groupby([col("a")]) .agg([col("b").sort(false).last()]) .collect() .unwrap() .sort(["a"], false) .unwrap(); assert_eq!( Vec::from(out.column("b").unwrap().i32().unwrap()), [Some(3), Some(5), Some(6)] ); } #[test] fn test_lazy_groupby_sort_by() { let df = df! { "a" => ["a", "a", "a", "b", "b", "c"], "b" => [1, 2, 3, 4, 5, 6], "c" => [6, 1, 4, 3, 2, 1] } .unwrap(); let out = df .lazy() .groupby([col("a")]) .agg([col("b").sort_by([col("c")], [true]).first()]) .collect() .unwrap() .sort(["a"], false) .unwrap(); assert_eq!( Vec::from(out.column("b").unwrap().i32().unwrap()), [Some(1), Some(4), Some(6)] ); } #[test] #[cfg(feature = "dtype-datetime")] fn test_lazy_groupby_cast() { let df = df! { "a" => ["a", "a", "a", "b", "b", "c"], "b" => [1, 2, 3, 4, 5, 6] } .unwrap(); // test if it runs in groupby context let _out = df .lazy() .groupby([col("a")]) .agg([col("b") .mean() .cast(DataType::Datetime(TimeUnit::Nanoseconds, None))]) .collect() .unwrap(); } #[test] fn test_lazy_groupby_binary_expr() { let df = df! { "a" => ["a", "a", "a", "b", "b", "c"], "b" => [1, 2, 3, 4, 5, 6] } .unwrap(); // test if it runs in groupby context let out = df .lazy() .groupby([col("a")]) .agg([col("b").mean() * lit(2)]) .sort("a", false) .collect() .unwrap(); assert_eq!( Vec::from(out.column("b").unwrap().f64().unwrap()), [Some(4.0), Some(9.0), Some(12.0)] ); } #[test] fn test_lazy_groupby_filter() -> Result<()> { let df = df! 
{ "a" => ["a", "a", "a", "b", "b", "c"], "b" => [1, 2, 3, 4, 5, 6] }?; // We test if the filters work in the groupby context // and that the aggregations can deal with empty sets let out = df .lazy() .groupby([col("a")]) .agg([ col("b").filter(col("a").eq(lit("a"))).sum().alias("b_sum"), col("b") .filter(col("a").eq(lit("a"))) .first() .alias("b_first"), col("b") .filter(col("a").eq(lit("e"))) .mean() .alias("b_mean"), col("b") .filter(col("a").eq(lit("a"))) .last() .alias("b_last"), ]) .sort("a", false) .collect()?; dbg!(&out); assert_eq!( Vec::from(out.column("b_sum").unwrap().i32().unwrap()), [Some(6), None, None] ); assert_eq!( Vec::from(out.column("b_first").unwrap().i32().unwrap()), [Some(1), None, None] ); assert_eq!( Vec::from(out.column("b_mean").unwrap().f64().unwrap()), [None, None, None] ); assert_eq!( Vec::from(out.column("b_last").unwrap().i32().unwrap()), [Some(3), None, None] ); Ok(()) } #[test] fn test_groupby_projection_pd_same_column() -> Result<()> { // this query failed when projection pushdown was enabled let a = || { let df = df![ "col1" => ["a", "ab", "abc"], "col2" => [1, 2, 3] ] .unwrap(); df.lazy() .select([col("col1").alias("foo"), col("col2").alias("bar")]) }; let out = a() .left_join(a(), col("foo"), col("foo")) .select([col("bar")]) .collect()?; let a = out.column("bar")?.i32()?; assert_eq!(Vec::from(a), &[Some(1), Some(2), Some(3)]); Ok(()) } #[test] fn test_groupby_sort_slice() -> Result<()> { let df = df![ "groups" => [1, 2, 2, 3, 3, 3], "vals" => [1, 5, 6, 3, 9, 8] ]?; // get largest two values per groups // expected: // group values // 1 1 // 2 6, 5 // 3 9, 8 let out1 = df .clone() .lazy() .sort("vals", true) .groupby([col("groups")]) .agg([col("vals").head(Some(2)).alias("foo")]) .sort("groups", false) .collect()?; let out2 = df .lazy() .groupby([col("groups")]) .agg([col("vals").sort(true).head(Some(2)).alias("foo")]) .sort("groups", false) .collect()?; assert!(out1.column("foo")?.series_equal(out2.column("foo")?)); dbg!(out1, 
out2); Ok(()) } #[test] fn test_groupby_cumsum() -> Result<()> { let df = df![ "groups" => [1, 2, 2, 3, 3, 3], "vals" => [1, 5, 6, 3, 9, 8] ]?; let out = df .lazy() .groupby([col("groups")]) .agg([col("vals").cumsum(false)]) .sort("groups", false) .collect()?; dbg!(&out); assert_eq!( Vec::from(out.column("vals")?.explode()?.i32()?), [1, 5, 11, 3, 12, 20] .iter() .copied() .map(Some) .collect::<Vec<_>>() ); Ok(()) } #[test] fn test_argsort_multiple() -> Result<()> { let df = df![ "int" => [1, 2, 3, 1, 2], "flt" => [3.0, 2.0, 1.0, 2.0, 1.0], "str" => ["a", "a", "a", "b", "b"] ]?; let out = df .clone() .lazy() .select([argsort_by([col("int"), col("flt")], &[true, false])]) .collect()?; assert_eq!( Vec::from(out.column("int")?.u32()?), [2, 4, 1, 3, 0] .iter() .copied() .map(Some) .collect::<Vec<_>>() ); // check if this runs let _out = df .lazy() .select([argsort_by([col("str"), col("flt")], &[true, false])]) .collect()?; Ok(()) } #[test] fn test_multiple_explode() -> Result<()> { let df = df![ "a" => [0, 1, 2, 0, 2], "b" => [5, 4, 3, 2, 1], "c" => [2, 3, 4, 1, 5] ]?; let out = df .lazy() .groupby([col("a")]) .agg([ col("b").list().alias("b_list"), col("c").list().alias("c_list"), ]) .explode([col("c_list"), col("b_list")]) .collect()?; assert_eq!(out.shape(), (5, 3)); Ok(()) } #[test] fn test_filter_and_alias() -> Result<()> { let df = df![ "a" => [0, 1, 2, 0, 2] ]?; let out = df .lazy() .with_column(col("a").pow(2.0).alias("a_squared")) .filter(col("a_squared").gt(lit(1)).and(col("a").gt(lit(1)))) .collect()?; let expected = df![ "a" => [2, 2], "a_squared" => [4, 4] ]?; assert!(out.frame_equal(&expected)); Ok(()) } #[test] fn test_filter_lit() { // see https://github.com/pola-rs/polars/issues/790 // failed due to broadcasting filters and splitting threads. 
let iter = (0..100).map(|i| ('A'..='Z').nth(i % 26).unwrap().to_string()); let a = Series::from_iter(iter); let df = DataFrame::new([a].into()).unwrap(); let out = df.lazy().filter(lit(true)).collect().unwrap(); assert_eq!(out.shape(), (100, 1)); } #[test] fn test_ternary_null() -> Result<()> { let df = df![ "a" => ["a", "b", "c"] ]?; let out = df .lazy() .select([when(col("a").eq(lit("c"))) .then(Null {}.lit()) .otherwise(col("a")) .alias("foo")]) .collect()?; assert_eq!( out.column("foo")?.is_null().into_iter().collect::<Vec<_>>(), &[Some(false), Some(false), Some(true)] ); Ok(()) } #[test] fn test_fill_forward() -> Result<()> { let df = df![ "a" => ["a", "b", "a"], "b" => [Some(1), None, None] ]?; let out = df .lazy() .select([col("b").forward_fill().list().over([col("a")])]) .collect()?; let agg = out.column("b")?.list()?; let a: Series = agg.get(0).unwrap(); assert!(a.series_equal(&Series::new("b", &[1, 1]))); let a: Series = agg.get(2).unwrap(); assert!(a.series_equal(&Series::new("b", &[1, 1]))); let a: Series = agg.get(1).unwrap(); assert_eq!(a.null_count(), 1); Ok(()) } #[cfg(feature = "cross_join")] #[test] fn test_cross_join() -> Result<()> { let df1 = df![ "a" => ["a", "b", "a"], "b" => [Some(1), None, None] ]?; let df2 = df![ "a" => [1, 2], "b" => [None, Some(12)] ]?; let out = df1.lazy().cross_join(df2.lazy()).collect()?; assert_eq!(out.shape(), (6, 4)); Ok(()) } #[test] fn test_fold_wildcard() -> Result<()> { let df1 = df![ "a" => [1, 2, 3], "b" => [1, 2, 3] ]?; let out = df1 .clone() .lazy() .select([fold_exprs(lit(0), |a, b| Ok(&a + &b), [col("*")]).alias("foo")]) .collect()?; assert_eq!( Vec::from(out.column("foo")?.i32()?), &[Some(2), Some(4), Some(6)] ); // test if we don't panic due to wildcard let _out = df1 .lazy() .select([all_exprs([col("*").is_not_null()])]) .collect()?; Ok(()) } #[test] fn test_select_empty_df() -> Result<()> { // https://github.com/pola-rs/polars/issues/1056 let df1 = df![ "a" => [1, 2, 3], "b" => [1, 2, 3] ]?; let out = 
df1 .lazy() .filter(col("a").eq(lit(0))) // this will lead to an empty frame .select([col("a"), lit(1).alias("c")]) .collect()?; assert_eq!(out.column("a")?.len(), 0); assert_eq!(out.column("c")?.len(), 0); Ok(()) } #[test] fn test_keep_name() -> Result<()> { let df = df![ "a" => [1, 2, 3], "b" => [1, 2, 3] ]?; let out = df .lazy() .select([ col("a").alias("bar").keep_name(), col("b").alias("bar").keep_name(), ]) .collect()?; assert_eq!(out.get_column_names(), &["a", "b"]); Ok(()) } #[test] fn test_exclude() -> Result<()> { let df = df![ "a" => [1, 2, 3], "b" => [1, 2, 3], "c" => [1, 2, 3] ]?; let out = df.lazy().select([col("*").exclude(&["b"])]).collect()?; assert_eq!(out.get_column_names(), &["a", "c"]); Ok(()) } #[test] #[cfg(feature = "regex")] fn test_regex_selection() -> Result<()> { let df = df![ "anton" => [1, 2, 3], "arnold schwars" => [1, 2, 3], "annie" => [1, 2, 3] ]?; let out = df.lazy().select([col("^a.*o.*$")]).collect()?; assert_eq!(out.get_column_names(), &["anton", "arnold schwars"]); Ok(()) } #[test] fn test_filter_in_groupby_agg() -> Result<()> { // This tests if the fitler is correctly handled by the binary expression. // This could lead to UB if it were not the case. The filter creates an empty column. // but the group tuples could still be untouched leading to out of bounds aggregation. 
let df = df![ "a" => [1, 1, 2], "b" => [1, 2, 3] ]?; let out = df .clone() .lazy() .groupby([col("a")]) .agg([(col("b").filter(col("b").eq(lit(100))) * lit(2)) .mean() .alias("b_mean")]) .collect()?; assert_eq!(out.column("b_mean")?.null_count(), 2); let out = df .lazy() .groupby([col("a")]) .agg([(col("b") .filter(col("b").eq(lit(100))) .map(Ok, GetOutput::same_type())) .mean() .alias("b_mean")]) .collect()?; assert_eq!(out.column("b_mean")?.null_count(), 2); Ok(()) } #[test] fn test_sort_by() -> Result<()> { let df = df![ "a" => [1, 2, 3, 4, 5], "b" => [1, 1, 1, 2, 2], "c" => [2, 3, 1, 2, 1] ]?; // evaluate let out = df .clone() .lazy() .select([col("a").sort_by([col("b"), col("c")], [false])]) .collect()?; let a = out.column("a")?; assert_eq!( Vec::from(a.i32().unwrap()), &[Some(3), Some(1), Some(2), Some(5), Some(4)] ); // aggregate let out = df .clone() .lazy() .groupby_stable([col("b")]) .agg([col("a").sort_by([col("b"), col("c")], [false])]) .collect()?; let a = out.column("a")?.explode()?; assert_eq!( Vec::from(a.i32().unwrap()), &[Some(3), Some(1), Some(2), Some(5), Some(4)] ); // evaluate_on_groups let out = df .lazy() .groupby_stable([col("b")]) .agg([col("a").sort_by([col("b"), col("c")], [false]).list()]) .collect()?; let a = out.column("a")?.explode()?; assert_eq!( Vec::from(a.i32().unwrap()), &[Some(3), Some(1), Some(2), Some(5), Some(4)] ); Ok(()) } #[test] fn test_filter_after_shift_in_groups() -> Result<()> { let df = fruits_cars(); let out = df .lazy() .select([ col("fruits"), col("B") .shift(1) .filter(col("B").shift(1).gt(lit(4))) .list() .over([col("fruits")]) .alias("filtered"), ]) .collect()?; assert_eq!( out.column("filtered")? .list()? .get(0) .unwrap() .i32()? .get(0) .unwrap(), 5 ); assert_eq!( out.column("filtered")? .list()? .get(1) .unwrap() .i32()? 
.get(0) .unwrap(), 5 ); assert_eq!(out.column("filtered")?.list()?.get(2).unwrap().len(), 0); Ok(()) } #[test] fn test_lazy_ternary_predicate_pushdown() -> Result<()> { let df = df![ "a" => &[10, 1, 2, 3] ]?; let out = df .lazy() .select([when(col("a").eq(lit(10))) .then(Null {}.lit()) .otherwise(col("a"))]) .drop_nulls(None) .collect()?; assert_eq!( Vec::from(out.get_columns()[0].i32()?), &[Some(1), Some(2), Some(3)] ); Ok(()) } #[test] fn test_categorical_addition() -> Result<()> { let df = fruits_cars(); // test if we can do that arithmetic operation with utf8 and categorical let out = df .lazy() .select([ col("fruits").cast(DataType::Categorical), col("cars").cast(DataType::Categorical), ]) .select([(col("fruits") + lit(" ") + col("cars")).alias("foo")]) .collect()?; assert_eq!(out.column("foo")?.utf8()?.get(0).unwrap(), "banana beetle"); Ok(()) } #[test] fn test_error_duplicate_names() { let df = fruits_cars(); assert!(df.lazy().select([col("*"), col("*"),]).collect().is_err()); } #[test] fn test_filter_count() -> Result<()> { let df = fruits_cars(); let out = df .lazy() .select([col("fruits") .filter(col("fruits").eq(lit("banana"))) .count()]) .collect()?; assert_eq!(out.column("fruits")?.u32()?.get(0), Some(3)); Ok(()) } #[test] #[cfg(feature = "dtype-i16")] fn test_groupby_small_ints() -> Result<()> { let df = df![ "id_32" => [1i32, 2], "id_16" => [1i16, 2] ]?; // https://github.com/pola-rs/polars/issues/1255 let out = df .lazy() .groupby([col("id_16"), col("id_32")]) .agg([col("id_16").sum().alias("foo")]) .sort("foo", true) .collect()?; assert_eq!(Vec::from(out.column("foo")?.i64()?), &[Some(2), Some(1)]); Ok(()) } #[test] fn test_when_then_schema() -> Result<()> { let df = fruits_cars(); let schema = df .lazy() .select([when(col("A").gt(lit(1))) .then(Null {}.lit()) .otherwise(col("A"))]) .schema(); assert_ne!(schema.fields()[0].data_type(), &DataType::Null); Ok(()) } #[test] fn test_singleton_broadcast() -> Result<()> { let df = fruits_cars(); let out = 
df .lazy() .select([col("fruits"), lit(1).alias("foo")]) .collect()?; assert!(out.column("foo")?.len() > 1); Ok(()) } #[test] fn test_sort_by_suffix() -> Result<()> { let df = fruits_cars(); let out = df .lazy() .select([col("*") .sort_by([col("A")], [false]) .list() .over([col("fruits")]) .flatten() .suffix("_sorted")]) .collect()?; let expected = df!( "A_sorted"=> [1, 2, 5, 3, 4], "fruits_sorted"=> ["banana", "banana", "banana", "apple", "apple"], "B_sorted"=> [5, 4, 1, 3, 2], "cars_sorted"=> ["beetle", "audi", "beetle", "beetle", "beetle"] )?; assert!(expected.frame_equal(&out)); Ok(()) } #[test] fn test_list_in_select_context() -> Result<()> { let s = Series::new("a", &[1, 2, 3]); let mut builder = get_list_builder(s.dtype(), s.len(), 1, s.name()); builder.append_series(&s); let expected = builder.finish().into_series(); let df = DataFrame::new(vec![s])?; let out = df.lazy().select([col("a").list()]).collect()?; let s = out.column("a")?; assert!(s.series_equal(&expected)); Ok(()) } #[test] fn test_round_after_agg() -> Result<()> { let df = fruits_cars(); let out = df .lazy() .groupby([col("fruits")]) .agg([col("A") .cast(DataType::Float32) .mean() .round(2) .alias("foo")]) .collect()?; assert!(out.column("foo")?.f32().is_ok()); let df = df![ "groups" => ["pigeon", "rabbit", "rabbit", "Chris", "pigeon", "fast", "fast", "pigeon", "rabbit", "Chris"], "b" => [5409, 4848, 4864, 3540, 8103, 3083, 8575, 9963, 8809, 5425], "c" => [0.4517241160719615, 0.2551467646274673, 0.8682045191407308, 0.9925316385786037, 0.5392027792928116, 0.7633847828107002, 0.7967295231651537, 0.01444779067224733, 0.23807484087472652, 0.10985868798350984] ]?; let out = df .lazy() .groupby_stable([col("groups")]) .agg([((col("b") * col("c")).sum() / col("b").sum()) .round(2) .alias("foo")]) .collect()?; let out = out.column("foo")?; let out = out.f64()?; assert_eq!( Vec::from(out), &[Some(0.3), Some(0.41), Some(0.46), Some(0.79)] ); Ok(()) } #[test] #[cfg(feature = "dtype-date")] fn 
test_fill_nan() -> Result<()> { let s0 = Series::new("date", &[1, 2, 3]).cast(&DataType::Date)?; let s1 = Series::new("float", &[Some(1.0), Some(f32::NAN), Some(3.0)]); let df = DataFrame::new(vec![s0, s1])?; let out = df.lazy().fill_nan(Null {}.lit()).collect()?; let out = out.column("float")?; assert_eq!(Vec::from(out.f32()?), &[Some(1.0), None, Some(3.0)]); Ok(()) } #[test] fn test_exclude_regex() -> Result<()> { let df = fruits_cars(); let out = df .lazy() .select([col("*").exclude(["^(fruits|cars)$"])]) .collect()?; assert_eq!(out.get_column_names(), &["A", "B"]); Ok(()) } #[test] fn test_groupby_rank() -> Result<()> { let df = fruits_cars(); let out = df .lazy() .groupby_stable([col("cars")]) .agg([col("B").rank(RankOptions { method: RankMethod::Dense, ..Default::default() })]) .collect()?; let out = out.column("B")?; let out = out.list()?.get(1).unwrap(); let out = out.u32()?; assert_eq!(Vec::from(out), &[Some(1)]); Ok(()) } #[test] fn test_apply_multiple_columns() -> Result<()> { let df = fruits_cars(); let multiply = |s: &mut [Series]| Ok(&s[0].pow(2.0).unwrap() * &s[1]); let out = df .clone() .lazy() .select([map_multiple( multiply, [col("A"), col("B")], GetOutput::from_type(DataType::Float64), )]) .collect()?; let out = out.column("A")?; let out = out.f64()?; assert_eq!( Vec::from(out), &[Some(5.0), Some(16.0), Some(27.0), Some(32.0), Some(25.0)] ); let out = df .lazy() .groupby_stable([col("cars")]) .agg([apply_multiple( multiply, [col("A"), col("B")], GetOutput::from_type(DataType::Float64), )]) .collect()?; let out = out.column("A")?; let out = out.list()?.get(1).unwrap(); let out = out.f64()?; assert_eq!(Vec::from(out), &[Some(16.0)]); Ok(()) } #[test] pub fn test_select_by_dtypes() -> Result<()> { let df = df![ "bools" => [true, false, true], "ints" => [1, 2, 3], "strings" => ["a", "b", "c"], "floats" => [1.0, 2.0, 3.0f32] ]?; let out = df .lazy() .select([dtype_cols([DataType::Float32, DataType::Utf8])]) .collect()?; assert_eq!(out.dtypes(), 
&[DataType::Float32, DataType::Utf8]); Ok(()) } #[test] fn test_binary_expr() -> Result<()> { // test panic in schema names let df = fruits_cars(); let _ = df.lazy().select([col("A").neq(lit(1))]).collect()?; // test type coercion // https://github.com/pola-rs/polars/issues/1649 let df = df!( "nrs"=> [Some(1i64), Some(2), Some(3), None, Some(5)], "random"=> [0.1f64, 0.6, 0.2, 0.6, 0.3] )?; let out = df .lazy() .select([when(col("random").gt(lit(0.5))) .then(lit(2)) .otherwise(col("random")) .alias("other") * col("nrs").sum()]) .collect()?; assert_eq!(out.dtypes(), &[DataType::Float64]); Ok(()) } #[test] fn test_drop_and_select() -> Result<()> { let df = fruits_cars(); // we test that the schema is still correct for drop to work. // typically the projection is pushed to before the drop and then the drop may think that some // columns are still there to be projected // we test this on both dataframe scan and csv scan. let out = df .lazy() .drop_columns(["A", "B"]) .select([col("fruits")]) .collect()?; assert_eq!(out.get_column_names(), &["fruits"]); let out = scan_foods_csv() .drop_columns(["calories", "sugar_g"]) .select([col("category")]) .collect()?; assert_eq!(out.get_column_names(), &["category"]); Ok(()) } #[test] fn test_groupby_on_lists() -> Result<()> { let s0 = Series::new("", [1i32, 2, 3]); let s1 = Series::new("groups", [4i32, 5]); let mut builder = ListPrimitiveChunkedBuilder::<i32>::new("arrays", 10, 10, DataType::Int32); builder.append_series(&s0); builder.append_series(&s1); let s2 = builder.finish().into_series(); let df = DataFrame::new(vec![s1, s2])?; let out = df .clone() .lazy() .groupby([col("groups")]) .agg([col("arrays").first()]) .collect()?; assert_eq!( out.column("arrays")?.dtype(), &DataType::List(Box::new(DataType::Int32)) ); let out = df .clone() .lazy() .groupby([col("groups")]) .agg([col("arrays").list()]) .collect()?; assert_eq!( out.column("arrays")?.dtype(), &DataType::List(Box::new(DataType::List(Box::new(DataType::Int32)))) ); 
Ok(()) } #[test] fn test_single_group_result() -> Result<()> { // the argsort should not auto explode let df = df![ "a" => [1, 2], "b" => [1, 1] ]?; let out = df .lazy() .select([col("a").arg_sort(false).list().over([col("a")]).flatten()]) .collect()?; let a = out.column("a")?.u32()?; assert_eq!(Vec::from(a), &[Some(0), Some(0)]); Ok(()) } #[test] fn test_single_ranked_group() -> Result<()> { // tests type consistency of rank algorithm let df = df!["group" => [1, 2, 2], "value"=> [100, 50, 10] ]?; let out = df .lazy() .with_columns([col("value") .rank(RankOptions { method: RankMethod::Average, ..Default::default() }) .list() .over([col("group")])]) .collect()?; let out = out.column("value")?.explode()?; let out = out.f32()?; assert_eq!( Vec::from(out), &[Some(1.0), Some(2.0), Some(1.0), Some(2.0), Some(1.0)] ); Ok(()) } #[test] fn empty_df() -> Result<()> { let df = fruits_cars(); let df = df.filter(&BooleanChunked::full("", false, df.height()))?; df.lazy() .select([ col("A").shift(1).alias("1"), col("A").shift_and_fill(1, lit(1)).alias("2"), col("A").shift_and_fill(-1, lit(1)).alias("3"), col("A").fill_null(lit(1)).alias("4"), col("A").cumcount(false).alias("5"), col("A").diff(1, NullBehavior::Ignore).alias("6"), col("A").cummax(false).alias("7"), col("A").cummin(false).alias("8"), ]) .collect()?; Ok(()) } #[test] fn test_apply_flatten() -> Result<()> { let df = df![ "A"=> [1.1435, 2.223456, 3.44732, -1.5234, -2.1238, -3.2923], "B"=> ["a", "b", "a", "b", "a", "b"] ]?; let out = df .lazy() .groupby_stable([col("B")]) .agg([col("A").abs().sum().alias("A_sum")]) .collect()?; let out = out.column("A_sum")?; assert_eq!(out.get(0), AnyValue::Float64(6.71462)); assert_eq!(out.get(1), AnyValue::Float64(7.039156)); Ok(()) } #[test] #[cfg(feature = "is_in")] fn test_is_in() -> Result<()> { let df = fruits_cars(); // // this will be executed by apply let out = df .clone() .lazy() .groupby_stable([col("fruits")]) 
.agg([col("cars").is_in(col("cars").filter(col("cars").eq(lit("beetle"))))]) .collect()?; let out = out.column("cars").unwrap(); let out = out.explode()?; let out = out.bool().unwrap(); assert_eq!( Vec::from(out), &[Some(true), Some(false), Some(true), Some(true), Some(true)] ); // this will be executed by map let out = df .lazy() .groupby_stable([col("fruits")]) .agg([col("cars").is_in(lit(Series::new("a", ["beetle", "vw"])))]) .collect()?; let out = out.column("cars").unwrap(); let out = out.explode()?; let out = out.bool().unwrap(); assert_eq!( Vec::from(out), &[Some(true), Some(false), Some(true), Some(true), Some(true)] ); Ok(()) }
24.878186
163
0.471267
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_with_column() {\n let df = get_df()\n .lazy()\n .with_column(lit(10).alias(\"foo\"))\n .collect()\n .unwrap();\n println!(\"{:?}\", df);\n assert_eq!(df.width(), 6);\n assert!(df.column(\"foo\").is_ok());\n\n let df = get_df()\n .lazy()\n .with_column(lit(10).alias(\"foo\"))\n .select([col(\"foo\"), col(\"sepal.width\")])\n .collect()\n .unwrap();\n println!(\"{:?}\", df);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_exec() {\n let df = get_df();\n let new = df\n .clone()\n .lazy()\n .select([col(\"sepal.width\"), col(\"variety\")])\n .sort(\"sepal.width\", false)\n .collect();\n println!(\"{:?}\", new);\n\n let new = df\n .lazy()\n .filter(not(col(\"sepal.width\").lt(lit(3.5))))\n .collect()\n .unwrap();\n\n let check = new.column(\"sepal.width\").unwrap().f64().unwrap().gt(3.4);\n assert!(check.all())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_alias() {\n let df = get_df();\n let new = df\n .lazy()\n .select([col(\"sepal.width\").alias(\"petals\"), col(\"sepal.width\")])\n .collect()\n .unwrap();\n assert_eq!(new.get_column_names(), &[\"petals\", \"sepal.width\"]);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_melt() {\n let df = get_df();\n let out = df\n .lazy()\n .melt(\n vec![\"petal.width\".to_string(), \"petal.length\".to_string()],\n vec![\"sepal.length\".to_string(), \"sepal.width\".to_string()],\n )\n .filter(col(\"variable\").eq(lit(\"sepal.length\")))\n .select([col(\"variable\"), col(\"petal.width\"), col(\"value\")])\n .collect()\n .unwrap();\n assert_eq!(out.shape(), (7, 3));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_drop_nulls() {\n let df = df! {\n \"foo\" => &[Some(1), None, Some(3)],\n \"bar\" => &[Some(1), Some(2), None]\n }\n .unwrap();\n\n let new = df.lazy().drop_nulls(None).collect().unwrap();\n let out = df! 
{\n \"foo\" => &[Some(1)],\n \"bar\" => &[Some(1)]\n }\n .unwrap();\n assert!(new.frame_equal(&out));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_udf() {\n let df = get_df();\n let new = df\n .lazy()\n .select([col(\"sepal.width\").map(|s| Ok(s * 200.0), GetOutput::same_type())])\n .collect()\n .unwrap();\n assert_eq!(\n new.column(\"sepal.width\").unwrap().f64().unwrap().get(0),\n Some(700.0)\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_is_null() {\n let df = get_df();\n let new = df\n .clone()\n .lazy()\n .filter(col(\"sepal.width\").is_null())\n .collect()\n .unwrap();\n\n assert_eq!(new.height(), 0);\n\n let new = df\n .clone()\n .lazy()\n .filter(col(\"sepal.width\").is_not_null())\n .collect()\n .unwrap();\n assert_eq!(new.height(), df.height());\n\n let new = df\n .lazy()\n .groupby([col(\"variety\")])\n .agg([col(\"sepal.width\").min()])\n .collect()\n .unwrap();\n\n println!(\"{:?}\", new);\n assert_eq!(new.shape(), (1, 2));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_pushdown_through_agg() {\n // An aggregation changes the schema names, check if the pushdown succeeds.\n let df = get_df();\n let new = df\n .lazy()\n .groupby([col(\"variety\")])\n .agg([\n col(\"sepal.length\").min(),\n col(\"petal.length\").min().alias(\"foo\"),\n ])\n .select([col(\"foo\")])\n // second selection is to test if optimizer can handle that\n .select([col(\"foo\").alias(\"bar\")])\n .collect()\n .unwrap();\n\n println!(\"{:?}\", new);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_agg() {\n let s0 = DateChunked::parse_from_str_slice(\n \"date\",\n &[\n \"2020-08-21\",\n \"2020-08-21\",\n \"2020-08-22\",\n \"2020-08-23\",\n \"2020-08-22\",\n ],\n \"%Y-%m-%d\",\n )\n .into_series();\n let s1 = Series::new(\"temp\", [20, 10, 7, 9, 1].as_ref());\n let s2 = Series::new(\"rain\", [0.2, 0.1, 0.3, 0.1, 0.01].as_ref());\n let df = DataFrame::new(vec![s0, s1, 
s2]).unwrap();\n\n let lf = df\n .lazy()\n .groupby([col(\"date\")])\n .agg([\n col(\"rain\").min().alias(\"min\"),\n col(\"rain\").sum().alias(\"sum\"),\n col(\"rain\")\n .quantile(0.5, QuantileInterpolOptions::default())\n .alias(\"median_rain\"),\n ])\n .sort(\"date\", false);\n\n let new = lf.collect().unwrap();\n dbg!(new);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_shift() {\n let df = get_df();\n let new = df\n .lazy()\n .select([col(\"sepal.width\").alias(\"foo\").shift(2)])\n .collect()\n .unwrap();\n assert_eq!(new.column(\"foo\").unwrap().f64().unwrap().get(0), None);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_shift_and_fill() -> Result<()> {\n let out = df![\n \"a\" => [1, 2, 3]\n ]?\n .lazy()\n .select([col(\"a\").shift_and_fill(-1, lit(5))])\n .collect()?;\n\n let out = out.column(\"a\")?;\n assert_eq!(Vec::from(out.i32()?), &[Some(2), Some(3), Some(5)]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_ternary_and_predicates() {\n let df = get_df();\n // test if this runs. 
This failed because is_not_null changes the schema name, so we\n // really need to check the root column\n let ldf = df\n .clone()\n .lazy()\n .with_column(lit(3).alias(\"foo\"))\n .filter(col(\"foo\").is_not_null());\n let _new = ldf.collect().unwrap();\n\n let ldf = df\n .lazy()\n .with_column(\n when(col(\"sepal.length\").lt(lit(5.0)))\n .then(\n lit(3), // is another type on purpose to check type coercion\n )\n .otherwise(col(\"sepal.width\"))\n .alias(\"foo\"),\n )\n .filter(col(\"foo\").gt(lit(3.0)));\n\n let new = ldf.collect().unwrap();\n dbg!(new);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_binary_ops() {\n let df = df!(\"a\" => &[1, 2, 3, 4, 5, ]).unwrap();\n let new = df\n .lazy()\n .select([col(\"a\").eq(lit(2)).alias(\"foo\")])\n .collect()\n .unwrap();\n assert_eq!(new.column(\"foo\").unwrap().sum::<i32>(), Some(1));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_1() {\n // test on aggregation pushdown\n // and a filter that is not in the projection\n let df_a = load_df();\n let df_b = df_a.clone();\n df_a.lazy()\n .left_join(df_b.lazy(), col(\"b\"), col(\"b\"))\n .filter(col(\"a\").lt(lit(2)))\n .groupby([col(\"b\")])\n .agg([col(\"b\").first(), col(\"c\").first()])\n .select([col(\"b\"), col(\"c\")])\n .collect()\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_2() {\n let df = load_df();\n let ldf = df\n .lazy()\n .with_column(\n col(\"a\")\n .map(|s| Ok(s * 2), GetOutput::same_type())\n .alias(\"foo\"),\n )\n .filter(col(\"a\").lt(lit(2)))\n .select([col(\"b\"), col(\"a\")]);\n\n let new = ldf.collect().unwrap();\n assert_eq!(new.shape(), (1, 2));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_3() {\n // query checks if schema of scanning is not changed by aggregation\n let _ = scan_foods_csv()\n .groupby([col(\"calories\")])\n .agg([col(\"fats_g\").max()])\n .collect()\n .unwrap();\n}\n}", 
"#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_4() {\n let df = df! {\n \"uid\" => [0, 0, 0, 1, 1, 1],\n \"day\" => [1, 2, 3, 1, 2, 3],\n \"cumcases\" => [10, 12, 15, 25, 30, 41]\n }\n .unwrap();\n\n let base_df = df.lazy();\n\n let out = base_df\n .clone()\n .groupby([col(\"uid\")])\n .agg([\n col(\"day\").list().alias(\"day\"),\n col(\"cumcases\")\n .apply(|s: Series| Ok(&s - &(s.shift(1))), GetOutput::same_type())\n .alias(\"diff_cases\"),\n ])\n .explode([col(\"day\"), col(\"diff_cases\")])\n .join(\n base_df,\n [col(\"uid\"), col(\"day\")],\n [col(\"uid\"), col(\"day\")],\n JoinType::Inner,\n )\n .collect()\n .unwrap();\n assert_eq!(\n Vec::from(out.column(\"diff_cases\").unwrap().i32().unwrap()),\n &[None, Some(2), Some(3), None, Some(5), Some(11)]\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_5() {\n // if this one fails, the list builder probably does not handle offsets\n let df = df! {\n \"uid\" => [0, 0, 0, 1, 1, 1],\n \"day\" => [1, 2, 4, 1, 2, 3],\n \"cumcases\" => [10, 12, 15, 25, 30, 41]\n }\n .unwrap();\n\n let out = df\n .lazy()\n .groupby([col(\"uid\")])\n .agg([col(\"day\").head(Some(2))])\n .collect()\n .unwrap();\n dbg!(&out);\n let s = out\n .select_at_idx(1)\n .unwrap()\n .list()\n .unwrap()\n .get(0)\n .unwrap();\n assert_eq!(s.len(), 2);\n let s = out\n .select_at_idx(1)\n .unwrap()\n .list()\n .unwrap()\n .get(0)\n .unwrap();\n assert_eq!(s.len(), 2);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_8() -> Result<()> {\n // https://github.com/pola-rs/polars/issues/842\n let df = df![\n \"A\" => [1, 2, 3],\n \"B\" => [1, 2, 3],\n \"C\" => [1, 2, 3],\n \"D\" => [1, 2, 3],\n \"E\" => [1, 2, 3]\n ]?;\n\n let mut selection = vec![];\n\n for c in &[\"A\", \"B\", \"C\", \"D\", \"E\"] {\n let e = when(col(c).is_in(col(\"E\")))\n .then(col(\"A\"))\n .otherwise(Null {}.lit())\n .alias(c);\n selection.push(e);\n }\n\n let out = df\n .lazy()\n 
.select(selection)\n .filter(col(\"D\").gt(lit(1)))\n .collect()?;\n assert_eq!(out.shape(), (2, 5));\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_9() -> Result<()> {\n // https://github.com/pola-rs/polars/issues/958\n let cities = df![\n \"Cities.City\"=> [\"Moscow\", \"Berlin\", \"Paris\",\"Hamburg\", \"Lyon\", \"Novosibirsk\"],\n \"Cities.Population\"=> [11.92, 3.645, 2.161, 1.841, 0.513, 1.511],\n \"Cities.Country\"=> [\"Russia\", \"Germany\", \"France\", \"Germany\", \"France\", \"Russia\"]\n ]?;\n\n let sales = df![\n \"Sales.City\"=> [\"Moscow\", \"Berlin\", \"Paris\", \"Moscow\", \"Berlin\", \"Paris\", \"Moscow\", \"Berlin\", \"Paris\"],\n \"Sales.Item\"=> [\"Item A\", \"Item A\",\"Item A\",\n \"Item B\", \"Item B\",\"Item B\",\n \"Item C\", \"Item C\",\"Item C\"],\n \"Sales.Amount\"=> [200, 180, 100,\n 3, 30, 20,\n 90, 130, 125]\n ]?;\n\n let out = sales\n .lazy()\n .join(\n cities.lazy(),\n [col(\"Sales.City\")],\n [col(\"Cities.City\")],\n JoinType::Inner,\n )\n .groupby([col(\"Cities.Country\")])\n .agg([col(\"Sales.Amount\").sum().alias(\"sum\")])\n .sort(\"sum\", false)\n .collect()?;\n let vals = out\n .column(\"sum\")?\n .i32()?\n .into_no_null_iter()\n .collect::<Vec<_>>();\n assert_eq!(vals, &[245, 293, 340]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_10() {\n use polars_core::export::chrono::Duration as ChronoDuration;\n let date = NaiveDate::from_ymd(2021, 3, 5);\n let x: Series = DatetimeChunked::from_naive_datetime(\n \"x\",\n [\n NaiveDateTime::new(date, NaiveTime::from_hms(12, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(13, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(14, 0, 0)),\n ],\n TimeUnit::Nanoseconds,\n )\n .into();\n let y: Series = DatetimeChunked::from_naive_datetime(\n \"y\",\n [\n NaiveDateTime::new(date, NaiveTime::from_hms(11, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(11, 0, 0)),\n 
NaiveDateTime::new(date, NaiveTime::from_hms(11, 0, 0)),\n ],\n TimeUnit::Nanoseconds,\n )\n .into();\n let df = DataFrame::new(vec![x, y]).unwrap();\n let out = df\n .lazy()\n .select(&[(col(\"x\") - col(\"y\")).alias(\"z\")])\n .collect()\n .unwrap();\n let z: Series = DurationChunked::from_duration(\n \"z\",\n [\n ChronoDuration::hours(1),\n ChronoDuration::hours(2),\n ChronoDuration::hours(3),\n ],\n TimeUnit::Nanoseconds,\n )\n .into();\n assert!(out.column(\"z\").unwrap().series_equal(&z));\n let x: Series = DatetimeChunked::from_naive_datetime(\n \"x\",\n [\n NaiveDateTime::new(date, NaiveTime::from_hms(2, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(3, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(4, 0, 0)),\n ],\n TimeUnit::Milliseconds,\n )\n .into();\n let y: Series = DatetimeChunked::from_naive_datetime(\n \"y\",\n [\n NaiveDateTime::new(date, NaiveTime::from_hms(1, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(1, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(1, 0, 0)),\n ],\n TimeUnit::Nanoseconds,\n )\n .into();\n let df = DataFrame::new(vec![x, y]).unwrap();\n let out = df\n .lazy()\n .select(&[(col(\"x\") - col(\"y\")).alias(\"z\")])\n .collect()\n .unwrap();\n assert!(out\n .column(\"z\")\n .unwrap()\n .series_equal(&z.cast(&DataType::Duration(TimeUnit::Milliseconds)).unwrap()));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_query_7() {\n let date = NaiveDate::from_ymd(2021, 3, 5);\n let dates = [\n NaiveDateTime::new(date, NaiveTime::from_hms(12, 0, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(12, 1, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(12, 2, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(12, 3, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(12, 4, 0)),\n NaiveDateTime::new(date, NaiveTime::from_hms(12, 5, 0)),\n ];\n let data = vec![Some(1.), Some(2.), Some(3.), Some(4.), None, None];\n let df = DataFrame::new(vec![\n 
DatetimeChunked::from_naive_datetime(\"date\", dates, TimeUnit::Nanoseconds).into(),\n Series::new(\"data\", data),\n ])\n .unwrap();\n // this tests if predicate pushdown not interferes with the shift data.\n let out = df\n .lazy()\n .with_column(col(\"data\").shift(-1).alias(\"output\"))\n .with_column(col(\"output\").shift(2).alias(\"shifted\"))\n .filter(col(\"date\").gt(lit(NaiveDateTime::new(date, NaiveTime::from_hms(12, 2, 0)))))\n .collect()\n .unwrap();\n let a = out.column(\"shifted\").unwrap().sum::<f64>().unwrap() - 7.0;\n assert!(a < 0.01 && a > -0.01);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_shift_and_fill_all() {\n let data = &[1, 2, 3];\n let df = DataFrame::new(vec![Series::new(\"data\", data)]).unwrap();\n let out = df\n .lazy()\n .with_column(col(\"data\").shift(1).fill_null(lit(0)).alias(\"output\"))\n .collect()\n .unwrap();\n assert_eq!(\n Vec::from(out.column(\"output\").unwrap().i32().unwrap()),\n vec![Some(0), Some(1), Some(2)]\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_shift_operation_no_filter() {\n // check if predicate pushdown optimization does not fail\n let df = df! 
{\n \"a\" => &[1, 2, 3],\n \"b\" => &[1, 2, 3]\n }\n .unwrap();\n df.lazy()\n .with_column(col(\"b\").shift(1).alias(\"output\"))\n .collect()\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_simplify_expr() {\n // Test if expression containing literals is simplified\n let df = get_df();\n\n let plan = df\n .lazy()\n .select(&[lit(1.0f32) + lit(1.0f32) + col(\"sepal.width\")])\n .logical_plan;\n\n let mut expr_arena = Arena::new();\n let mut lp_arena = Arena::new();\n let rules: &mut [Box<dyn OptimizationRule>] = &mut [Box::new(SimplifyExprRule {})];\n\n let optimizer = StackOptimizer {};\n let mut lp_top = to_alp(plan, &mut expr_arena, &mut lp_arena);\n lp_top = optimizer.optimize_loop(rules, &mut expr_arena, &mut lp_arena, lp_top);\n let plan = node_to_lp(lp_top, &mut expr_arena, &mut lp_arena);\n assert!(\n matches!(plan, LogicalPlan::Projection{ expr, ..} if matches!(&expr[0], Expr::BinaryExpr{left, ..} if **left == Expr::Literal(LiteralValue::Float32(2.0))))\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_wildcard() {\n let df = load_df();\n let new = df.clone().lazy().select([col(\"*\")]).collect().unwrap();\n assert_eq!(new.shape(), (5, 3));\n\n let new = df\n .lazy()\n .groupby([col(\"b\")])\n .agg([col(\"*\").sum(), col(\"*\").first()])\n .collect()\n .unwrap();\n assert_eq!(new.shape(), (3, 5)); // Should exclude b from wildcard aggregations.\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_reverse() {\n let df = load_df();\n assert!(df\n .clone()\n .lazy()\n .reverse()\n .collect()\n .unwrap()\n .frame_equal_missing(&df.reverse()))\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_predicate_pushdown_binary_expr() {\n let df = load_df();\n df.lazy()\n .filter(col(\"a\").eq(col(\"b\")))\n .select([col(\"c\")])\n .collect()\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_update_column() {\n let df 
= load_df();\n df.lazy().with_column(col(\"a\") / lit(10)).collect().unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_fill_null() {\n let df = df! {\n \"a\" => &[None, Some(2.0)],\n \"b\" => &[Some(1.0), None]\n }\n .unwrap();\n let out = df.lazy().fill_null(lit(10.0)).collect().unwrap();\n let correct = df! {\n \"a\" => &[Some(10.0), Some(2.0)],\n \"b\" => &[Some(1.0), Some(10.0)]\n }\n .unwrap();\n assert!(out.frame_equal(&correct));\n assert_eq!(out.get_column_names(), vec![\"a\", \"b\"])\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_double_projection() {\n let df = df! {\n \"foo\" => &[1, 2, 3]\n }\n .unwrap();\n df.lazy()\n .select([col(\"foo\").alias(\"bar\")])\n .select([col(\"bar\")])\n .collect()\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_type_coercion() {\n let df = df! {\n \"foo\" => &[1, 2, 3],\n \"bar\" => &[1.0, 2.0, 3.0]\n }\n .unwrap();\n\n let lp = df.lazy().select([col(\"foo\") * col(\"bar\")]).logical_plan;\n\n let mut expr_arena = Arena::new();\n let mut lp_arena = Arena::new();\n let rules: &mut [Box<dyn OptimizationRule>] = &mut [Box::new(TypeCoercionRule {})];\n\n let optimizer = StackOptimizer {};\n let mut lp_top = to_alp(lp, &mut expr_arena, &mut lp_arena);\n lp_top = optimizer.optimize_loop(rules, &mut expr_arena, &mut lp_arena, lp_top);\n let lp = node_to_lp(lp_top, &mut expr_arena, &mut lp_arena);\n\n if let LogicalPlan::Projection { expr, .. } = lp {\n if let Expr::BinaryExpr { left, right, .. } = &expr[0] {\n assert!(matches!(&**left, Expr::Cast { .. }));\n // bar is already float, does not have to be coerced\n assert!(matches!(&**right, Expr::Column { .. }));\n } else {\n panic!()\n }\n };\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_partition_agg() {\n let df = df! 
{\n \"foo\" => &[1, 1, 2, 2, 3],\n \"bar\" => &[1.0, 1.0, 2.0, 2.0, 3.0]\n }\n .unwrap();\n\n let out = df\n .lazy()\n .groupby([col(\"foo\")])\n .agg([col(\"bar\").mean()])\n .sort(\"foo\", false)\n .collect()\n .unwrap();\n\n assert_eq!(\n Vec::from(out.column(\"bar\").unwrap().f64().unwrap()),\n &[Some(1.0), Some(2.0), Some(3.0)]\n );\n\n let out = scan_foods_csv()\n .groupby([col(\"category\")])\n .agg([col(\"calories\").list()])\n .sort(\"category\", false)\n .collect()\n .unwrap();\n let cat_agg_list = out.select_at_idx(1).unwrap();\n let fruit_series = cat_agg_list.list().unwrap().get(0).unwrap();\n let fruit_list = fruit_series.i64().unwrap();\n assert_eq!(\n Vec::from(fruit_list),\n &[\n Some(60),\n Some(30),\n Some(50),\n Some(30),\n Some(60),\n Some(130),\n Some(50),\n ]\n )\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_groupby_apply() {\n let df = fruits_cars();\n\n df.lazy()\n .groupby([col(\"fruits\")])\n .agg([col(\"cars\").apply(\n |s: Series| Ok(Series::new(\"\", &[s.len() as u32])),\n GetOutput::same_type(),\n )])\n .collect()\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_shift_and_fill() {\n let df = df! {\n \"A\" => &[1, 2, 3, 4, 5],\n \"B\" => &[5, 4, 3, 2, 1]\n }\n .unwrap();\n let out = df\n .clone()\n .lazy()\n .with_column(col(\"A\").shift_and_fill(2, col(\"B\").mean()))\n .collect()\n .unwrap();\n assert_eq!(out.column(\"A\").unwrap().null_count(), 0);\n\n // shift from the other side\n let out = df\n .clone()\n .lazy()\n .with_column(col(\"A\").shift_and_fill(-2, col(\"B\").mean()))\n .collect()\n .unwrap();\n assert_eq!(out.column(\"A\").unwrap().null_count(), 0);\n\n let out = df\n .lazy()\n .shift_and_fill(-1, col(\"B\").std())\n .collect()\n .unwrap();\n assert_eq!(out.column(\"A\").unwrap().null_count(), 0);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_groupby() {\n let df = df! 
{\n \"a\" => &[Some(1.0), None, Some(3.0), Some(4.0), Some(5.0)],\n \"groups\" => &[\"a\", \"a\", \"b\", \"c\", \"c\"]\n }\n .unwrap();\n\n let out = df\n .lazy()\n .groupby([col(\"groups\")])\n .agg([col(\"a\").mean()])\n .sort(\"a\", false)\n .collect()\n .unwrap();\n\n assert_eq!(out.column(\"a\").unwrap().f64().unwrap().get(0), Some(1.0));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_tail() {\n let df = df! {\n \"A\" => &[1, 2, 3, 4, 5],\n \"B\" => &[5, 4, 3, 2, 1]\n }\n .unwrap();\n\n let _out = df.lazy().tail(3).collect().unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_groupby_sort() {\n let df = df! {\n \"a\" => [\"a\", \"b\", \"a\", \"b\", \"b\", \"c\"],\n \"b\" => [1, 2, 3, 4, 5, 6]\n }\n .unwrap();\n\n let out = df\n .clone()\n .lazy()\n .groupby([col(\"a\")])\n .agg([col(\"b\").sort(false).first()])\n .collect()\n .unwrap()\n .sort([\"a\"], false)\n .unwrap();\n\n assert_eq!(\n Vec::from(out.column(\"b\").unwrap().i32().unwrap()),\n [Some(1), Some(2), Some(6)]\n );\n\n let out = df\n .lazy()\n .groupby([col(\"a\")])\n .agg([col(\"b\").sort(false).last()])\n .collect()\n .unwrap()\n .sort([\"a\"], false)\n .unwrap();\n\n assert_eq!(\n Vec::from(out.column(\"b\").unwrap().i32().unwrap()),\n [Some(3), Some(5), Some(6)]\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_groupby_sort_by() {\n let df = df! {\n \"a\" => [\"a\", \"a\", \"a\", \"b\", \"b\", \"c\"],\n \"b\" => [1, 2, 3, 4, 5, 6],\n \"c\" => [6, 1, 4, 3, 2, 1]\n }\n .unwrap();\n\n let out = df\n .lazy()\n .groupby([col(\"a\")])\n .agg([col(\"b\").sort_by([col(\"c\")], [true]).first()])\n .collect()\n .unwrap()\n .sort([\"a\"], false)\n .unwrap();\n\n assert_eq!(\n Vec::from(out.column(\"b\").unwrap().i32().unwrap()),\n [Some(1), Some(4), Some(6)]\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_groupby_cast() {\n let df = df! 
{\n \"a\" => [\"a\", \"a\", \"a\", \"b\", \"b\", \"c\"],\n \"b\" => [1, 2, 3, 4, 5, 6]\n }\n .unwrap();\n\n // test if it runs in groupby context\n let _out = df\n .lazy()\n .groupby([col(\"a\")])\n .agg([col(\"b\")\n .mean()\n .cast(DataType::Datetime(TimeUnit::Nanoseconds, None))])\n .collect()\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_groupby_binary_expr() {\n let df = df! {\n \"a\" => [\"a\", \"a\", \"a\", \"b\", \"b\", \"c\"],\n \"b\" => [1, 2, 3, 4, 5, 6]\n }\n .unwrap();\n\n // test if it runs in groupby context\n let out = df\n .lazy()\n .groupby([col(\"a\")])\n .agg([col(\"b\").mean() * lit(2)])\n .sort(\"a\", false)\n .collect()\n .unwrap();\n assert_eq!(\n Vec::from(out.column(\"b\").unwrap().f64().unwrap()),\n [Some(4.0), Some(9.0), Some(12.0)]\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_groupby_filter() -> Result<()> {\n let df = df! {\n \"a\" => [\"a\", \"a\", \"a\", \"b\", \"b\", \"c\"],\n \"b\" => [1, 2, 3, 4, 5, 6]\n }?;\n\n // We test if the filters work in the groupby context\n // and that the aggregations can deal with empty sets\n\n let out = df\n .lazy()\n .groupby([col(\"a\")])\n .agg([\n col(\"b\").filter(col(\"a\").eq(lit(\"a\"))).sum().alias(\"b_sum\"),\n col(\"b\")\n .filter(col(\"a\").eq(lit(\"a\")))\n .first()\n .alias(\"b_first\"),\n col(\"b\")\n .filter(col(\"a\").eq(lit(\"e\")))\n .mean()\n .alias(\"b_mean\"),\n col(\"b\")\n .filter(col(\"a\").eq(lit(\"a\")))\n .last()\n .alias(\"b_last\"),\n ])\n .sort(\"a\", false)\n .collect()?;\n\n dbg!(&out);\n assert_eq!(\n Vec::from(out.column(\"b_sum\").unwrap().i32().unwrap()),\n [Some(6), None, None]\n );\n assert_eq!(\n Vec::from(out.column(\"b_first\").unwrap().i32().unwrap()),\n [Some(1), None, None]\n );\n assert_eq!(\n Vec::from(out.column(\"b_mean\").unwrap().f64().unwrap()),\n [None, None, None]\n );\n assert_eq!(\n Vec::from(out.column(\"b_last\").unwrap().i32().unwrap()),\n [Some(3), None, None]\n 
);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_groupby_projection_pd_same_column() -> Result<()> {\n // this query failed when projection pushdown was enabled\n\n let a = || {\n let df = df![\n \"col1\" => [\"a\", \"ab\", \"abc\"],\n \"col2\" => [1, 2, 3]\n ]\n .unwrap();\n\n df.lazy()\n .select([col(\"col1\").alias(\"foo\"), col(\"col2\").alias(\"bar\")])\n };\n\n let out = a()\n .left_join(a(), col(\"foo\"), col(\"foo\"))\n .select([col(\"bar\")])\n .collect()?;\n\n let a = out.column(\"bar\")?.i32()?;\n assert_eq!(Vec::from(a), &[Some(1), Some(2), Some(3)]);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_groupby_sort_slice() -> Result<()> {\n let df = df![\n \"groups\" => [1, 2, 2, 3, 3, 3],\n \"vals\" => [1, 5, 6, 3, 9, 8]\n ]?;\n // get largest two values per groups\n\n // expected:\n // group values\n // 1 1\n // 2 6, 5\n // 3 9, 8\n\n let out1 = df\n .clone()\n .lazy()\n .sort(\"vals\", true)\n .groupby([col(\"groups\")])\n .agg([col(\"vals\").head(Some(2)).alias(\"foo\")])\n .sort(\"groups\", false)\n .collect()?;\n\n let out2 = df\n .lazy()\n .groupby([col(\"groups\")])\n .agg([col(\"vals\").sort(true).head(Some(2)).alias(\"foo\")])\n .sort(\"groups\", false)\n .collect()?;\n\n assert!(out1.column(\"foo\")?.series_equal(out2.column(\"foo\")?));\n dbg!(out1, out2);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_groupby_cumsum() -> Result<()> {\n let df = df![\n \"groups\" => [1, 2, 2, 3, 3, 3],\n \"vals\" => [1, 5, 6, 3, 9, 8]\n ]?;\n\n let out = df\n .lazy()\n .groupby([col(\"groups\")])\n .agg([col(\"vals\").cumsum(false)])\n .sort(\"groups\", false)\n .collect()?;\n\n dbg!(&out);\n\n assert_eq!(\n Vec::from(out.column(\"vals\")?.explode()?.i32()?),\n [1, 5, 11, 3, 12, 20]\n .iter()\n .copied()\n .map(Some)\n .collect::<Vec<_>>()\n );\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_argsort_multiple() -> Result<()> {\n 
let df = df![\n \"int\" => [1, 2, 3, 1, 2],\n \"flt\" => [3.0, 2.0, 1.0, 2.0, 1.0],\n \"str\" => [\"a\", \"a\", \"a\", \"b\", \"b\"]\n ]?;\n\n let out = df\n .clone()\n .lazy()\n .select([argsort_by([col(\"int\"), col(\"flt\")], &[true, false])])\n .collect()?;\n\n assert_eq!(\n Vec::from(out.column(\"int\")?.u32()?),\n [2, 4, 1, 3, 0]\n .iter()\n .copied()\n .map(Some)\n .collect::<Vec<_>>()\n );\n\n // check if this runs\n let _out = df\n .lazy()\n .select([argsort_by([col(\"str\"), col(\"flt\")], &[true, false])])\n .collect()?;\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_multiple_explode() -> Result<()> {\n let df = df![\n \"a\" => [0, 1, 2, 0, 2],\n \"b\" => [5, 4, 3, 2, 1],\n \"c\" => [2, 3, 4, 1, 5]\n ]?;\n\n let out = df\n .lazy()\n .groupby([col(\"a\")])\n .agg([\n col(\"b\").list().alias(\"b_list\"),\n col(\"c\").list().alias(\"c_list\"),\n ])\n .explode([col(\"c_list\"), col(\"b_list\")])\n .collect()?;\n assert_eq!(out.shape(), (5, 3));\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_filter_and_alias() -> Result<()> {\n let df = df![\n \"a\" => [0, 1, 2, 0, 2]\n ]?;\n\n let out = df\n .lazy()\n .with_column(col(\"a\").pow(2.0).alias(\"a_squared\"))\n .filter(col(\"a_squared\").gt(lit(1)).and(col(\"a\").gt(lit(1))))\n .collect()?;\n\n let expected = df![\n \"a\" => [2, 2],\n \"a_squared\" => [4, 4]\n ]?;\n\n assert!(out.frame_equal(&expected));\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_filter_lit() {\n // see https://github.com/pola-rs/polars/issues/790\n // failed due to broadcasting filters and splitting threads.\n let iter = (0..100).map(|i| ('A'..='Z').nth(i % 26).unwrap().to_string());\n let a = Series::from_iter(iter);\n let df = DataFrame::new([a].into()).unwrap();\n\n let out = df.lazy().filter(lit(true)).collect().unwrap();\n assert_eq!(out.shape(), (100, 1));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn 
test_ternary_null() -> Result<()> {\n let df = df![\n \"a\" => [\"a\", \"b\", \"c\"]\n ]?;\n\n let out = df\n .lazy()\n .select([when(col(\"a\").eq(lit(\"c\")))\n .then(Null {}.lit())\n .otherwise(col(\"a\"))\n .alias(\"foo\")])\n .collect()?;\n\n assert_eq!(\n out.column(\"foo\")?.is_null().into_iter().collect::<Vec<_>>(),\n &[Some(false), Some(false), Some(true)]\n );\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_fill_forward() -> Result<()> {\n let df = df![\n \"a\" => [\"a\", \"b\", \"a\"],\n \"b\" => [Some(1), None, None]\n ]?;\n\n let out = df\n .lazy()\n .select([col(\"b\").forward_fill().list().over([col(\"a\")])])\n .collect()?;\n let agg = out.column(\"b\")?.list()?;\n\n let a: Series = agg.get(0).unwrap();\n assert!(a.series_equal(&Series::new(\"b\", &[1, 1])));\n let a: Series = agg.get(2).unwrap();\n assert!(a.series_equal(&Series::new(\"b\", &[1, 1])));\n let a: Series = agg.get(1).unwrap();\n assert_eq!(a.null_count(), 1);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_cross_join() -> Result<()> {\n let df1 = df![\n \"a\" => [\"a\", \"b\", \"a\"],\n \"b\" => [Some(1), None, None]\n ]?;\n\n let df2 = df![\n \"a\" => [1, 2],\n \"b\" => [None, Some(12)]\n ]?;\n\n let out = df1.lazy().cross_join(df2.lazy()).collect()?;\n assert_eq!(out.shape(), (6, 4));\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_fold_wildcard() -> Result<()> {\n let df1 = df![\n \"a\" => [1, 2, 3],\n \"b\" => [1, 2, 3]\n ]?;\n\n let out = df1\n .clone()\n .lazy()\n .select([fold_exprs(lit(0), |a, b| Ok(&a + &b), [col(\"*\")]).alias(\"foo\")])\n .collect()?;\n\n assert_eq!(\n Vec::from(out.column(\"foo\")?.i32()?),\n &[Some(2), Some(4), Some(6)]\n );\n\n // test if we don't panic due to wildcard\n let _out = df1\n .lazy()\n .select([all_exprs([col(\"*\").is_not_null()])])\n .collect()?;\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_select_empty_df() -> 
Result<()> {\n // https://github.com/pola-rs/polars/issues/1056\n let df1 = df![\n \"a\" => [1, 2, 3],\n \"b\" => [1, 2, 3]\n ]?;\n\n let out = df1\n .lazy()\n .filter(col(\"a\").eq(lit(0))) // this will lead to an empty frame\n .select([col(\"a\"), lit(1).alias(\"c\")])\n .collect()?;\n\n assert_eq!(out.column(\"a\")?.len(), 0);\n assert_eq!(out.column(\"c\")?.len(), 0);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_keep_name() -> Result<()> {\n let df = df![\n \"a\" => [1, 2, 3],\n \"b\" => [1, 2, 3]\n ]?;\n\n let out = df\n .lazy()\n .select([\n col(\"a\").alias(\"bar\").keep_name(),\n col(\"b\").alias(\"bar\").keep_name(),\n ])\n .collect()?;\n\n assert_eq!(out.get_column_names(), &[\"a\", \"b\"]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_exclude() -> Result<()> {\n let df = df![\n \"a\" => [1, 2, 3],\n \"b\" => [1, 2, 3],\n \"c\" => [1, 2, 3]\n ]?;\n\n let out = df.lazy().select([col(\"*\").exclude(&[\"b\"])]).collect()?;\n\n assert_eq!(out.get_column_names(), &[\"a\", \"c\"]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_regex_selection() -> Result<()> {\n let df = df![\n \"anton\" => [1, 2, 3],\n \"arnold schwars\" => [1, 2, 3],\n \"annie\" => [1, 2, 3]\n ]?;\n\n let out = df.lazy().select([col(\"^a.*o.*$\")]).collect()?;\n\n assert_eq!(out.get_column_names(), &[\"anton\", \"arnold schwars\"]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_filter_in_groupby_agg() -> Result<()> {\n // This tests if the fitler is correctly handled by the binary expression.\n // This could lead to UB if it were not the case. 
The filter creates an empty column.\n // but the group tuples could still be untouched leading to out of bounds aggregation.\n let df = df![\n \"a\" => [1, 1, 2],\n \"b\" => [1, 2, 3]\n ]?;\n\n let out = df\n .clone()\n .lazy()\n .groupby([col(\"a\")])\n .agg([(col(\"b\").filter(col(\"b\").eq(lit(100))) * lit(2))\n .mean()\n .alias(\"b_mean\")])\n .collect()?;\n\n assert_eq!(out.column(\"b_mean\")?.null_count(), 2);\n\n let out = df\n .lazy()\n .groupby([col(\"a\")])\n .agg([(col(\"b\")\n .filter(col(\"b\").eq(lit(100)))\n .map(Ok, GetOutput::same_type()))\n .mean()\n .alias(\"b_mean\")])\n .collect()?;\n assert_eq!(out.column(\"b_mean\")?.null_count(), 2);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_sort_by() -> Result<()> {\n let df = df![\n \"a\" => [1, 2, 3, 4, 5],\n \"b\" => [1, 1, 1, 2, 2],\n \"c\" => [2, 3, 1, 2, 1]\n ]?;\n\n // evaluate\n let out = df\n .clone()\n .lazy()\n .select([col(\"a\").sort_by([col(\"b\"), col(\"c\")], [false])])\n .collect()?;\n\n let a = out.column(\"a\")?;\n assert_eq!(\n Vec::from(a.i32().unwrap()),\n &[Some(3), Some(1), Some(2), Some(5), Some(4)]\n );\n\n // aggregate\n let out = df\n .clone()\n .lazy()\n .groupby_stable([col(\"b\")])\n .agg([col(\"a\").sort_by([col(\"b\"), col(\"c\")], [false])])\n .collect()?;\n let a = out.column(\"a\")?.explode()?;\n assert_eq!(\n Vec::from(a.i32().unwrap()),\n &[Some(3), Some(1), Some(2), Some(5), Some(4)]\n );\n\n // evaluate_on_groups\n let out = df\n .lazy()\n .groupby_stable([col(\"b\")])\n .agg([col(\"a\").sort_by([col(\"b\"), col(\"c\")], [false]).list()])\n .collect()?;\n\n let a = out.column(\"a\")?.explode()?;\n assert_eq!(\n Vec::from(a.i32().unwrap()),\n &[Some(3), Some(1), Some(2), Some(5), Some(4)]\n );\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_filter_after_shift_in_groups() -> Result<()> {\n let df = fruits_cars();\n\n let out = df\n .lazy()\n .select([\n col(\"fruits\"),\n col(\"B\")\n .shift(1)\n 
.filter(col(\"B\").shift(1).gt(lit(4)))\n .list()\n .over([col(\"fruits\")])\n .alias(\"filtered\"),\n ])\n .collect()?;\n\n assert_eq!(\n out.column(\"filtered\")?\n .list()?\n .get(0)\n .unwrap()\n .i32()?\n .get(0)\n .unwrap(),\n 5\n );\n assert_eq!(\n out.column(\"filtered\")?\n .list()?\n .get(1)\n .unwrap()\n .i32()?\n .get(0)\n .unwrap(),\n 5\n );\n assert_eq!(out.column(\"filtered\")?.list()?.get(2).unwrap().len(), 0);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_ternary_predicate_pushdown() -> Result<()> {\n let df = df![\n \"a\" => &[10, 1, 2, 3]\n ]?;\n\n let out = df\n .lazy()\n .select([when(col(\"a\").eq(lit(10)))\n .then(Null {}.lit())\n .otherwise(col(\"a\"))])\n .drop_nulls(None)\n .collect()?;\n\n assert_eq!(\n Vec::from(out.get_columns()[0].i32()?),\n &[Some(1), Some(2), Some(3)]\n );\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_categorical_addition() -> Result<()> {\n let df = fruits_cars();\n\n // test if we can do that arithmetic operation with utf8 and categorical\n let out = df\n .lazy()\n .select([\n col(\"fruits\").cast(DataType::Categorical),\n col(\"cars\").cast(DataType::Categorical),\n ])\n .select([(col(\"fruits\") + lit(\" \") + col(\"cars\")).alias(\"foo\")])\n .collect()?;\n\n assert_eq!(out.column(\"foo\")?.utf8()?.get(0).unwrap(), \"banana beetle\");\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_error_duplicate_names() {\n let df = fruits_cars();\n assert!(df.lazy().select([col(\"*\"), col(\"*\"),]).collect().is_err());\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_filter_count() -> Result<()> {\n let df = fruits_cars();\n let out = df\n .lazy()\n .select([col(\"fruits\")\n .filter(col(\"fruits\").eq(lit(\"banana\")))\n .count()])\n .collect()?;\n assert_eq!(out.column(\"fruits\")?.u32()?.get(0), Some(3));\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn 
test_groupby_small_ints() -> Result<()> {\n let df = df![\n \"id_32\" => [1i32, 2],\n \"id_16\" => [1i16, 2]\n ]?;\n\n // https://github.com/pola-rs/polars/issues/1255\n let out = df\n .lazy()\n .groupby([col(\"id_16\"), col(\"id_32\")])\n .agg([col(\"id_16\").sum().alias(\"foo\")])\n .sort(\"foo\", true)\n .collect()?;\n\n assert_eq!(Vec::from(out.column(\"foo\")?.i64()?), &[Some(2), Some(1)]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_when_then_schema() -> Result<()> {\n let df = fruits_cars();\n\n let schema = df\n .lazy()\n .select([when(col(\"A\").gt(lit(1)))\n .then(Null {}.lit())\n .otherwise(col(\"A\"))])\n .schema();\n assert_ne!(schema.fields()[0].data_type(), &DataType::Null);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_singleton_broadcast() -> Result<()> {\n let df = fruits_cars();\n let out = df\n .lazy()\n .select([col(\"fruits\"), lit(1).alias(\"foo\")])\n .collect()?;\n\n assert!(out.column(\"foo\")?.len() > 1);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_sort_by_suffix() -> Result<()> {\n let df = fruits_cars();\n let out = df\n .lazy()\n .select([col(\"*\")\n .sort_by([col(\"A\")], [false])\n .list()\n .over([col(\"fruits\")])\n .flatten()\n .suffix(\"_sorted\")])\n .collect()?;\n\n let expected = df!(\n \"A_sorted\"=> [1, 2, 5, 3, 4],\n \"fruits_sorted\"=> [\"banana\", \"banana\", \"banana\", \"apple\", \"apple\"],\n \"B_sorted\"=> [5, 4, 1, 3, 2],\n \"cars_sorted\"=> [\"beetle\", \"audi\", \"beetle\", \"beetle\", \"beetle\"]\n )?;\n\n assert!(expected.frame_equal(&out));\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_list_in_select_context() -> Result<()> {\n let s = Series::new(\"a\", &[1, 2, 3]);\n let mut builder = get_list_builder(s.dtype(), s.len(), 1, s.name());\n builder.append_series(&s);\n let expected = builder.finish().into_series();\n\n let df = DataFrame::new(vec![s])?;\n\n let out = 
df.lazy().select([col(\"a\").list()]).collect()?;\n\n let s = out.column(\"a\")?;\n assert!(s.series_equal(&expected));\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_round_after_agg() -> Result<()> {\n let df = fruits_cars();\n\n let out = df\n .lazy()\n .groupby([col(\"fruits\")])\n .agg([col(\"A\")\n .cast(DataType::Float32)\n .mean()\n .round(2)\n .alias(\"foo\")])\n .collect()?;\n\n assert!(out.column(\"foo\")?.f32().is_ok());\n\n let df = df![\n \"groups\" => [\"pigeon\",\n \"rabbit\",\n \"rabbit\",\n \"Chris\",\n \"pigeon\",\n \"fast\",\n \"fast\",\n \"pigeon\",\n \"rabbit\",\n \"Chris\"],\n \"b\" => [5409, 4848, 4864, 3540, 8103, 3083, 8575, 9963, 8809, 5425],\n \"c\" => [0.4517241160719615,\n 0.2551467646274673,\n 0.8682045191407308,\n 0.9925316385786037,\n 0.5392027792928116,\n 0.7633847828107002,\n 0.7967295231651537,\n 0.01444779067224733,\n 0.23807484087472652,\n 0.10985868798350984]\n ]?;\n\n let out = df\n .lazy()\n .groupby_stable([col(\"groups\")])\n .agg([((col(\"b\") * col(\"c\")).sum() / col(\"b\").sum())\n .round(2)\n .alias(\"foo\")])\n .collect()?;\n\n let out = out.column(\"foo\")?;\n let out = out.f64()?;\n\n assert_eq!(\n Vec::from(out),\n &[Some(0.3), Some(0.41), Some(0.46), Some(0.79)]\n );\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_fill_nan() -> Result<()> {\n let s0 = Series::new(\"date\", &[1, 2, 3]).cast(&DataType::Date)?;\n let s1 = Series::new(\"float\", &[Some(1.0), Some(f32::NAN), Some(3.0)]);\n\n let df = DataFrame::new(vec![s0, s1])?;\n let out = df.lazy().fill_nan(Null {}.lit()).collect()?;\n let out = out.column(\"float\")?;\n assert_eq!(Vec::from(out.f32()?), &[Some(1.0), None, Some(3.0)]);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_exclude_regex() -> Result<()> {\n let df = fruits_cars();\n let out = df\n .lazy()\n .select([col(\"*\").exclude([\"^(fruits|cars)$\"])])\n .collect()?;\n\n 
assert_eq!(out.get_column_names(), &[\"A\", \"B\"]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_groupby_rank() -> Result<()> {\n let df = fruits_cars();\n let out = df\n .lazy()\n .groupby_stable([col(\"cars\")])\n .agg([col(\"B\").rank(RankOptions {\n method: RankMethod::Dense,\n ..Default::default()\n })])\n .collect()?;\n\n let out = out.column(\"B\")?;\n let out = out.list()?.get(1).unwrap();\n let out = out.u32()?;\n\n assert_eq!(Vec::from(out), &[Some(1)]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_apply_multiple_columns() -> Result<()> {\n let df = fruits_cars();\n\n let multiply = |s: &mut [Series]| Ok(&s[0].pow(2.0).unwrap() * &s[1]);\n\n let out = df\n .clone()\n .lazy()\n .select([map_multiple(\n multiply,\n [col(\"A\"), col(\"B\")],\n GetOutput::from_type(DataType::Float64),\n )])\n .collect()?;\n let out = out.column(\"A\")?;\n let out = out.f64()?;\n assert_eq!(\n Vec::from(out),\n &[Some(5.0), Some(16.0), Some(27.0), Some(32.0), Some(25.0)]\n );\n\n let out = df\n .lazy()\n .groupby_stable([col(\"cars\")])\n .agg([apply_multiple(\n multiply,\n [col(\"A\"), col(\"B\")],\n GetOutput::from_type(DataType::Float64),\n )])\n .collect()?;\n\n let out = out.column(\"A\")?;\n let out = out.list()?.get(1).unwrap();\n let out = out.f64()?;\n\n assert_eq!(Vec::from(out), &[Some(16.0)]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n pub fn test_select_by_dtypes() -> Result<()> {\n let df = df![\n \"bools\" => [true, false, true],\n \"ints\" => [1, 2, 3],\n \"strings\" => [\"a\", \"b\", \"c\"],\n \"floats\" => [1.0, 2.0, 3.0f32]\n ]?;\n let out = df\n .lazy()\n .select([dtype_cols([DataType::Float32, DataType::Utf8])])\n .collect()?;\n assert_eq!(out.dtypes(), &[DataType::Float32, DataType::Utf8]);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_binary_expr() -> Result<()> {\n // test panic in schema names\n let df = 
fruits_cars();\n let _ = df.lazy().select([col(\"A\").neq(lit(1))]).collect()?;\n\n // test type coercion\n // https://github.com/pola-rs/polars/issues/1649\n let df = df!(\n \"nrs\"=> [Some(1i64), Some(2), Some(3), None, Some(5)],\n \"random\"=> [0.1f64, 0.6, 0.2, 0.6, 0.3]\n )?;\n\n let out = df\n .lazy()\n .select([when(col(\"random\").gt(lit(0.5)))\n .then(lit(2))\n .otherwise(col(\"random\"))\n .alias(\"other\")\n * col(\"nrs\").sum()])\n .collect()?;\n assert_eq!(out.dtypes(), &[DataType::Float64]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_drop_and_select() -> Result<()> {\n let df = fruits_cars();\n\n // we test that the schema is still correct for drop to work.\n // typically the projection is pushed to before the drop and then the drop may think that some\n // columns are still there to be projected\n\n // we test this on both dataframe scan and csv scan.\n let out = df\n .lazy()\n .drop_columns([\"A\", \"B\"])\n .select([col(\"fruits\")])\n .collect()?;\n\n assert_eq!(out.get_column_names(), &[\"fruits\"]);\n\n let out = scan_foods_csv()\n .drop_columns([\"calories\", \"sugar_g\"])\n .select([col(\"category\")])\n .collect()?;\n\n assert_eq!(out.get_column_names(), &[\"category\"]);\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_groupby_on_lists() -> Result<()> {\n let s0 = Series::new(\"\", [1i32, 2, 3]);\n let s1 = Series::new(\"groups\", [4i32, 5]);\n\n let mut builder = ListPrimitiveChunkedBuilder::<i32>::new(\"arrays\", 10, 10, DataType::Int32);\n builder.append_series(&s0);\n builder.append_series(&s1);\n let s2 = builder.finish().into_series();\n\n let df = DataFrame::new(vec![s1, s2])?;\n let out = df\n .clone()\n .lazy()\n .groupby([col(\"groups\")])\n .agg([col(\"arrays\").first()])\n .collect()?;\n\n assert_eq!(\n out.column(\"arrays\")?.dtype(),\n &DataType::List(Box::new(DataType::Int32))\n );\n\n let out = df\n .clone()\n .lazy()\n .groupby([col(\"groups\")])\n 
.agg([col(\"arrays\").list()])\n .collect()?;\n\n assert_eq!(\n out.column(\"arrays\")?.dtype(),\n &DataType::List(Box::new(DataType::List(Box::new(DataType::Int32))))\n );\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_single_group_result() -> Result<()> {\n // the argsort should not auto explode\n let df = df![\n \"a\" => [1, 2],\n \"b\" => [1, 1]\n ]?;\n\n let out = df\n .lazy()\n .select([col(\"a\").arg_sort(false).list().over([col(\"a\")]).flatten()])\n .collect()?;\n\n let a = out.column(\"a\")?.u32()?;\n assert_eq!(Vec::from(a), &[Some(0), Some(0)]);\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_single_ranked_group() -> Result<()> {\n // tests type consistency of rank algorithm\n let df = df![\"group\" => [1, 2, 2],\n \"value\"=> [100, 50, 10]\n ]?;\n\n let out = df\n .lazy()\n .with_columns([col(\"value\")\n .rank(RankOptions {\n method: RankMethod::Average,\n ..Default::default()\n })\n .list()\n .over([col(\"group\")])])\n .collect()?;\n\n let out = out.column(\"value\")?.explode()?;\n let out = out.f32()?;\n assert_eq!(\n Vec::from(out),\n &[Some(1.0), Some(2.0), Some(1.0), Some(2.0), Some(1.0)]\n );\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn empty_df() -> Result<()> {\n let df = fruits_cars();\n let df = df.filter(&BooleanChunked::full(\"\", false, df.height()))?;\n\n df.lazy()\n .select([\n col(\"A\").shift(1).alias(\"1\"),\n col(\"A\").shift_and_fill(1, lit(1)).alias(\"2\"),\n col(\"A\").shift_and_fill(-1, lit(1)).alias(\"3\"),\n col(\"A\").fill_null(lit(1)).alias(\"4\"),\n col(\"A\").cumcount(false).alias(\"5\"),\n col(\"A\").diff(1, NullBehavior::Ignore).alias(\"6\"),\n col(\"A\").cummax(false).alias(\"7\"),\n col(\"A\").cummin(false).alias(\"8\"),\n ])\n .collect()?;\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_apply_flatten() -> Result<()> {\n let df = df![\n \"A\"=> [1.1435, 2.223456, 3.44732, -1.5234, 
-2.1238, -3.2923],\n \"B\"=> [\"a\", \"b\", \"a\", \"b\", \"a\", \"b\"]\n ]?;\n\n let out = df\n .lazy()\n .groupby_stable([col(\"B\")])\n .agg([col(\"A\").abs().sum().alias(\"A_sum\")])\n .collect()?;\n\n let out = out.column(\"A_sum\")?;\n assert_eq!(out.get(0), AnyValue::Float64(6.71462));\n assert_eq!(out.get(1), AnyValue::Float64(7.039156));\n\n Ok(())\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_is_in() -> Result<()> {\n let df = fruits_cars();\n\n // // this will be executed by apply\n let out = df\n .clone()\n .lazy()\n .groupby_stable([col(\"fruits\")])\n .agg([col(\"cars\").is_in(col(\"cars\").filter(col(\"cars\").eq(lit(\"beetle\"))))])\n .collect()?;\n let out = out.column(\"cars\").unwrap();\n let out = out.explode()?;\n let out = out.bool().unwrap();\n assert_eq!(\n Vec::from(out),\n &[Some(true), Some(false), Some(true), Some(true), Some(true)]\n );\n\n // this will be executed by map\n let out = df\n .lazy()\n .groupby_stable([col(\"fruits\")])\n .agg([col(\"cars\").is_in(lit(Series::new(\"a\", [\"beetle\", \"vw\"])))])\n .collect()?;\n\n let out = out.column(\"cars\").unwrap();\n let out = out.explode()?;\n let out = out.bool().unwrap();\n assert_eq!(\n Vec::from(out),\n &[Some(true), Some(false), Some(true), Some(true), Some(true)]\n );\n\n Ok(())\n}\n}" ]
f708df63deff027658ea244b4d70978f8c9ef75e
2,555
rs
Rust
tremor-script/src/std_lib/json.rs
m0n0chr0m3/tremor-runtime
de785b5393388a614bc2e7c228dc0a7a98205636
[ "Apache-2.0" ]
null
null
null
tremor-script/src/std_lib/json.rs
m0n0chr0m3/tremor-runtime
de785b5393388a614bc2e7c228dc0a7a98205636
[ "Apache-2.0" ]
null
null
null
tremor-script/src/std_lib/json.rs
m0n0chr0m3/tremor-runtime
de785b5393388a614bc2e7c228dc0a7a98205636
[ "Apache-2.0" ]
null
null
null
// Copyright 2018-2020, Wayfair GmbH // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::registry::Registry; use crate::tremor_const_fn; use simd_json::to_owned_value; pub fn load(registry: &mut Registry) { registry .insert(tremor_const_fn! (json::decode(_context, _input: String) { // We need to clone here since we do not want to destroy the // original value let mut s: String = _input.to_string(); println!("{}", &s); // Screw you rust let mut bytes = unsafe{s.as_bytes_mut()}; // We need to do this since otherwise we depend on the clone of s to_owned_value(&mut bytes).map_err(to_runtime_error).map(Value::from) })) .insert(tremor_const_fn! (json::encode(_context, _input) { simd_json::to_string(_input).map(Value::from).map_err(to_runtime_error) })) .insert(tremor_const_fn! (json::encode_pretty(_context, _input) { simd_json::to_string_pretty(_input).map(Value::from).map_err(to_runtime_error) })); } #[cfg(test)] mod test { use crate::registry::fun; use simd_json::BorrowedValue as Value; macro_rules! 
assert_val { ($e:expr, $r:expr) => { assert_eq!($e, Ok(Value::from($r))) }; } #[test] fn decode() { let f = fun("json", "decode"); let v = Value::from(r#"["this","is","a","cake"]"#); assert_val!(f(&[&v]), Value::from(vec!["this", "is", "a", "cake"])); } #[test] fn encode() { let f = fun("json", "encode"); let v = Value::from(vec!["this", "is", "a", "cake"]); assert_val!(f(&[&v]), Value::from(r#"["this","is","a","cake"]"#)); } #[test] fn encode_pretty() { let f = fun("json", "encode_pretty"); let v = Value::from(vec!["this", "is", "a", "cake"]); assert_val!( f(&[&v]), Value::from( r#"[ "this", "is", "a", "cake" ]"# ) ); } }
32.75641
90
0.574168
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn decode() {\n let f = fun(\"json\", \"decode\");\n let v = Value::from(r#\"[\"this\",\"is\",\"a\",\"cake\"]\"#);\n assert_val!(f(&[&v]), Value::from(vec![\"this\", \"is\", \"a\", \"cake\"]));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn encode() {\n let f = fun(\"json\", \"encode\");\n let v = Value::from(vec![\"this\", \"is\", \"a\", \"cake\"]);\n assert_val!(f(&[&v]), Value::from(r#\"[\"this\",\"is\",\"a\",\"cake\"]\"#));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn encode_pretty() {\n let f = fun(\"json\", \"encode_pretty\");\n let v = Value::from(vec![\"this\", \"is\", \"a\", \"cake\"]);\n assert_val!(\n f(&[&v]),\n Value::from(\n r#\"[\n \"this\",\n \"is\",\n \"a\",\n \"cake\"\n]\"#\n )\n );\n }\n}" ]
f70906f84b8692b4e36f2d1145e197ba053d15f8
66,111
rs
Rust
src/libcollections/vec_deque.rs
chrish42/rust
4ce08a5d70a4dfc4da7ed0bc0098cfd2176b8411
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
6
2015-07-29T08:36:14.000Z
2017-06-27T19:42:04.000Z
src/libcollections/vec_deque.rs
chrish42/rust
4ce08a5d70a4dfc4da7ed0bc0098cfd2176b8411
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
2
2015-07-29T13:36:15.000Z
2022-01-29T07:27:02.000Z
src/libcollections/vec_deque.rs
chrish42/rust
4ce08a5d70a4dfc4da7ed0bc0098cfd2176b8411
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! VecDeque is a double-ended queue, which is implemented with the help of a //! growing ring buffer. //! //! This queue has `O(1)` amortized inserts and removals from both ends of the //! container. It also has `O(1)` indexing like a vector. The contained elements //! are not required to be copyable, and the queue will be sendable if the //! contained type is sendable. #![stable(feature = "rust1", since = "1.0.0")] use core::prelude::*; use core::cmp::Ordering; use core::fmt; use core::iter::{self, repeat, FromIterator, RandomAccessIterator}; use core::mem; use core::ops::{Index, IndexMut}; use core::ptr::{self, Unique}; use core::slice; use core::hash::{Hash, Hasher}; use core::cmp; use alloc::heap; const INITIAL_CAPACITY: usize = 7; // 2^3 - 1 const MINIMUM_CAPACITY: usize = 1; // 2 - 1 /// `VecDeque` is a growable ring buffer, which can be used as a /// double-ended queue efficiently. #[stable(feature = "rust1", since = "1.0.0")] pub struct VecDeque<T> { // tail and head are pointers into the buffer. Tail always points // to the first element that could be read, Head always points // to where data should be written. // If tail == head the buffer is empty. The length of the ringbuf // is defined as the distance between the two. 
tail: usize, head: usize, cap: usize, ptr: Unique<T>, } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Clone> Clone for VecDeque<T> { fn clone(&self) -> VecDeque<T> { self.iter().cloned().collect() } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> Drop for VecDeque<T> { fn drop(&mut self) { self.clear(); unsafe { if mem::size_of::<T>() != 0 { heap::deallocate(*self.ptr as *mut u8, self.cap * mem::size_of::<T>(), mem::min_align_of::<T>()) } } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> Default for VecDeque<T> { #[inline] fn default() -> VecDeque<T> { VecDeque::new() } } impl<T> VecDeque<T> { /// Turn ptr into a slice #[inline] unsafe fn buffer_as_slice(&self) -> &[T] { slice::from_raw_parts(*self.ptr, self.cap) } /// Turn ptr into a mut slice #[inline] unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] { slice::from_raw_parts_mut(*self.ptr, self.cap) } /// Moves an element out of the buffer #[inline] unsafe fn buffer_read(&mut self, off: usize) -> T { ptr::read(self.ptr.offset(off as isize)) } /// Writes an element into the buffer, moving it. #[inline] unsafe fn buffer_write(&mut self, off: usize, t: T) { ptr::write(self.ptr.offset(off as isize), t); } /// Returns true iff the buffer is at capacity #[inline] fn is_full(&self) -> bool { self.cap - self.len() == 1 } /// Returns the index in the underlying buffer for a given logical element /// index. #[inline] fn wrap_index(&self, idx: usize) -> usize { wrap_index(idx, self.cap) } /// Returns the index in the underlying buffer for a given logical element /// index + addend. #[inline] fn wrap_add(&self, idx: usize, addend: usize) -> usize { wrap_index(idx.wrapping_add(addend), self.cap) } /// Returns the index in the underlying buffer for a given logical element /// index - subtrahend. 
#[inline] fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize { wrap_index(idx.wrapping_sub(subtrahend), self.cap) } /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy(&self, dst: usize, src: usize, len: usize) { debug_assert!(dst + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, self.cap); debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, self.cap); ptr::copy( self.ptr.offset(src as isize), self.ptr.offset(dst as isize), len); } /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) { debug_assert!(dst + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, self.cap); debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, self.cap); ptr::copy_nonoverlapping( self.ptr.offset(src as isize), self.ptr.offset(dst as isize), len); } } impl<T> VecDeque<T> { /// Creates an empty `VecDeque`. #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> VecDeque<T> { VecDeque::with_capacity(INITIAL_CAPACITY) } /// Creates an empty `VecDeque` with space for at least `n` elements. #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(n: usize) -> VecDeque<T> { // +1 since the ringbuffer always leaves one space empty let cap = cmp::max(n + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); assert!(cap > n, "capacity overflow"); let size = cap.checked_mul(mem::size_of::<T>()) .expect("capacity overflow"); let ptr = unsafe { if mem::size_of::<T>() != 0 { let ptr = heap::allocate(size, mem::min_align_of::<T>()) as *mut T;; if ptr.is_null() { ::alloc::oom() } Unique::new(ptr) } else { Unique::new(heap::EMPTY as *mut T) } }; VecDeque { tail: 0, head: 0, cap: cap, ptr: ptr, } } /// Retrieves an element in the `VecDeque` by index. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// assert_eq!(buf.get(1).unwrap(), &4); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self, i: usize) -> Option<&T> { if i < self.len() { let idx = self.wrap_add(self.tail, i); unsafe { Some(&*self.ptr.offset(idx as isize)) } } else { None } } /// Retrieves an element in the `VecDeque` mutably by index. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// if let Some(elem) = buf.get_mut(1) { /// *elem = 7; /// } /// /// assert_eq!(buf[1], 7); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut(&mut self, i: usize) -> Option<&mut T> { if i < self.len() { let idx = self.wrap_add(self.tail, i); unsafe { Some(&mut *self.ptr.offset(idx as isize)) } } else { None } } /// Swaps elements at indices `i` and `j`. /// /// `i` and `j` may be equal. /// /// Fails if there is no element with either index. /// /// # Examples /// /// ``` /// # #![feature(collections)] /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// buf.swap(0, 2); /// assert_eq!(buf[0], 5); /// assert_eq!(buf[2], 3); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn swap(&mut self, i: usize, j: usize) { assert!(i < self.len()); assert!(j < self.len()); let ri = self.wrap_add(self.tail, i); let rj = self.wrap_add(self.tail, j); unsafe { ptr::swap(self.ptr.offset(ri as isize), self.ptr.offset(rj as isize)) } } /// Returns the number of elements the `VecDeque` can hold without /// reallocating. 
/// /// # Examples /// /// ``` /// # #![feature(collections)] /// use std::collections::VecDeque; /// /// let buf: VecDeque<i32> = VecDeque::with_capacity(10); /// assert!(buf.capacity() >= 10); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { self.cap - 1 } /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the /// given `VecDeque`. Does nothing if the capacity is already sufficient. /// /// Note that the allocator may give the collection more space than it requests. Therefore /// capacity can not be relied upon to be precisely minimal. Prefer `reserve` if future /// insertions are expected. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// ``` /// # #![feature(collections)] /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect(); /// buf.reserve_exact(10); /// assert!(buf.capacity() >= 11); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { self.reserve(additional); } /// Reserves capacity for at least `additional` more elements to be inserted in the given /// `Ringbuf`. The collection may reserve more space to avoid frequent reallocations. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. 
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(collections)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
    /// buf.reserve(10);
    /// assert!(buf.capacity() >= 11);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        let new_len = self.len() + additional;
        assert!(new_len + 1 > self.len(), "capacity overflow");
        if new_len > self.capacity() {
            // Grow to the next power of two that fits `new_len` plus the
            // permanently-empty slot.
            let count = (new_len + 1).next_power_of_two();
            assert!(count >= new_len + 1);

            if mem::size_of::<T>() != 0 {
                let old = self.cap * mem::size_of::<T>();
                let new = count.checked_mul(mem::size_of::<T>())
                               .expect("capacity overflow");
                unsafe {
                    let ptr = heap::reallocate(*self.ptr as *mut u8,
                                               old,
                                               new,
                                               mem::min_align_of::<T>()) as *mut T;
                    if ptr.is_null() { ::alloc::oom() }
                    self.ptr = Unique::new(ptr);
                }
            }

            // After a realloc the old contents sit at the front of the new,
            // larger buffer; if the ring was wrapped we must un-wrap one of
            // the two sections. Move the shortest contiguous section of the
            // ring buffer:
            //    T             H
            //   [o o o o o o o . ]
            //    T             H
            // A [o o o o o o o . . . . . . . . . ]
            //        H T
            //   [o o . o o o o o ]
            //          T             H
            // B [. . . o o o o o o o . . . . . . ]
            //              H T
            //   [o o o o o . o o ]
            //              H                 T
            // C [o o o o o . . . . . . . . . o o ]
            let oldcap = self.cap;
            self.cap = count;

            if self.tail <= self.head { // A
                // Nop
            } else if self.head < oldcap - self.tail { // B
                // Head section is shorter: move it just past the old end.
                unsafe {
                    self.copy_nonoverlapping(oldcap, 0, self.head);
                }
                self.head += oldcap;
                debug_assert!(self.head > self.tail);
            } else { // C
                // Tail section is shorter: move it to the end of the new buffer.
                let new_tail = count - (oldcap - self.tail);
                unsafe {
                    self.copy_nonoverlapping(new_tail, self.tail, oldcap - self.tail);
                }
                self.tail = new_tail;
                debug_assert!(self.head < self.tail);
            }

            debug_assert!(self.head < self.cap);
            debug_assert!(self.tail < self.cap);
            // Capacity must stay a power of two for the index-masking tricks.
            debug_assert!(self.cap.count_ones() == 1);
        }
    }

    /// Shrinks the capacity of the ringbuf as much as possible.
    ///
    /// It will drop down as close as possible to the length but the allocator may still inform the
    /// ringbuf that there is space for a few more elements.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(collections)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::with_capacity(15);
    /// buf.extend(0..4);
    /// assert_eq!(buf.capacity(), 15);
    /// buf.shrink_to_fit();
    /// assert!(buf.capacity() >= 4);
    /// ```
    pub fn shrink_to_fit(&mut self) {
        // +1 since the ringbuffer always leaves one space empty
        // len + 1 can't overflow for an existing, well-formed ringbuf.
        let target_cap = cmp::max(self.len() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
        if target_cap < self.cap {
            // Before shrinking the allocation, every element must be moved
            // into the first `target_cap` slots.
            //
            // There are three cases of interest:
            //   All elements are out of desired bounds
            //   Elements are contiguous, and head is out of desired bounds
            //   Elements are discontiguous, and tail is out of desired bounds
            //
            // At all other times, element positions are unaffected.
            //
            // Indicates that elements at the head should be moved.
            let head_outside = self.head == 0 || self.head >= target_cap;
            // Move elements from out of desired bounds (positions after target_cap)
            if self.tail >= target_cap && head_outside {
                //                    T             H
                //   [. . . . . . . . o o o o o o o . ]
                //    T             H
                //   [o o o o o o o . ]
                unsafe {
                    self.copy_nonoverlapping(0, self.tail, self.len());
                }
                self.head = self.len();
                self.tail = 0;
            } else if self.tail != 0 && self.tail < target_cap && head_outside {
                //          T             H
                //   [. . . o o o o o o o . . . . . . ]
                //        H T
                //   [o o . o o o o o ]
                let len = self.wrap_sub(self.head, target_cap);
                unsafe {
                    self.copy_nonoverlapping(0, target_cap, len);
                }
                self.head = len;
                debug_assert!(self.head < self.tail);
            } else if self.tail >= target_cap {
                //              H                 T
                //   [o o o o o . . . . . . . . . o o ]
                //              H T
                //   [o o o o o . o o ]
                debug_assert!(self.wrap_sub(self.head, 1) < target_cap);
                let len = self.cap - self.tail;
                let new_tail = target_cap - len;
                unsafe {
                    self.copy_nonoverlapping(new_tail, self.tail, len);
                }
                self.tail = new_tail;
                debug_assert!(self.head < self.tail);
            }

            if mem::size_of::<T>() != 0 {
                let old = self.cap * mem::size_of::<T>();
                let new_size = target_cap * mem::size_of::<T>();
                unsafe {
                    let ptr = heap::reallocate(*self.ptr as *mut u8,
                                               old,
                                               new_size,
                                               mem::min_align_of::<T>()) as *mut T;
                    if ptr.is_null() { ::alloc::oom() }
                    self.ptr = Unique::new(ptr);
                }
            }
            self.cap = target_cap;

            debug_assert!(self.head < self.cap);
            debug_assert!(self.tail < self.cap);
            // Capacity must remain a power of two for wrap_index masking.
            debug_assert!(self.cap.count_ones() == 1);
        }
    }

    /// Shortens a ringbuf, dropping excess elements from the back.
    ///
    /// If `len` is greater than the ringbuf's current length, this has no
    /// effect.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(collections)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(5);
    /// buf.push_back(10);
    /// buf.push_back(15);
    /// buf.truncate(1);
    /// assert_eq!(buf.len(), 1);
    /// assert_eq!(Some(&5), buf.get(0));
    /// ```
    #[unstable(feature = "collections",
               reason = "matches collection reform specification; waiting on panic semantics")]
    pub fn truncate(&mut self, len: usize) {
        // Popping runs each removed element's destructor in back-to-front order.
        for _ in len..self.len() {
            self.pop_back();
        }
    }

    /// Returns a front-to-back iterator.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(core)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(5);
    /// buf.push_back(3);
    /// buf.push_back(4);
    /// let b: &[_] = &[&5, &3, &4];
    /// let c: Vec<&i32> = buf.iter().collect();
    /// assert_eq!(&c[..], b);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<T> {
        Iter {
            tail: self.tail,
            head: self.head,
            // SAFETY: the iterator borrows `self`, so the buffer outlives it.
            ring: unsafe { self.buffer_as_slice() }
        }
    }

    /// Returns a front-to-back iterator that returns mutable references.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(core)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(5);
    /// buf.push_back(3);
    /// buf.push_back(4);
    /// for num in buf.iter_mut() {
    ///     *num = *num - 2;
    /// }
    /// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
    /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter_mut(&mut self) -> IterMut<T> {
        IterMut {
            tail: self.tail,
            head: self.head,
            // SAFETY: the iterator mutably borrows `self`, so it has
            // exclusive access to the buffer for its lifetime.
            ring: unsafe { self.buffer_as_mut_slice() },
        }
    }

    /// Returns a pair of slices which contain, in order, the contents of the
    /// `VecDeque`.
    #[inline]
    #[unstable(feature = "collections",
               reason = "matches collection reform specification, waiting for dust to settle")]
    pub fn as_slices(&self) -> (&[T], &[T]) {
        unsafe {
            let contiguous = self.is_contiguous();
            let buf = self.buffer_as_slice();
            if contiguous {
                // One run: tail..head; the second slice is empty.
                let (empty, buf) = buf.split_at(0);
                (&buf[self.tail..self.head], empty)
            } else {
                // Wrapped: the run at `tail..cap` comes first, then `0..head`.
                let (mid, right) = buf.split_at(self.tail);
                let (left, _) = mid.split_at(self.head);
                (right, left)
            }
        }
    }

    /// Returns a pair of slices which contain, in order, the contents of the
    /// `VecDeque`.
    #[inline]
    #[unstable(feature = "collections",
               reason = "matches collection reform specification, waiting for dust to settle")]
    pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
        unsafe {
            let contiguous = self.is_contiguous();
            // Copy the indices out first so the mutable borrow of the buffer
            // does not conflict with reading `self.head`/`self.tail`.
            let head = self.head;
            let tail = self.tail;
            let buf = self.buffer_as_mut_slice();

            if contiguous {
                let (empty, buf) = buf.split_at_mut(0);
                (&mut buf[tail .. head], empty)
            } else {
                let (mid, right) = buf.split_at_mut(tail);
                let (left, _) = mid.split_at_mut(head);

                (right, left)
            }
        }
    }

    /// Returns the number of elements in the `VecDeque`.
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// assert_eq!(v.len(), 0); /// v.push_back(1); /// assert_eq!(v.len(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn len(&self) -> usize { count(self.tail, self.head, self.cap) } /// Returns true if the buffer contains no elements /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// assert!(v.is_empty()); /// v.push_front(1); /// assert!(!v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { self.len() == 0 } /// Creates a draining iterator that clears the `VecDeque` and iterates over /// the removed items from start to end. /// /// # Examples /// /// ``` /// # #![feature(collections)] /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// v.push_back(1); /// assert_eq!(v.drain().next(), Some(1)); /// assert!(v.is_empty()); /// ``` #[inline] #[unstable(feature = "collections", reason = "matches collection reform specification, waiting for dust to settle")] pub fn drain(&mut self) -> Drain<T> { Drain { inner: self, } } /// Clears the buffer, removing all values. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// v.push_back(1); /// v.clear(); /// assert!(v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn clear(&mut self) { self.drain(); } /// Provides a reference to the front element, or `None` if the sequence is /// empty. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.front(), None); /// /// d.push_back(1); /// d.push_back(2); /// assert_eq!(d.front(), Some(&1)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn front(&self) -> Option<&T> { if !self.is_empty() { Some(&self[0]) } else { None } } /// Provides a mutable reference to the front element, or `None` if the /// sequence is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.front_mut(), None); /// /// d.push_back(1); /// d.push_back(2); /// match d.front_mut() { /// Some(x) => *x = 9, /// None => (), /// } /// assert_eq!(d.front(), Some(&9)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn front_mut(&mut self) -> Option<&mut T> { if !self.is_empty() { Some(&mut self[0]) } else { None } } /// Provides a reference to the back element, or `None` if the sequence is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.back(), None); /// /// d.push_back(1); /// d.push_back(2); /// assert_eq!(d.back(), Some(&2)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn back(&self) -> Option<&T> { if !self.is_empty() { Some(&self[self.len() - 1]) } else { None } } /// Provides a mutable reference to the back element, or `None` if the /// sequence is empty. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.back(), None); /// /// d.push_back(1); /// d.push_back(2); /// match d.back_mut() { /// Some(x) => *x = 9, /// None => (), /// } /// assert_eq!(d.back(), Some(&9)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn back_mut(&mut self) -> Option<&mut T> { let len = self.len(); if !self.is_empty() { Some(&mut self[len - 1]) } else { None } } /// Removes the first element and returns it, or `None` if the sequence is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// d.push_back(1); /// d.push_back(2); /// /// assert_eq!(d.pop_front(), Some(1)); /// assert_eq!(d.pop_front(), Some(2)); /// assert_eq!(d.pop_front(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn pop_front(&mut self) -> Option<T> { if self.is_empty() { None } else { let tail = self.tail; self.tail = self.wrap_add(self.tail, 1); unsafe { Some(self.buffer_read(tail)) } } } /// Inserts an element first in the sequence. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// d.push_front(1); /// d.push_front(2); /// assert_eq!(d.front(), Some(&2)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn push_front(&mut self, t: T) { if self.is_full() { self.reserve(1); debug_assert!(!self.is_full()); } self.tail = self.wrap_sub(self.tail, 1); let tail = self.tail; unsafe { self.buffer_write(tail, t); } } /// Appends an element to the back of a buffer /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(1); /// buf.push_back(3); /// assert_eq!(3, *buf.back().unwrap()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn push_back(&mut self, t: T) { if self.is_full() { self.reserve(1); debug_assert!(!self.is_full()); } let head = self.head; self.head = self.wrap_add(self.head, 1); unsafe { self.buffer_write(head, t) } } /// Removes the last element from a buffer and returns it, or `None` if /// it is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.pop_back(), None); /// buf.push_back(1); /// buf.push_back(3); /// assert_eq!(buf.pop_back(), Some(3)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn pop_back(&mut self) -> Option<T> { if self.is_empty() { None } else { self.head = self.wrap_sub(self.head, 1); let head = self.head; unsafe { Some(self.buffer_read(head)) } } } #[inline] fn is_contiguous(&self) -> bool { self.tail <= self.head } /// Removes an element from anywhere in the ringbuf and returns it, replacing it with the last /// element. /// /// This does not preserve ordering, but is O(1). /// /// Returns `None` if `index` is out of bounds. 
/// /// # Examples /// /// ``` /// # #![feature(collections)] /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.swap_back_remove(0), None); /// buf.push_back(5); /// buf.push_back(99); /// buf.push_back(15); /// buf.push_back(20); /// buf.push_back(10); /// assert_eq!(buf.swap_back_remove(1), Some(99)); /// ``` #[unstable(feature = "collections", reason = "the naming of this function may be altered")] pub fn swap_back_remove(&mut self, index: usize) -> Option<T> { let length = self.len(); if length > 0 && index < length - 1 { self.swap(index, length - 1); } else if index >= length { return None; } self.pop_back() } /// Removes an element from anywhere in the ringbuf and returns it, replacing it with the first /// element. /// /// This does not preserve ordering, but is O(1). /// /// Returns `None` if `index` is out of bounds. /// /// # Examples /// /// ``` /// # #![feature(collections)] /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.swap_front_remove(0), None); /// buf.push_back(15); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(99); /// buf.push_back(20); /// assert_eq!(buf.swap_front_remove(3), Some(99)); /// ``` #[unstable(feature = "collections", reason = "the naming of this function may be altered")] pub fn swap_front_remove(&mut self, index: usize) -> Option<T> { let length = self.len(); if length > 0 && index < length && index != 0 { self.swap(index, 0); } else if index >= length { return None; } self.pop_front() } /// Inserts an element at position `i` within the ringbuf. Whichever /// end is closer to the insertion point will be moved to make room, /// and all the affected elements will be moved to new positions. 
    ///
    /// # Panics
    ///
    /// Panics if `i` is greater than ringbuf's length
    ///
    /// # Examples
    /// ```
    /// # #![feature(collections)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(10);
    /// buf.push_back(12);
    /// buf.insert(1,11);
    /// assert_eq!(Some(&11), buf.get(1));
    /// ```
    pub fn insert(&mut self, i: usize, t: T) {
        assert!(i <= self.len(), "index out of bounds");
        if self.is_full() {
            self.reserve(1);
            debug_assert!(!self.is_full());
        }

        // Move the least number of elements in the ring buffer and insert
        // the given object
        //
        // At most len/2 - 1 elements will be moved. O(min(n, n-i))
        //
        // There are three main cases:
        //  Elements are contiguous
        //      - special case when tail is 0
        //  Elements are discontiguous and the insert is in the tail section
        //  Elements are discontiguous and the insert is in the head section
        //
        // For each of those there are two more cases:
        //  Insert is closer to tail
        //  Insert is closer to head
        //
        // Key: H - self.head
        //      T - self.tail
        //      o - Valid element
        //      I - Insertion element
        //      A - The element that should be after the insertion point
        //      M - Indicates element was moved

        let idx = self.wrap_add(self.tail, i);

        let distance_to_tail = i;
        let distance_to_head = self.len() - i;

        let contiguous = self.is_contiguous();

        // Dispatch on (layout, closer end, which section holds the slot).
        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
            (true, true, _) if i == 0 => {
                // push_front
                //
                //        T
                //        I             H
                //       [A o o o o o o . . . . . . . . .]
                //
                //                       H         T
                //       [A o o o o o o o . . . . . I]
                //
                self.tail = self.wrap_sub(self.tail, 1);
            },
            (true, true, _) => unsafe {
                // contiguous, insert closer to tail:
                //
                //             T   I         H
                //      [. . . o o A o o o o . . . . . .]
                //
                //           T               H
                //      [. . o o I A o o o o . . . . . .]
                //           M M
                //
                // contiguous, insert closer to tail and tail is 0:
                //
                //       T   I         H
                //      [o o A o o o o . . . . . . . . .]
                //
                //                       H             T
                //      [o I A o o o o o . . . . . . . o]
                //       M                             M
                let new_tail = self.wrap_sub(self.tail, 1);

                self.copy(new_tail, self.tail, 1);
                // Already moved the tail, so we only copy `i - 1` elements.
                self.copy(self.tail, self.tail + 1, i - 1);

                self.tail = new_tail;
            },
            (true, false, _) => unsafe {
                //  contiguous, insert closer to head:
                //
                //             T       I     H
                //      [. . . o o o o A o o . . . . . .]
                //
                //             T               H
                //      [. . . o o o o I A o o . . . . .]
                //                       M M M
                self.copy(idx + 1, idx, self.head - idx);
                self.head = self.wrap_add(self.head, 1);
            },
            (false, true, true) => unsafe {
                // discontiguous, insert closer to tail, tail section:
                //
                //                   H         T   I
                //      [o o o o o o . . . . . o o A o o]
                //
                //                   H       T
                //      [o o o o o o . . . . o o I A o o]
                //                           M M
                self.copy(self.tail - 1, self.tail, i);
                self.tail -= 1;
            },
            (false, false, true) => unsafe {
                // discontiguous, insert closer to head, tail section:
                //
                //           H             T         I
                //      [o o . . . . . . . o o o o o A o]
                //
                //             H           T
                //      [o o o . . . . . . o o o o o I A]
                //       M M M                         M

                // copy elements up to new head
                self.copy(1, 0, self.head);

                // copy last element into empty spot at bottom of buffer
                self.copy(0, self.cap - 1, 1);

                // move elements from idx to end forward not including ^ element
                self.copy(idx + 1, idx, self.cap - 1 - idx);

                self.head += 1;
            },
            (false, true, false) if idx == 0 => unsafe {
                // discontiguous, insert is closer to tail, head section,
                // and is at index zero in the internal buffer:
                //
                //       I                   H     T
                //      [A o o o o o o o o o . . . o o o]
                //
                //                           H   T
                //      [A o o o o o o o o o . . o o o I]
                //                               M M M

                // copy elements up to new tail
                self.copy(self.tail - 1, self.tail, self.cap - self.tail);

                // copy last element into empty spot at bottom of buffer
                self.copy(self.cap - 1, 0, 1);

                self.tail -= 1;
            },
            (false, true, false) => unsafe {
                // discontiguous, insert closer to tail, head section:
                //
                //             I             H     T
                //      [o o o A o o o o o o . . . o o o]
                //
                //                           H   T
                //      [o o I A o o o o o o . . o o o o]
                //       M M                     M M M M

                // copy elements up to new tail
                self.copy(self.tail - 1, self.tail, self.cap - self.tail);

                // copy last element into empty spot at bottom of buffer
                self.copy(self.cap - 1, 0, 1);

                // move elements from idx-1 to end forward not including ^ element
                self.copy(0, 1, idx - 1);

                self.tail -= 1;
            },
            (false, false, false) => unsafe {
                // discontiguous, insert closer to head, head section:
                //
                //               I     H           T
                //      [o o o o A o o . . . . . . o o o]
                //
                //                     H           T
                //      [o o o o I A o o . . . . . o o o]
                //                 M M M
                self.copy(idx + 1, idx, self.head - idx);
                self.head += 1;
            }
        }

        // tail might've been changed so we need to recalculate
        let new_idx = self.wrap_add(self.tail, i);
        unsafe {
            self.buffer_write(new_idx, t);
        }
    }

    /// Removes and returns the element at position `i` from the ringbuf.
    /// Whichever end is closer to the removal point will be moved to make
    /// room, and all the affected elements will be moved to new positions.
    /// Returns `None` if `i` is out of bounds.
    ///
    /// # Examples
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(5);
    /// buf.push_back(10);
    /// buf.push_back(12);
    /// buf.push_back(15);
    /// buf.remove(2);
    /// assert_eq!(Some(&15), buf.get(2));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn remove(&mut self, i: usize) -> Option<T> {
        if self.is_empty() || self.len() <= i {
            return None;
        }

        // There are three main cases:
        //  Elements are contiguous
        //  Elements are discontiguous and the removal is in the tail section
        //  Elements are discontiguous and the removal is in the head section
        //      - special case when elements are technically contiguous,
        //        but self.head = 0
        //
        // For each of those there are two more cases:
        //  Insert is closer to tail
        //  Insert is closer to head
        //
        // Key: H - self.head
        //      T - self.tail
        //      o - Valid element
        //      x - Element marked for removal
        //      R - Indicates element that is being removed
        //      M - Indicates element was moved

        let idx = self.wrap_add(self.tail, i);

        // Read the value out first; the copies below overwrite its slot.
        let elem = unsafe {
            Some(self.buffer_read(idx))
        };

        let distance_to_tail = i;
        let distance_to_head = self.len() - i;

        let contiguous = self.is_contiguous();

        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
            (true, true, _) => unsafe {
                // contiguous, remove closer to tail:
                //
                //             T   R         H
                //      [. . . o o x o o o o . . . . . .]
                //
                //               T           H
                //      [. . . . o o o o o o . . . . . .]
                //               M M
                self.copy(self.tail + 1, self.tail, i);
                self.tail += 1;
            },
            (true, false, _) => unsafe {
                // contiguous, remove closer to head:
                //
                //             T       R     H
                //      [. . . o o o o x o o . . . . . .]
                //
                //             T           H
                //      [. . . o o o o o o . . . . . . .]
                //                     M M
                self.copy(idx, idx + 1, self.head - idx - 1);
                self.head -= 1;
            },
            (false, true, true) => unsafe {
                // discontiguous, remove closer to tail, tail section:
                //
                //                   H         T   R
                //      [o o o o o o . . . . . o o x o o]
                //
                //                   H           T
                //      [o o o o o o . . . . . . o o o o]
                //                               M M
                self.copy(self.tail + 1, self.tail, i);
                self.tail = self.wrap_add(self.tail, 1);
            },
            (false, false, false) => unsafe {
                // discontiguous, remove closer to head, head section:
                //
                //               R     H           T
                //      [o o o o x o o . . . . . . o o o]
                //
                //                   H             T
                //      [o o o o o o . . . . . . . o o o]
                //               M M
                self.copy(idx, idx + 1, self.head - idx - 1);
                self.head -= 1;
            },
            (false, false, true) => unsafe {
                // discontiguous, remove closer to head, tail section:
                //
                //             H           T         R
                //      [o o o . . . . . . o o o o o x o]
                //
                //           H             T
                //      [o o . . . . . . . o o o o o o o]
                //                                   M M M M
                //
                // or quasi-discontiguous, remove next to head, tail section:
                //
                //       H                 T         R
                //      [. . . . . . . . . o o o o o x o]
                //
                //                         T           H
                //      [. . . . . . . . . o o o o o o .]
                //                                   M

                // draw in elements in the tail section
                self.copy(idx, idx + 1, self.cap - idx - 1);

                // Prevents underflow.
                if self.head != 0 {
                    // copy first element into empty spot
                    self.copy(self.cap - 1, 0, 1);

                    // move elements in the head section backwards
                    self.copy(0, 1, self.head - 1);
                }

                self.head = self.wrap_sub(self.head, 1);
            },
            (false, true, false) => unsafe {
                // discontiguous, remove closer to tail, head section:
                //
                //           R               H     T
                //      [o o x o o o o o o o . . . o o o]
                //
                //                           H       T
                //      [o o o o o o o o o o . . . . o o]
                //       M M M                       M M

                // draw in elements up to idx
                self.copy(1, 0, idx);

                // copy last element into empty spot
                self.copy(0, self.cap - 1, 1);

                // move elements from tail to end forward, excluding the last one
                self.copy(self.tail + 1, self.tail, self.cap - self.tail - 1);

                self.tail = self.wrap_add(self.tail, 1);
            }
        }

        return elem;
    }

    /// Splits the collection into two at the given index.
    ///
    /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`,
    /// and the returned `Self` contains elements `[at, len)`.
    ///
    /// Note that the capacity of `self` does not change.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(collections)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<_> = vec![1,2,3].into_iter().collect();
    /// let buf2 = buf.split_off(1);
    /// // buf = [1], buf2 = [2, 3]
    /// assert_eq!(buf.len(), 1);
    /// assert_eq!(buf2.len(), 2);
    /// ```
    #[inline]
    #[unstable(feature = "collections",
               reason = "new API, waiting for dust to settle")]
    pub fn split_off(&mut self, at: usize) -> Self {
        let len = self.len();
        assert!(at <= len, "`at` out of bounds");

        let other_len = len - at;
        let mut other = VecDeque::with_capacity(other_len);

        unsafe {
            // View the contents as (up to) two contiguous runs and bulk-copy
            // the suffix starting at `at` into the fresh buffer.
            let (first_half, second_half) = self.as_slices();

            let first_len = first_half.len();
            let second_len = second_half.len();
            if at < first_len {
                // `at` lies in the first half.
                let amount_in_first = first_len - at;

                ptr::copy_nonoverlapping(first_half.as_ptr().offset(at as isize),
                                         *other.ptr,
                                         amount_in_first);

                // just take all of the second half.
                ptr::copy_nonoverlapping(second_half.as_ptr(),
                                         other.ptr.offset(amount_in_first as isize),
                                         second_len);
            } else {
                // `at` lies in the second half, need to factor in the elements we skipped
                // in the first half.
                let offset = at - first_len;
                let amount_in_second = second_len - offset;
                ptr::copy_nonoverlapping(second_half.as_ptr().offset(offset as isize),
                                         *other.ptr,
                                         amount_in_second);
            }
        }

        // Cleanup where the ends of the buffers are
        // NOTE(review): the moved-out elements are abandoned in place in
        // `self` by shrinking the head; they were bitwise-copied, not dropped.
        self.head = self.wrap_sub(self.head, other_len);
        other.head = other.wrap_index(other_len);

        other
    }

    /// Moves all the elements of `other` into `Self`, leaving `other` empty.
    ///
    /// # Panics
    ///
    /// Panics if the new number of elements in self overflows a `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(collections)]
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
    /// let mut buf2: VecDeque<_> = vec![4, 5, 6].into_iter().collect();
    /// buf.append(&mut buf2);
    /// assert_eq!(buf.len(), 6);
    /// assert_eq!(buf2.len(), 0);
    /// ```
    #[inline]
    #[unstable(feature = "collections",
               reason = "new API, waiting for dust to settle")]
    pub fn append(&mut self, other: &mut Self) {
        // naive impl — element-by-element move via the draining iterator.
        self.extend(other.drain());
    }

    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all elements `e` such that `f(&e)` returns false.
    /// This method operates in place and preserves the order of the retained
    /// elements.
/// /// # Examples /// /// ``` /// # #![feature(vec_deque_retain)] /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.extend(1..5); /// buf.retain(|&x| x%2 == 0); /// /// let v: Vec<_> = buf.into_iter().collect(); /// assert_eq!(&v[..], &[2, 4]); /// ``` #[unstable(feature = "vec_deque_retain", reason = "new API, waiting for dust to settle")] pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool { let len = self.len(); let mut del = 0; for i in 0..len { if !f(&self[i]) { del += 1; } else if del > 0 { self.swap(i-del, i); } } if del > 0 { self.truncate(len - del); } } } impl<T: Clone> VecDeque<T> { /// Modifies the ringbuf in-place so that `len()` is equal to new_len, /// either by removing excess elements or by appending copies of a value to the back. /// /// # Examples /// /// ``` /// # #![feature(collections)] /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(15); /// buf.resize(2, 0); /// buf.resize(6, 20); /// for (a, b) in [5, 10, 20, 20, 20, 20].iter().zip(buf.iter()) { /// assert_eq!(a, b); /// } /// ``` #[unstable(feature = "collections", reason = "matches collection reform specification; waiting on panic semantics")] pub fn resize(&mut self, new_len: usize, value: T) { let len = self.len(); if new_len > len { self.extend(repeat(value).take(new_len - len)) } else { self.truncate(new_len); } } } /// Returns the index in the underlying buffer for a given logical element index. #[inline] fn wrap_index(index: usize, size: usize) -> usize { // size is always a power of 2 index & (size - 1) } /// Calculate the number of elements left to be read in the buffer #[inline] fn count(tail: usize, head: usize, size: usize) -> usize { // size is always a power of 2 (head.wrapping_sub(tail)) & (size - 1) } /// `VecDeque` iterator. 
#[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T:'a> { ring: &'a [T], tail: usize, head: usize } // FIXME(#19839) Remove in favor of `#[derive(Clone)]` impl<'a, T> Clone for Iter<'a, T> { fn clone(&self) -> Iter<'a, T> { Iter { ring: self.ring, tail: self.tail, head: self.head } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { if self.tail == self.head { return None; } let tail = self.tail; self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len()); unsafe { Some(self.ring.get_unchecked(tail)) } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { let len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Iter<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a T> { if self.tail == self.head { return None; } self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len()); unsafe { Some(self.ring.get_unchecked(self.head)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for Iter<'a, T> {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> RandomAccessIterator for Iter<'a, T> { #[inline] fn indexable(&self) -> usize { let (len, _) = self.size_hint(); len } #[inline] fn idx(&mut self, j: usize) -> Option<&'a T> { if j >= self.indexable() { None } else { let idx = wrap_index(self.tail.wrapping_add(j), self.ring.len()); unsafe { Some(self.ring.get_unchecked(idx)) } } } } /// `VecDeque` mutable iterator. 
#[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, T:'a> { ring: &'a mut [T], tail: usize, head: usize, } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for IterMut<'a, T> { type Item = &'a mut T; #[inline] fn next(&mut self) -> Option<&'a mut T> { if self.tail == self.head { return None; } let tail = self.tail; self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len()); unsafe { let elem = self.ring.get_unchecked_mut(tail); Some(&mut *(elem as *mut _)) } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { let len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut T> { if self.tail == self.head { return None; } self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len()); unsafe { let elem = self.ring.get_unchecked_mut(self.head); Some(&mut *(elem as *mut _)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for IterMut<'a, T> {} /// A by-value VecDeque iterator #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter<T> { inner: VecDeque<T>, } #[stable(feature = "rust1", since = "1.0.0")] impl<T> Iterator for IntoIter<T> { type Item = T; #[inline] fn next(&mut self) -> Option<T> { self.inner.pop_front() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { let len = self.inner.len(); (len, Some(len)) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> DoubleEndedIterator for IntoIter<T> { #[inline] fn next_back(&mut self) -> Option<T> { self.inner.pop_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ExactSizeIterator for IntoIter<T> {} /// A draining VecDeque iterator #[unstable(feature = "collections", reason = "matches collection reform specification, waiting for dust to settle")] pub struct Drain<'a, T: 'a> { inner: &'a mut VecDeque<T>, } 
#[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: 'a> Drop for Drain<'a, T> { fn drop(&mut self) { for _ in self.by_ref() {} self.inner.head = 0; self.inner.tail = 0; } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: 'a> Iterator for Drain<'a, T> { type Item = T; #[inline] fn next(&mut self) -> Option<T> { self.inner.pop_front() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { let len = self.inner.len(); (len, Some(len)) } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { #[inline] fn next_back(&mut self) -> Option<T> { self.inner.pop_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {} #[stable(feature = "rust1", since = "1.0.0")] impl<A: PartialEq> PartialEq for VecDeque<A> { fn eq(&self, other: &VecDeque<A>) -> bool { self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a.eq(b)) } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Eq> Eq for VecDeque<A> {} #[stable(feature = "rust1", since = "1.0.0")] impl<A: PartialOrd> PartialOrd for VecDeque<A> { fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> { iter::order::partial_cmp(self.iter(), other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Ord> Ord for VecDeque<A> { #[inline] fn cmp(&self, other: &VecDeque<A>) -> Ordering { iter::order::cmp(self.iter(), other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Hash> Hash for VecDeque<A> { fn hash<H: Hasher>(&self, state: &mut H) { self.len().hash(state); for elt in self { elt.hash(state); } } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> Index<usize> for VecDeque<A> { type Output = A; #[inline] fn index(&self, i: usize) -> &A { self.get(i).expect("Out of bounds access") } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> IndexMut<usize> for VecDeque<A> { #[inline] fn index_mut(&mut self, i: usize) -> &mut A { self.get_mut(i).expect("Out 
of bounds access") } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> FromIterator<A> for VecDeque<A> { fn from_iter<T: IntoIterator<Item=A>>(iterable: T) -> VecDeque<A> { let iterator = iterable.into_iter(); let (lower, _) = iterator.size_hint(); let mut deq = VecDeque::with_capacity(lower); deq.extend(iterator); deq } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> IntoIterator for VecDeque<T> { type Item = T; type IntoIter = IntoIter<T>; /// Consumes the list into a front-to-back iterator yielding elements by /// value. fn into_iter(self) -> IntoIter<T> { IntoIter { inner: self, } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a VecDeque<T> { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a mut VecDeque<T> { type Item = &'a mut T; type IntoIter = IterMut<'a, T>; fn into_iter(mut self) -> IterMut<'a, T> { self.iter_mut() } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> Extend<A> for VecDeque<A> { fn extend<T: IntoIterator<Item=A>>(&mut self, iter: T) { for elt in iter { self.push_back(elt); } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: fmt::Debug> fmt::Debug for VecDeque<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "[")); for (i, e) in self.iter().enumerate() { if i != 0 { try!(write!(f, ", ")); } try!(write!(f, "{:?}", *e)); } write!(f, "]") } } #[cfg(test)] mod tests { use core::iter::{Iterator, self}; use core::option::Option::Some; use test; use super::VecDeque; #[bench] fn bench_push_back_100(b: &mut test::Bencher) { let mut deq = VecDeque::with_capacity(101); b.iter(|| { for i in 0..100 { deq.push_back(i); } deq.head = 0; deq.tail = 0; }) } #[bench] fn bench_push_front_100(b: &mut test::Bencher) { let mut deq = VecDeque::with_capacity(101); b.iter(|| { for i in 0..100 { deq.push_front(i); } deq.head = 0; deq.tail = 0; }) } 
#[bench] fn bench_pop_back_100(b: &mut test::Bencher) { let mut deq= VecDeque::<i32>::with_capacity(101); b.iter(|| { deq.head = 100; deq.tail = 0; while !deq.is_empty() { test::black_box(deq.pop_back()); } }) } #[bench] fn bench_pop_front_100(b: &mut test::Bencher) { let mut deq = VecDeque::<i32>::with_capacity(101); b.iter(|| { deq.head = 100; deq.tail = 0; while !deq.is_empty() { test::black_box(deq.pop_front()); } }) } #[test] fn test_swap_front_back_remove() { fn test(back: bool) { // This test checks that every single combination of tail position and length is tested. // Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); let usable_cap = tester.capacity(); let final_len = usable_cap / 2; for len in 0..final_len { let expected = if back { (0..len).collect() } else { (0..len).rev().collect() }; for tail_pos in 0..usable_cap { tester.tail = tail_pos; tester.head = tail_pos; if back { for i in 0..len * 2 { tester.push_front(i); } for i in 0..len { assert_eq!(tester.swap_back_remove(i), Some(len * 2 - 1 - i)); } } else { for i in 0..len * 2 { tester.push_back(i); } for i in 0..len { let idx = tester.len() - 1 - i; assert_eq!(tester.swap_front_remove(idx), Some(len * 2 - 1 - i)); } } assert!(tester.tail < tester.cap); assert!(tester.head < tester.cap); assert_eq!(tester, expected); } } } test(true); test(false); } #[test] fn test_insert() { // This test checks that every single combination of tail position, length, and // insertion position is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. 
// 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); // len is the length *after* insertion for len in 1..cap { // 0, 1, 2, .., len - 1 let expected = (0..).take(len).collect(); for tail_pos in 0..cap { for to_insert in 0..len { tester.tail = tail_pos; tester.head = tail_pos; for i in 0..len { if i != to_insert { tester.push_back(i); } } tester.insert(to_insert, to_insert); assert!(tester.tail < tester.cap); assert!(tester.head < tester.cap); assert_eq!(tester, expected); } } } } #[test] fn test_remove() { // This test checks that every single combination of tail position, length, and // removal position is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); // len is the length *after* removal for len in 0..cap - 1 { // 0, 1, 2, .., len - 1 let expected = (0..).take(len).collect(); for tail_pos in 0..cap { for to_remove in 0..len + 1 { tester.tail = tail_pos; tester.head = tail_pos; for i in 0..len { if i == to_remove { tester.push_back(1234); } tester.push_back(i); } if to_remove == len { tester.push_back(1234); } tester.remove(to_remove); assert!(tester.tail < tester.cap); assert!(tester.head < tester.cap); assert_eq!(tester, expected); } } } } #[test] fn test_shrink_to_fit() { // This test checks that every single combination of head and tail position, // is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. 
// 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); tester.reserve(63); let max_cap = tester.capacity(); for len in 0..cap + 1 { // 0, 1, 2, .., len - 1 let expected = (0..).take(len).collect(); for tail_pos in 0..max_cap + 1 { tester.tail = tail_pos; tester.head = tail_pos; tester.reserve(63); for i in 0..len { tester.push_back(i); } tester.shrink_to_fit(); assert!(tester.capacity() <= cap); assert!(tester.tail < tester.cap); assert!(tester.head < tester.cap); assert_eq!(tester, expected); } } } #[test] fn test_split_off() { // This test checks that every single combination of tail position, length, and // split position is tested. Capacity 15 should be large enough to cover every case. let mut tester = VecDeque::with_capacity(15); // can't guarantee we got 15, so have to get what we got. // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else // this test isn't covering what it wants to let cap = tester.capacity(); // len is the length *before* splitting for len in 0..cap { // index to split at for at in 0..len + 1 { // 0, 1, 2, .., at - 1 (may be empty) let expected_self = (0..).take(at).collect(); // at, at + 1, .., len - 1 (may be empty) let expected_other = (at..).take(len - at).collect(); for tail_pos in 0..cap { tester.tail = tail_pos; tester.head = tail_pos; for i in 0..len { tester.push_back(i); } let result = tester.split_off(at); assert!(tester.tail < tester.cap); assert!(tester.head < tester.cap); assert!(result.tail < result.cap); assert!(result.head < result.cap); assert_eq!(tester, expected_self); assert_eq!(result, expected_other); } } } } }
32.217836
100
0.465278
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_swap_front_back_remove() {\n fn test(back: bool) {\n // This test checks that every single combination of tail position and length is tested.\n // Capacity 15 should be large enough to cover every case.\n let mut tester = VecDeque::with_capacity(15);\n let usable_cap = tester.capacity();\n let final_len = usable_cap / 2;\n\n for len in 0..final_len {\n let expected = if back {\n (0..len).collect()\n } else {\n (0..len).rev().collect()\n };\n for tail_pos in 0..usable_cap {\n tester.tail = tail_pos;\n tester.head = tail_pos;\n if back {\n for i in 0..len * 2 {\n tester.push_front(i);\n }\n for i in 0..len {\n assert_eq!(tester.swap_back_remove(i), Some(len * 2 - 1 - i));\n }\n } else {\n for i in 0..len * 2 {\n tester.push_back(i);\n }\n for i in 0..len {\n let idx = tester.len() - 1 - i;\n assert_eq!(tester.swap_front_remove(idx), Some(len * 2 - 1 - i));\n }\n }\n assert!(tester.tail < tester.cap);\n assert!(tester.head < tester.cap);\n assert_eq!(tester, expected);\n }\n }\n }\n test(true);\n test(false);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_insert() {\n // This test checks that every single combination of tail position, length, and\n // insertion position is tested. 
Capacity 15 should be large enough to cover every case.\n\n let mut tester = VecDeque::with_capacity(15);\n // can't guarantee we got 15, so have to get what we got.\n // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else\n // this test isn't covering what it wants to\n let cap = tester.capacity();\n\n\n // len is the length *after* insertion\n for len in 1..cap {\n // 0, 1, 2, .., len - 1\n let expected = (0..).take(len).collect();\n for tail_pos in 0..cap {\n for to_insert in 0..len {\n tester.tail = tail_pos;\n tester.head = tail_pos;\n for i in 0..len {\n if i != to_insert {\n tester.push_back(i);\n }\n }\n tester.insert(to_insert, to_insert);\n assert!(tester.tail < tester.cap);\n assert!(tester.head < tester.cap);\n assert_eq!(tester, expected);\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_remove() {\n // This test checks that every single combination of tail position, length, and\n // removal position is tested. Capacity 15 should be large enough to cover every case.\n\n let mut tester = VecDeque::with_capacity(15);\n // can't guarantee we got 15, so have to get what we got.\n // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else\n // this test isn't covering what it wants to\n let cap = tester.capacity();\n\n // len is the length *after* removal\n for len in 0..cap - 1 {\n // 0, 1, 2, .., len - 1\n let expected = (0..).take(len).collect();\n for tail_pos in 0..cap {\n for to_remove in 0..len + 1 {\n tester.tail = tail_pos;\n tester.head = tail_pos;\n for i in 0..len {\n if i == to_remove {\n tester.push_back(1234);\n }\n tester.push_back(i);\n }\n if to_remove == len {\n tester.push_back(1234);\n }\n tester.remove(to_remove);\n assert!(tester.tail < tester.cap);\n assert!(tester.head < tester.cap);\n assert_eq!(tester, expected);\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_shrink_to_fit() {\n // This test checks that every single 
combination of head and tail position,\n // is tested. Capacity 15 should be large enough to cover every case.\n\n let mut tester = VecDeque::with_capacity(15);\n // can't guarantee we got 15, so have to get what we got.\n // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else\n // this test isn't covering what it wants to\n let cap = tester.capacity();\n tester.reserve(63);\n let max_cap = tester.capacity();\n\n for len in 0..cap + 1 {\n // 0, 1, 2, .., len - 1\n let expected = (0..).take(len).collect();\n for tail_pos in 0..max_cap + 1 {\n tester.tail = tail_pos;\n tester.head = tail_pos;\n tester.reserve(63);\n for i in 0..len {\n tester.push_back(i);\n }\n tester.shrink_to_fit();\n assert!(tester.capacity() <= cap);\n assert!(tester.tail < tester.cap);\n assert!(tester.head < tester.cap);\n assert_eq!(tester, expected);\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_split_off() {\n // This test checks that every single combination of tail position, length, and\n // split position is tested. 
Capacity 15 should be large enough to cover every case.\n\n let mut tester = VecDeque::with_capacity(15);\n // can't guarantee we got 15, so have to get what we got.\n // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else\n // this test isn't covering what it wants to\n let cap = tester.capacity();\n\n // len is the length *before* splitting\n for len in 0..cap {\n // index to split at\n for at in 0..len + 1 {\n // 0, 1, 2, .., at - 1 (may be empty)\n let expected_self = (0..).take(at).collect();\n // at, at + 1, .., len - 1 (may be empty)\n let expected_other = (at..).take(len - at).collect();\n\n for tail_pos in 0..cap {\n tester.tail = tail_pos;\n tester.head = tail_pos;\n for i in 0..len {\n tester.push_back(i);\n }\n let result = tester.split_off(at);\n assert!(tester.tail < tester.cap);\n assert!(tester.head < tester.cap);\n assert!(result.tail < result.cap);\n assert!(result.head < result.cap);\n assert_eq!(tester, expected_self);\n assert_eq!(result, expected_other);\n }\n }\n }\n }\n}" ]
f70921ce3edb9be5efdbe740df2e55661d134c67
1,671
rs
Rust
vrp-pragmatic/tests/unit/utils/approx_transportation_test.rs
valerivp/vrp
27ee30e5f4c44e051e5cec1248e606305b52fc00
[ "Apache-2.0" ]
1
2021-04-06T08:26:03.000Z
2021-04-06T08:26:03.000Z
vrp-pragmatic/tests/unit/utils/approx_transportation_test.rs
valerivp/vrp
27ee30e5f4c44e051e5cec1248e606305b52fc00
[ "Apache-2.0" ]
null
null
null
vrp-pragmatic/tests/unit/utils/approx_transportation_test.rs
valerivp/vrp
27ee30e5f4c44e051e5cec1248e606305b52fc00
[ "Apache-2.0" ]
null
null
null
use super::*; use crate::format::Location; use vrp_core::models::common::Profile; use vrp_core::models::problem::{create_matrix_transport_cost, MatrixData}; fn get_test_locations() -> Vec<Location> { vec![ Location::Coordinate { lat: 52.52599, lng: 13.45413 }, Location::Coordinate { lat: 52.5225, lng: 13.4095 }, Location::Coordinate { lat: 52.5165, lng: 13.3808 }, ] } #[test] fn can_calculate_distance_between_two_locations() { let l1 = Location::Coordinate { lat: 52.52599, lng: 13.45413 }; let l2 = Location::Coordinate { lat: 52.5165, lng: 13.3808 }; let distance = get_distance(&l1, &l2); assert_eq!(distance.round(), 5078.); } #[test] fn can_use_approximated_with_matrix_costs() { let profile = Profile::default(); let locations = get_test_locations(); let speed = 10.; let approx_data = get_approx_transportation(&locations, &[speed]); assert_eq!(approx_data.len(), 1); let (durations, distances) = approx_data.first().unwrap(); let durations = durations.iter().map(|&d| d as f64).collect(); let distances = distances.iter().map(|&d| d as f64).collect(); let costs = create_matrix_transport_cost(vec![MatrixData::new(profile.index, None, durations, distances)]) .expect("Cannot create matrix transport costs"); vec![(0, 1, 3048.), (1, 2, 2056.), (2, 0, 5078.)].into_iter().for_each(|(from, to, expected)| { let distance = costs.distance(&profile, from, to, 0.); let duration = costs.duration(&profile, from, to, 0.); assert_eq!(distance.round(), expected); assert_eq!(duration.round(), (distance / speed).round()); }); }
35.553191
110
0.650509
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn can_calculate_distance_between_two_locations() {\n let l1 = Location::Coordinate { lat: 52.52599, lng: 13.45413 };\n let l2 = Location::Coordinate { lat: 52.5165, lng: 13.3808 };\n\n let distance = get_distance(&l1, &l2);\n\n assert_eq!(distance.round(), 5078.);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn can_use_approximated_with_matrix_costs() {\n let profile = Profile::default();\n let locations = get_test_locations();\n let speed = 10.;\n let approx_data = get_approx_transportation(&locations, &[speed]);\n assert_eq!(approx_data.len(), 1);\n\n let (durations, distances) = approx_data.first().unwrap();\n let durations = durations.iter().map(|&d| d as f64).collect();\n let distances = distances.iter().map(|&d| d as f64).collect();\n\n let costs = create_matrix_transport_cost(vec![MatrixData::new(profile.index, None, durations, distances)])\n .expect(\"Cannot create matrix transport costs\");\n\n vec![(0, 1, 3048.), (1, 2, 2056.), (2, 0, 5078.)].into_iter().for_each(|(from, to, expected)| {\n let distance = costs.distance(&profile, from, to, 0.);\n let duration = costs.duration(&profile, from, to, 0.);\n\n assert_eq!(distance.round(), expected);\n assert_eq!(duration.round(), (distance / speed).round());\n });\n}\n}" ]
f709605bba77fdca17f8a97d61f67445f6679f85
8,279
rs
Rust
tests/it/bank_model.rs
obsidiandynamics/stride-rs
517a96ba133ef20af8177701ff7d49f223998879
[ "MIT" ]
null
null
null
tests/it/bank_model.rs
obsidiandynamics/stride-rs
517a96ba133ef20af8177701ff7d49f223998879
[ "MIT" ]
null
null
null
tests/it/bank_model.rs
obsidiandynamics/stride-rs
517a96ba133ef20af8177701ff7d49f223998879
[ "MIT" ]
null
null
null
use std::rc::Rc; use stride::examiner::Record; use stride::havoc::model::{Model, name_of, rand_element}; use stride::havoc::model::ActionResult::{Blocked, Joined, Ran}; use stride::havoc::model::Retention::{Strong, Weak}; use crate::fixtures::schema::CandidateData; use crate::fixtures::schema::MessageKind::CandidateMessage; use crate::harness::{dfs, sim}; use crate::utils::uuidify; use super::fixtures::*; fn asserter( values: &[i32], cohort_index: usize, ) -> impl Fn(&[Cohort]) -> Box<dyn Fn(&[Cohort]) -> Option<String>> { let expected_sum = values.iter().sum::<i32>(); move |_| { Box::new(move |after| { let replica = &after[cohort_index].replica; let mut computed_sum = 0; for &(item_val, _) in &replica.items { if item_val < 0 { return Some(format!("account negative: {:?}", replica)); } computed_sum += item_val; } if expected_sum != computed_sum { Some(format!( "expected: {}, computed: {} for {:?}", expected_sum, computed_sum, replica )) } else { None } }) } } struct BankCfg<'a> { values: &'a [i32], num_cohorts: usize, txns_per_cohort: usize, extents: &'a [usize], name: &'a str, } fn build_model(cfg: BankCfg) -> Model<SystemState> { let num_cohorts = cfg.num_cohorts; let num_certifiers = cfg.extents.len(); let values = cfg.values; let mut model = Model::new(move || SystemState::new(num_cohorts, values, num_certifiers)) .with_name(cfg.name.into()); for cohort_index in 0..cfg.num_cohorts { let itemset = (0..cfg.values.len()) .map(|i| format!("item-{}", i)) .collect::<Vec<_>>(); let txns_per_cohort = cfg.txns_per_cohort; model.add_action(format!("initiator-{}", cohort_index), Weak, move |s, c| { let run = s.cohort_txns(cohort_index); let cohort = &mut s.cohorts[cohort_index]; // list of 'from' accounts that have sufficient funds to initiate a transfer let from_accounts = cohort .replica .items .iter() .enumerate() .filter(|&(_, &(item_val, _))| item_val > 0) .collect::<Vec<_>>(); if from_accounts.is_empty() { return Blocked; } // pick a 'from' account at random let 
&(from, &(from_val, from_ver)) = rand_element(c, &from_accounts); // list of 'to' accounts that excludes the 'from' account let to_accounts = cohort .replica .items .iter() .enumerate() .filter(|&(item, _)| item != from) .collect::<Vec<_>>(); // pick a 'to' account at random let &(to, &(to_val, to_ver)) = rand_element(c, &to_accounts); // transfer at least half of the value in the 'from' account let xfer_amount = (from_val + 1) / 2; let readset = vec![itemset[from].clone(), itemset[to].clone()]; let writeset = readset.clone(); let cpt_readvers = vec![from_ver, to_ver]; let cpt_snapshot = cohort.replica.ver; let changes = &[(from, from_val - xfer_amount), (to, to_val + xfer_amount)]; let (readvers, snapshot) = Record::compress(cpt_readvers, cpt_snapshot); let statemap = Statemap::map(changes, Op::Set); cohort .stream .produce(Rc::new(CandidateMessage(CandidateData { rec: Record { xid: uuidify(cohort_index, run), readset, writeset, readvers, snapshot, }, statemap, }))); if run + 1 == txns_per_cohort { Joined } else { Ran } }); model.add_action( format!("updater-{}", cohort_index), Weak, updater_action(cohort_index, asserter(cfg.values, cohort_index)), ); model.add_action( format!("replicator-{}", cohort_index), Weak, replicator_action(cohort_index, asserter(cfg.values, cohort_index)), ); } for (certifier_index, &extent) in cfg.extents.iter().enumerate() { model.add_action( format!("certifier-{}", certifier_index), Weak, certifier_action(certifier_index, extent), ); } model.add_action( "supervisor".into(), Strong, supervisor_action(cfg.num_cohorts * cfg.txns_per_cohort), ); model } #[test] fn dfs_bank_2x1x1() { dfs(&build_model(BankCfg { values: &[101, 103], num_cohorts: 1, txns_per_cohort: 1, extents: &[1], name: name_of(&dfs_bank_2x1x1), })); } #[test] fn dfs_bank_2x1x2() { dfs(&build_model(BankCfg { values: &[101, 103], num_cohorts: 1, txns_per_cohort: 2, extents: &[2], name: name_of(&dfs_bank_2x1x2), })); } #[test] #[ignore] fn dfs_bank_2x2x1() { 
dfs(&build_model(BankCfg { values: &[101, 103], num_cohorts: 2, txns_per_cohort: 1, extents: &[2], name: name_of(&dfs_bank_2x2x1), })); } #[test] #[ignore] fn dfs_bank_2x2x2() { dfs(&build_model(BankCfg { values: &[101, 103], num_cohorts: 2, txns_per_cohort: 2, extents: &[4], name: name_of(&dfs_bank_2x2x2), })); } #[test] fn sim_bank_2x1x1() { sim( &build_model(BankCfg { values: &[101, 103], num_cohorts: 1, txns_per_cohort: 1, extents: &[1], name: name_of(&sim_bank_2x1x1), }), 10, ); } #[test] fn sim_bank_2x2x1() { sim( &build_model(BankCfg { values: &[101, 103], num_cohorts: 2, txns_per_cohort: 1, extents: &[2], name: name_of(&sim_bank_2x2x1), }), 20, ); } #[test] fn sim_bank_2x2x2() { sim( &build_model(BankCfg { values: &[101, 103], num_cohorts: 2, txns_per_cohort: 2, extents: &[4], name: name_of(&sim_bank_2x2x2), }), 40, ); } #[test] fn sim_bank_2x3x1() { sim( &build_model(BankCfg { values: &[101, 103], num_cohorts: 3, txns_per_cohort: 1, extents: &[3], name: name_of(&sim_bank_2x3x1), }), 40, ); } #[test] fn sim_bank_2x3x2() { sim( &build_model(BankCfg { values: &[101, 103], num_cohorts: 3, txns_per_cohort: 2, extents: &[6], name: name_of(&sim_bank_2x3x2), }), 80, ); } #[test] fn sim_bank_3x3x2() { sim( &build_model(BankCfg { values: &[101, 103, 105], num_cohorts: 3, txns_per_cohort: 2, extents: &[6], name: name_of(&sim_bank_3x3x2), }), 160, ); } #[test] fn sim_bank_2x4x1() { sim( &build_model(BankCfg { values: &[101, 103], num_cohorts: 4, txns_per_cohort: 1, extents: &[4], name: name_of(&sim_bank_2x4x1), }), 80, ); } #[test] fn sim_bank_2x4x2() { sim( &build_model(BankCfg { values: &[101, 103], num_cohorts: 4, txns_per_cohort: 2, extents: &[8], name: name_of(&sim_bank_2x4x2), }), 160, ); } #[test] fn sim_bank_3x4x2() { sim( &build_model(BankCfg { values: &[101, 103, 105], num_cohorts: 4, txns_per_cohort: 2, extents: &[8], name: name_of(&sim_bank_3x4x2), }), 160, ); }
26.116719
93
0.491122
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn dfs_bank_2x1x1() {\n dfs(&build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 1,\n txns_per_cohort: 1,\n extents: &[1],\n name: name_of(&dfs_bank_2x1x1),\n }));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn dfs_bank_2x1x2() {\n dfs(&build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 1,\n txns_per_cohort: 2,\n extents: &[2],\n name: name_of(&dfs_bank_2x1x2),\n }));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn dfs_bank_2x2x1() {\n dfs(&build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 2,\n txns_per_cohort: 1,\n extents: &[2],\n name: name_of(&dfs_bank_2x2x1),\n }));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn dfs_bank_2x2x2() {\n dfs(&build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 2,\n txns_per_cohort: 2,\n extents: &[4],\n name: name_of(&dfs_bank_2x2x2),\n }));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_2x1x1() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 1,\n txns_per_cohort: 1,\n extents: &[1],\n name: name_of(&sim_bank_2x1x1),\n }),\n 10,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_2x2x1() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 2,\n txns_per_cohort: 1,\n extents: &[2],\n name: name_of(&sim_bank_2x2x1),\n }),\n 20,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_2x2x2() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 2,\n txns_per_cohort: 2,\n extents: &[4],\n name: name_of(&sim_bank_2x2x2),\n }),\n 40,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_2x3x1() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 3,\n txns_per_cohort: 1,\n extents: &[3],\n name: name_of(&sim_bank_2x3x1),\n }),\n 40,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn 
sim_bank_2x3x2() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 3,\n txns_per_cohort: 2,\n extents: &[6],\n name: name_of(&sim_bank_2x3x2),\n }),\n 80,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_3x3x2() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103, 105],\n num_cohorts: 3,\n txns_per_cohort: 2,\n extents: &[6],\n name: name_of(&sim_bank_3x3x2),\n }),\n 160,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_2x4x1() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 4,\n txns_per_cohort: 1,\n extents: &[4],\n name: name_of(&sim_bank_2x4x1),\n }),\n 80,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_2x4x2() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103],\n num_cohorts: 4,\n txns_per_cohort: 2,\n extents: &[8],\n name: name_of(&sim_bank_2x4x2),\n }),\n 160,\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sim_bank_3x4x2() {\n sim(\n &build_model(BankCfg {\n values: &[101, 103, 105],\n num_cohorts: 4,\n txns_per_cohort: 2,\n extents: &[8],\n name: name_of(&sim_bank_3x4x2),\n }),\n 160,\n );\n}\n}" ]
f70963044d83e3f5e4cbfb3b09f29a5cdfcf378a
5,832
rs
Rust
src/sound_data.rs
hnen/ears
021a94ba0b0ef71ad38ad26c961d235149768872
[ "MIT" ]
89
2016-01-23T09:11:10.000Z
2022-01-21T17:41:41.000Z
src/sound_data.rs
hnen/ears
021a94ba0b0ef71ad38ad26c961d235149768872
[ "MIT" ]
23
2016-01-28T23:32:00.000Z
2021-09-22T13:23:40.000Z
src/sound_data.rs
hnen/ears
021a94ba0b0ef71ad38ad26c961d235149768872
[ "MIT" ]
18
2016-02-25T22:06:52.000Z
2022-01-28T18:40:16.000Z
// The MIT License (MIT) // // Copyright (c) 2013 Jeremy Letang (letang.jeremy@gmail.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. //! The datas extracted from a sound file. use std::mem; use libc::c_void; use std::vec::Vec; use openal::{ffi, al}; use sndfile::{SndFile, SndInfo}; use sndfile::OpenMode::Read; use internal::OpenAlData; use audio_tags::{Tags, AudioTags, get_sound_tags}; /** * Samples extracted from a file. * * SoundDatas are made to be shared between several Sound and played in the same * time. 
* * # Example * ```ignore * use ears::{Sound, SoundData, AudioController}; * use std::cell::RefCell; * use std::rc::Rc; * * fn main() -> () { * // Create a SoundData * let snd_data = Rc::new(RefCell::new(SoundData::new("path/to/my/sound.wav") * .unwrap())); * * // Create two Sound with the same SoundData * let mut snd1 = Sound::new_with_data(snd_data.clone()).unwrap(); * let mut snd2 = Sound::new_with_data(snd_data.clone()).unwrap(); * * // Play the sounds * snd1.play(); * snd2.play(); * * // Wait until snd2 is playing * while snd2.is_playing() {} * } * ``` */ pub struct SoundData { /// The SoundTags who contains all the information of the sound sound_tags: Tags, /// The sndfile samples information snd_info: SndInfo, /// The total samples count of the Sound nb_sample: i64, /// The OpenAl internal identifier for the buffer al_buffer: u32 } impl SoundData { /** * Create a new SoundData. * * The SoundData contains all the information extracted from the * file: samples and tags. * It's an easy way to share the same samples between man Sounds objects. * * # Arguments * * `path` - The path of the file to load * * # Return * A `Result` containing Ok(SoundData) on success, Err(String) * if there has been an error. 
*/ pub fn new(path: &str) -> Result<SoundData, String> { check_openal_context!(Err("Invalid OpenAL context.".into())); let mut file = match SndFile::new(path, Read) { Ok(file) => file, Err(err) => { return Err(format!("Error while loading sound file: {}", err)); } }; let infos = file.get_sndinfo(); let nb_sample = infos.channels as i64 * infos.frames; let mut samples = vec![0i16; nb_sample as usize]; file.read_i16(&mut samples[..], nb_sample as i64); let mut buffer_id = 0; let len = mem::size_of::<i16>() * (samples.len()); // Retrieve format informations let format = match al::get_channels_format(infos.channels) { Some(fmt) => fmt, None => { return Err("Unrecognized sound format.".into()); } }; al::alGenBuffers(1, &mut buffer_id); al::alBufferData(buffer_id, format, samples.as_ptr() as *mut c_void, len as i32, infos.samplerate); if let Some(err) = al::openal_has_error() { return Err(format!("Internal OpenAL error: {}", err)); }; let sound_data = SoundData { sound_tags: get_sound_tags(&file), snd_info: infos, nb_sample: nb_sample, al_buffer: buffer_id }; file.close(); Ok(sound_data) } } /** * Get the sound file infos. * * # Return * The struct SndInfo. */ pub fn get_sndinfo<'r>(s_data: &'r SoundData) -> &'r SndInfo { &s_data.snd_info } /** * Get the OpenAL identifier of the samples buffer. * * # Return * The OpenAL internal identifier for the samples buffer of the sound. */ #[doc(hidden)] pub fn get_buffer(s_data: &SoundData) -> u32 { s_data.al_buffer } impl AudioTags for SoundData { /** * Get the tags of a Sound. 
* * # Return * A borrowed pointer to the internal struct SoundTags */ fn get_tags(&self) -> Tags { self.sound_tags.clone() } } impl Drop for SoundData { /// Destroy all the resources attached to the SoundData fn drop(&mut self) -> () { unsafe { ffi::alDeleteBuffers(1, &mut self.al_buffer); } } } #[cfg(test)] mod test { #![allow(non_snake_case)] #[allow(unused_variables)] use sound_data::SoundData; #[test] #[ignore] fn sounddata_create_OK() -> () { #![allow(unused_variables)] let snd_data = SoundData::new("res/shot.wav").unwrap(); } #[test] #[ignore] #[should_panic] fn sounddata_create_FAIL() -> () { #![allow(unused_variables)] let snd_data = SoundData::new("toto.wav").unwrap(); } }
28.173913
83
0.609053
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn sounddata_create_OK() -> () {\n #![allow(unused_variables)]\n let snd_data = SoundData::new(\"res/shot.wav\").unwrap();\n\n }\n}" ]
f709b841e047df77598f689982f42579fe62929b
20,912
rs
Rust
rust-autograd/src/ops/dot_ops.rs
jiguanglizipao/sgx-mnist
3d196d27d7b2aafbf1ab4952f4563ee990ed4b1a
[ "MIT" ]
2
2020-06-14T13:54:39.000Z
2020-06-14T14:09:04.000Z
rust-autograd/src/ops/dot_ops.rs
jiguanglizipao/sgx-mnist
3d196d27d7b2aafbf1ab4952f4563ee990ed4b1a
[ "MIT" ]
null
null
null
rust-autograd/src/ops/dot_ops.rs
jiguanglizipao/sgx-mnist
3d196d27d7b2aafbf1ab4952f4563ee990ed4b1a
[ "MIT" ]
null
null
null
use ndarray; use ndarray_ext::NdArray; use op; #[cfg(feature = "mkl")] use same_type; #[cfg(feature = "mkl")] use std::mem; use tensor::Tensor; use Float; #[cfg(feature = "mkl")] type MklInt = i64; #[cfg(feature = "mkl")] #[repr(C)] #[derive(Clone, Copy, Debug)] enum CblasTranspose { CblasNoTrans = 111, CblasTrans = 112, // CblasConjTrans = 113, } #[cfg(feature = "mkl")] type CblasLayout = usize; #[cfg(feature = "mkl")] extern "C" { // sgemm from intel MKL fn cblas_sgemm( layout: CblasLayout, transa: CblasTranspose, transb: CblasTranspose, m: MklInt, n: MklInt, k: MklInt, alpha: libc::c_float, a: *const libc::c_float, lda: MklInt, b: *const libc::c_float, ldb: MklInt, beta: libc::c_float, c: *mut libc::c_float, ldc: MklInt, ); // dgemm from intel MKL fn cblas_dgemm( layout: CblasLayout, transa: CblasTranspose, transb: CblasTranspose, m: MklInt, n: MklInt, k: MklInt, alpha: libc::c_double, a: *const libc::c_double, lda: MklInt, b: *const libc::c_double, ldb: MklInt, beta: libc::c_double, c: *mut libc::c_double, ldc: MklInt, ); // Batched sgemm from intel MKL fn cblas_sgemm_batch( layout: CblasLayout, transa_array: *const CblasTranspose, // batch of CblasTranspose transb_array: *const CblasTranspose, // batch of CblasTranspose m_array: *const MklInt, // batch of m n_array: *const MklInt, // batch of n k_array: *const MklInt, // batch of k alpha_array: *const libc::c_float, // batch of alpha a_array: *const *const libc::c_float, // a lda_array: *const MklInt, // batch of lda b_array: *const *const libc::c_float, // b ldb_array: *const MklInt, // batch of ldb beta_array: *const libc::c_float, // batch of beta c_array: *mut *mut libc::c_float, // c ldc_array: *const MklInt, // batch of odc group_count: MklInt, // batch size group_size: *const MklInt, ); // num of matrices in each batch // Batched sgemm from intel MKL fn cblas_dgemm_batch( layout: CblasLayout, transa_array: *const CblasTranspose, // batch of CblasTranspose transb_array: *const CblasTranspose, // batch of 
CblasTranspose m_array: *const MklInt, // batch of m n_array: *const MklInt, // batch of n k_array: *const MklInt, // batch of k alpha_array: *const libc::c_double, // batch of alpha a_array: *const *const libc::c_double, // a lda_array: *const MklInt, // batch of lda b_array: *const *const libc::c_double, // b ldb_array: *const MklInt, // batch of ldb beta_array: *const libc::c_double, // batch of beta c_array: *mut *mut libc::c_double, // c ldc_array: *const MklInt, // batch of odc group_count: MklInt, // batch size group_size: *const MklInt, ); // num of matrices in each batch } #[cfg(feature = "mkl")] #[inline] pub fn cblas_sgemm_wrapper( trans_a: bool, trans_b: bool, m: usize, n: usize, k: usize, alpha: f32, a: *const f32, b: *const f32, beta: f32, c: *mut f32, ) { let lda = if trans_a { m } else { k } as MklInt; let ldb = if trans_b { k } else { n } as MklInt; let ldc = n as MklInt; let trans_a = if trans_a { CblasTranspose::CblasTrans } else { CblasTranspose::CblasNoTrans }; let trans_b = if trans_b { CblasTranspose::CblasTrans } else { CblasTranspose::CblasNoTrans }; unsafe { const CBLAS_ROW_MAGER: usize = 101; cblas_sgemm( CBLAS_ROW_MAGER, trans_a, trans_b, m as MklInt, n as MklInt, k as MklInt, alpha, a, lda, b, ldb, beta, c, ldc, ); } } #[test] #[cfg(feature = "mkl")] fn test_sgemm() { let x = vec![1., 2., 3., 4.]; // (2, 2) let y = vec![1., 2., 3., 4.]; // (2, 2) let mut z = uninitialized_vec::<f32>(4); // (2, 2, 2) cblas_sgemm_wrapper( false, false, 2, // m 2, // n 2, // k 1., // alpha x.as_ptr(), y.as_ptr(), // b 0., // beta z.as_mut_ptr(), ); assert_eq!(z, vec![7., 10., 15., 22.]); } #[inline] #[cfg(feature = "mkl")] pub fn cblas_dgemm_wrapper( trans_a: bool, trans_b: bool, m: usize, n: usize, k: usize, alpha: f64, a: *const f64, b: *const f64, beta: f64, c: *mut f64, ) { let lda = if trans_a { m } else { k } as MklInt; let ldb = if trans_b { k } else { n } as MklInt; let ldc = n as MklInt; let trans_a = if trans_a { CblasTranspose::CblasTrans } else 
{ CblasTranspose::CblasNoTrans }; let trans_b = if trans_b { CblasTranspose::CblasTrans } else { CblasTranspose::CblasNoTrans }; unsafe { const CBLAS_ROW_MAGER: usize = 101; cblas_dgemm( CBLAS_ROW_MAGER, trans_a, trans_b, m as MklInt, n as MklInt, k as MklInt, alpha, a, lda, b, ldb, beta, c, ldc, ); } } #[inline] #[cfg(feature = "mkl")] pub fn cblas_sgemm_batch_wrapper( trans_a: bool, trans_b: bool, m: usize, n: usize, k: usize, alpha: &[f32], a_array: Vec<*const f32>, b_array: Vec<*const f32>, beta: &[f32], c_array: Vec<*const f32>, group_count: usize, size_per_group: usize, ) { let size_per_group = size_per_group as usize; let lda = if trans_a { m } else { k } as MklInt; let ldb = if trans_b { k } else { n } as MklInt; let ldc = n as MklInt; let trans_a = if trans_a { CblasTranspose::CblasTrans } else { CblasTranspose::CblasNoTrans }; let trans_b = if trans_b { CblasTranspose::CblasTrans } else { CblasTranspose::CblasNoTrans }; unsafe { const CBLAS_ROW_MAGER: usize = 101; cblas_sgemm_batch( CBLAS_ROW_MAGER, vec![trans_a; group_count].as_slice().as_ptr(), vec![trans_b; group_count].as_slice().as_ptr(), vec![m as MklInt; group_count].as_slice().as_ptr(), vec![n as MklInt; group_count].as_slice().as_ptr(), vec![k as MklInt; group_count].as_slice().as_ptr(), alpha.as_ptr(), mem::transmute(a_array.as_slice().as_ptr()), // safe vec![lda as MklInt; group_count].as_slice().as_ptr(), mem::transmute(b_array.as_slice().as_ptr()), // safe vec![ldb as MklInt; group_count].as_slice().as_ptr(), beta.as_ptr(), mem::transmute(c_array.as_slice().as_ptr()), // ??? 
vec![ldc as MklInt; group_count].as_slice().as_ptr(), group_count as MklInt, vec![size_per_group as MklInt; group_count] .as_slice() .as_ptr(), ); } } #[inline] #[cfg(feature = "mkl")] pub fn cblas_dgemm_batch_wrapper( trans_a: bool, trans_b: bool, m: usize, n: usize, k: usize, alpha: &[f64], a_array: Vec<*const f64>, b_array: Vec<*const f64>, beta: &[f64], c_array: Vec<*const f64>, group_count: usize, size_per_group: usize, ) { let size_per_group = size_per_group as usize; let lda = if trans_a { m } else { k } as MklInt; let ldb = if trans_b { k } else { n } as MklInt; let ldc = n as MklInt; let trans_a = if trans_a { CblasTranspose::CblasTrans } else { CblasTranspose::CblasNoTrans }; let trans_b = if trans_b { CblasTranspose::CblasTrans } else { CblasTranspose::CblasNoTrans }; unsafe { const CBLAS_ROW_MAGER: usize = 101; cblas_dgemm_batch( CBLAS_ROW_MAGER, vec![trans_a; group_count].as_slice().as_ptr(), vec![trans_b; group_count].as_slice().as_ptr(), vec![m as MklInt; group_count].as_slice().as_ptr(), vec![n as MklInt; group_count].as_slice().as_ptr(), vec![k as MklInt; group_count].as_slice().as_ptr(), alpha.as_ptr(), mem::transmute(a_array.as_slice().as_ptr()), // safe vec![lda as MklInt; group_count].as_slice().as_ptr(), mem::transmute(b_array.as_slice().as_ptr()), // safe vec![ldb as MklInt; group_count].as_slice().as_ptr(), beta.as_ptr(), mem::transmute(c_array.as_slice().as_ptr()), // ??? 
vec![ldc as MklInt; group_count].as_slice().as_ptr(), group_count as MklInt, vec![size_per_group as MklInt; group_count] .as_slice() .as_ptr(), ); } } #[test] #[cfg(feature = "mkl")] fn test_dgemm_batch_trans_a() { let batch = 2; let w = vec![0., 1., 2., 3., 4., 5.]; // (2, 3) let x = vec![0., 1., 2., 3., 4., 5., 6., 7.]; // (2, 2, 2) let z = uninitialized_vec::<f64>(12); // (2, 2, 2) let m = 3; // row of op(a) let n = 2; // col of op(b) let k = 2; // col of op(a) cblas_dgemm_batch_wrapper( true, false, m, n, k, &[1.], // alpha vec![&w[0], &w[0]], // a get_region_heads(batch, &x), &[0.], // beta get_region_heads(batch, &z), 1, batch, ); assert_eq!( z, vec![6., 9., 8., 13., 10., 17., 18., 21., 28., 33., 38., 45.] ); } #[test] #[cfg(feature = "mkl")] fn test_dgemm_batch() { let batch = 2; let x = vec![0., 1., 2., 3.]; // (2, 2) let y = vec![0., 1., 2., 3., 4., 5., 6., 7.]; // (2, 2, 2) let z = uninitialized_vec::<f64>(8); // (2, 2, 2) cblas_dgemm_batch_wrapper( false, false, 2, // m 2, // n 2, // k &[1.], // alpha vec![&x[0], &x[0]], // a get_region_heads(batch, &y), // b &[0.], // beta get_region_heads(batch, &z), // c 1, batch, ); assert_eq!(z, vec![2., 3., 6., 11., 6., 7., 26., 31.]); } // `Tensordot` is implemented in `ops/mod.rs`. pub struct MatMul { pub transpose_a: bool, pub transpose_b: bool, } pub struct BatchMatMul { pub transpose_a: bool, pub transpose_b: bool, } #[inline] #[doc(hidden)] pub fn uninitialized_vec<T: Float>(size: usize) -> Vec<T> { let mut buf = Vec::with_capacity(size); unsafe { buf.set_len(size); } buf } #[cfg(feature = "mkl")] macro_rules! 
mkl_mm { ($f:expr, $x0:expr, $x1:expr, $x0_shape:expr, $x1_shape:expr, $self:expr, $typ:ty) => {{ let row0 = $x0_shape[0]; // rows of a let col0 = $x0_shape[1]; // cols of a let row1 = $x1_shape[0]; // rows of b let col1 = $x1_shape[1]; // cols of b let m = if $self.transpose_a { col0 } else { row0 }; let n = if $self.transpose_b { row1 } else { col1 }; let k = if $self.transpose_a { row0 } else { col0 }; let ret_row = if $self.transpose_a { col0 } else { row0 }; let ret_col = if $self.transpose_b { $x1_shape[0] } else { col1 }; let mut c = uninitialized_vec::<T>(ret_row * ret_col); $f( $self.transpose_a, $self.transpose_b, m, n, k, 1., $x0.as_ptr() as *const $typ, $x1.as_ptr() as *const $typ, 0., c.as_mut_ptr() as *mut $typ, ); vec![Ok(NdArray::from_shape_vec( ndarray::IxDyn(&[ret_row, ret_col]), c, ) .unwrap())] }}; } #[cfg(feature = "mkl")] macro_rules! mkl_batch_mm { ($f:expr, $x0:expr, $x1:expr, $row0:expr, $col0:expr, $row1:expr, $col1:expr, $ret_shape:expr, $self:expr, $batch_size:expr) => {{ let m = if $self.transpose_a { $col0 } else { $row0 }; // rows of a let n = if $self.transpose_b { $row1 } else { $col1 }; // cols of b let k = if $self.transpose_a { $row0 } else { $col0 }; // cols of a let ret = uninitialized_vec($ret_shape.iter().product()); $f( $self.transpose_a, $self.transpose_b, m, n, k, &[1.], get_region_heads($batch_size, $x0.as_slice().expect("Not standard layout")), // a array get_region_heads($batch_size, $x1.as_slice().expect("Not standard layout")), // b array &[0.], get_region_heads($batch_size, ret.as_slice()), // c array 1, $batch_size, ); vec![Ok(NdArray::from_shape_vec( ndarray::IxDyn($ret_shape.as_slice()), ret, ) .unwrap())] }}; } impl<T: Float> op::Op<T> for MatMul { fn name(&self) -> &str { "MatMul" } fn compute(&self, ctx: ::runtime::OpComputeContext<T>) -> op::ComputeResult<T> { let xs = ctx.grab_inputs(); let x0 = xs[0]; let x1 = xs[1]; let x0_shape = x0.shape(); let x1_shape = x1.shape(); assert_eq!( x0_shape.len(), 2, "First 
input to matmul should be a matrix" ); assert_eq!( x1_shape.len(), 2, "Second input to matmul should be a matrix" ); #[cfg(feature = "mkl")] { if same_type::<T, f32>() { mkl_mm!(cblas_sgemm_wrapper, x0, x1, x0_shape, x1_shape, self, f32) } else if same_type::<T, f64>() { mkl_mm!(cblas_dgemm_wrapper, x0, x1, x0_shape, x1_shape, self, f64) } else { panic!("gemm supports only f32 and f64.") } } #[cfg(not(feature = "mkl"))] { let x0_view = x0.view(); let x1_view = x1.view(); // unwrap is always safe let mut a = x0_view.into_shape((x0_shape[0], x0_shape[1])).unwrap(); let mut b = x1_view.into_shape((x1_shape[0], x1_shape[1])).unwrap(); if self.transpose_a { a.swap_axes(0, 1); } if self.transpose_b { b.swap_axes(0, 1); } vec![Ok(a.dot(&b).into_dyn())] } } fn grad(&self, gy: &Tensor<T>, inputs: &[&Tensor<T>], _: &Tensor<T>) -> Vec<Option<Tensor<T>>> { let opa = Tensor::builder() .set_inputs(vec![gy, inputs[1]]) .build(MatMul { transpose_a: false, transpose_b: true, }); let opb = Tensor::builder() .set_inputs(vec![inputs[0], gy]) .build(MatMul { transpose_a: true, transpose_b: false, }); vec![Some(opa), Some(opb)] } } #[inline] pub fn get_region_heads<'a, A: Float, B>(batch_size: usize, slice: &[A]) -> Vec<*const B> { let head = slice.as_ptr(); let size_per_sample = slice.len() / batch_size; let mut ret = Vec::with_capacity(batch_size); for i in 0..batch_size { unsafe { ret.push(head.offset((i * size_per_sample) as isize) as *const B); } } ret } impl<T: Float> op::Op<T> for BatchMatMul { fn name(&self) -> &str { "BatchMatMul" } fn compute(&self, ctx: ::runtime::OpComputeContext<T>) -> op::ComputeResult<T> { let xs = ctx.grab_inputs(); let x0: &NdArray<T> = xs[0]; let x1: &NdArray<T> = xs[1]; let shape0 = x0.shape(); let shape1 = x1.shape(); let rank0 = x0.ndim(); let rank1 = x1.ndim(); if rank0 != rank1 || shape0[..rank0 - 2] != shape1[..rank0 - 2] { panic!("Input shapes mismatch: {:?} vs {:?}", shape0, shape1); } let row0 = shape0[rank0 - 2]; let col0 = shape0[rank0 - 1]; 
let col1 = shape1[rank0 - 1]; #[cfg(feature = "mkl")] { let batch_size: usize = shape0[..rank0 - 2].iter().product(); let row1 = shape1[rank1 - 2]; let ret_shape = { let mut ret = shape0.to_vec(); ret[rank0 - 2] = if self.transpose_a { col0 } else { row0 }; ret[rank0 - 1] = if self.transpose_b { row1 } else { col1 }; ret }; if same_type::<T, f32>() { mkl_batch_mm!( cblas_sgemm_batch_wrapper, x0, x1, row0, col0, row1, col1, ret_shape, self, batch_size ) } else if same_type::<T, f64>() { mkl_batch_mm!( cblas_dgemm_batch_wrapper, x0, x1, row0, col0, row1, col1, ret_shape, self, batch_size ) } else { panic!("gemm supports only f32 and f64.") } } #[cfg(not(feature = "mkl"))] { use ndarray_ext; use rayon::iter::*; // squashes dims (remains last two dims) // unwrap is always safe let x0_flattened = { let mut a = x0 .view() .into_shape((x0.len() / row0 / col0, row0, col0)) .unwrap(); if self.transpose_a { a.swap_axes(1, 2); } a }; let row1 = shape1[rank0 - 2]; let x1_flattened = { let mut b = x1 .view() .into_shape((x1.len() / row1 / col1, row1, col1)) .unwrap(); if self.transpose_b { b.swap_axes(1, 2); } b }; // parallel mm let dot = (0..x0_flattened.shape()[0] as isize) .into_par_iter() .map(|i| { let x0_mat = x0_flattened .slice(s![i..i + 1, .., ..]) .remove_axis(ndarray::Axis(0)) .to_owned(); let x1_mat = x1_flattened .slice(s![i..i + 1, .., ..]) .remove_axis(ndarray::Axis(0)) .to_owned(); x0_mat.dot(&x1_mat).into_dyn() }) .collect::<Vec<_>>(); // owned to ref let mut dot_view = Vec::with_capacity(dot.len()); for i in 0..dot.len() { dot_view.push(ndarray_ext::expand_dims_view(dot[i].view(), 0)); } // stack dot result let stacked = ndarray::stack(ndarray::Axis(0), dot_view.as_slice()).unwrap(); let dst_shape = { let stacked_shape = stacked.shape(); shape0[..rank0 - 2] .into_iter() .chain(&[stacked_shape[1], stacked_shape[2]]) .cloned() .collect::<Vec<usize>>() }; // reshape to dst shape with safe unwrapping vec![Ok(stacked 
.into_shape(ndarray::IxDyn(dst_shape.as_slice())) .unwrap())] } } fn grad(&self, gy: &Tensor<T>, inputs: &[&Tensor<T>], _: &Tensor<T>) -> Vec<Option<Tensor<T>>> { let opa = Tensor::builder() .set_inputs(vec![gy, inputs[1]]) .build(BatchMatMul { transpose_a: false, transpose_b: true, }); let opb = Tensor::builder() .set_inputs(vec![inputs[0], gy]) .build(BatchMatMul { transpose_a: true, transpose_b: false, }); vec![Some(opa), Some(opb)] } }
29.536723
100
0.475851
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_sgemm() {\n let x = vec![1., 2., 3., 4.]; // (2, 2)\n let y = vec![1., 2., 3., 4.]; // (2, 2)\n let mut z = uninitialized_vec::<f32>(4); // (2, 2, 2)\n\n cblas_sgemm_wrapper(\n false,\n false,\n 2, // m\n 2, // n\n 2, // k\n 1., // alpha\n x.as_ptr(),\n y.as_ptr(), // b\n 0., // beta\n z.as_mut_ptr(),\n );\n assert_eq!(z, vec![7., 10., 15., 22.]);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_dgemm_batch_trans_a() {\n let batch = 2;\n let w = vec![0., 1., 2., 3., 4., 5.]; // (2, 3)\n let x = vec![0., 1., 2., 3., 4., 5., 6., 7.]; // (2, 2, 2)\n let z = uninitialized_vec::<f64>(12); // (2, 2, 2)\n let m = 3; // row of op(a)\n let n = 2; // col of op(b)\n let k = 2; // col of op(a)\n cblas_dgemm_batch_wrapper(\n true,\n false,\n m,\n n,\n k,\n &[1.], // alpha\n vec![&w[0], &w[0]], // a\n get_region_heads(batch, &x),\n &[0.], // beta\n get_region_heads(batch, &z),\n 1,\n batch,\n );\n assert_eq!(\n z,\n vec![6., 9., 8., 13., 10., 17., 18., 21., 28., 33., 38., 45.]\n );\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_dgemm_batch() {\n let batch = 2;\n let x = vec![0., 1., 2., 3.]; // (2, 2)\n let y = vec![0., 1., 2., 3., 4., 5., 6., 7.]; // (2, 2, 2)\n let z = uninitialized_vec::<f64>(8); // (2, 2, 2)\n\n cblas_dgemm_batch_wrapper(\n false,\n false,\n 2, // m\n 2, // n\n 2, // k\n &[1.], // alpha\n vec![&x[0], &x[0]], // a\n get_region_heads(batch, &y), // b\n &[0.], // beta\n get_region_heads(batch, &z), // c\n 1,\n batch,\n );\n assert_eq!(z, vec![2., 3., 6., 11., 6., 7., 26., 31.]);\n}\n}" ]
f709bbcdd08606d48ab25882256be89f835dc7b5
5,315
rs
Rust
gladis/src/lib.rs
xou816/gladis
85cf17f51bd42b3f964769ef763c3cb102091c22
[ "Apache-2.0", "MIT" ]
null
null
null
gladis/src/lib.rs
xou816/gladis
85cf17f51bd42b3f964769ef763c3cb102091c22
[ "Apache-2.0", "MIT" ]
null
null
null
gladis/src/lib.rs
xou816/gladis
85cf17f51bd42b3f964769ef763c3cb102091c22
[ "Apache-2.0", "MIT" ]
null
null
null
//! Easily import Glade-generated UI files into Rust code. //! //! ``` //! use gtk::prelude::*; //! use gladis::Gladis; //! //! const GLADE_SRC: &str = r#" //! <?xml version="1.0" encoding="UTF-8"?> //! <!-- Generated with glade 3.22.2 --> //! <interface> //! <requires lib="gtk+" version="3.20"/> //! <object class="GtkApplicationWindow" id="window"> //! <property name="can_focus">False</property> //! <child type="titlebar"> //! <placeholder/> //! </child> //! <child> //! <object class="GtkLabel" id="label"> //! <property name="visible">True</property> //! <property name="can_focus">False</property> //! <property name="label" translatable="yes">label</property> //! </object> //! </child> //! </object> //! </interface>"#; //! //! #[derive(Gladis, Clone)] //! pub struct Window { //! pub window: gtk::ApplicationWindow, //! pub label: gtk::Label, //! } //! //! gtk::init().unwrap(); //! let _ui = Window::from_string(GLADE_SRC).unwrap(); //! ``` use std::{error::Error, fmt::Display}; #[derive(Debug, Clone)] pub struct NotFoundError { pub identifier: String, pub typ: String, } impl Display for NotFoundError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "identifier {} of type {} was not found", self.identifier, self.typ ) } } impl Error for NotFoundError {} #[derive(Debug, Clone)] pub enum GladisError { NotFound(NotFoundError), } impl Display for GladisError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { GladisError::NotFound(e) => write!(f, "not found error: {}", e), } } } impl Error for GladisError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { GladisError::NotFound(e) => Some(e), } } } impl GladisError { pub fn not_found<T>(identifier: T, typ: T) -> Self where T: ToString, { let identifier = identifier.to_string(); let typ = typ.to_string(); GladisError::NotFound(NotFoundError { identifier, typ }) } } pub type Result<T> = std::result::Result<T, GladisError>; pub trait Gladis { 
//! A trait to load a struct from a builder. //! //! # Automatic implementation //! //! This trait wakes little sense alone, but truly show its power when used //! with the [gladis_proc_macro](https://docs.rs/gladis_proc_macro) crate //! and its `#[derive(Gladis)]` macro. //! //! ``` //! use gtk::prelude::*; //! use gladis::Gladis; //! //! #[derive(Gladis, Clone)] //! pub struct Window { //! pub window: gtk::ApplicationWindow, //! pub label: gtk::Label, //! } //! ``` //! //! # Manual implementation //! //! Below is an example of manual implementation of the trait. //! //! ``` //! use gtk::prelude::*; //! use gladis::{Gladis, Result, GladisError}; //! //! pub struct Window { //! pub window: gtk::ApplicationWindow, //! pub label: gtk::Label, //! } //! //! impl Gladis for Window { //! fn from_builder(builder: gtk::Builder) -> Result<Self> { //! let window: gtk::ApplicationWindow = builder //! .get_object("window") //! .ok_or(GladisError::not_found("window", "gtk::ApplicationWindow"))?; //! //! let label: gtk::Label = builder //! .get_object("label") //! .ok_or(GladisError::not_found("label", "gtk::Label"))?; //! //! Ok(Self { window, label }) //! } //! } //! ``` /// Populate struct from a builder. /// /// This method should not be called directly but is used as a common /// function for the `from_string` and `from_resource` functions to /// share the same code. fn from_builder(builder: gtk::Builder) -> Result<Self> where Self: std::marker::Sized; /// Populate struct from a Glade document. fn from_string(src: &str) -> Result<Self> where Self: std::marker::Sized, { let builder = gtk::Builder::from_string(src); Gladis::from_builder(builder) } /// Populate struct from a Glade document as a resource. fn from_resource(resource_path: &str) -> Result<Self> where Self: std::marker::Sized, { let builder = gtk::Builder::from_resource(resource_path); Gladis::from_builder(builder) } } // Re-export #[derive(Gladis)]. 
#[cfg(feature = "derive")] #[doc(hidden)] pub use gladis_proc_macro::Gladis; #[cfg(test)] mod tests { use crate::{GladisError, NotFoundError}; #[test] fn fmt_not_found_error() { let err = NotFoundError { identifier: "foo".to_string(), typ: "bar".to_string(), }; assert_eq!(err.to_string(), "identifier foo of type bar was not found"); } #[test] fn fmt_gladis_error() { let err = GladisError::NotFound(NotFoundError { identifier: "foo".to_string(), typ: "bar".to_string(), }); assert_eq!(err.to_string(), "not found error: identifier foo of type bar was not found"); } }
27.396907
97
0.557479
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fmt_not_found_error() {\n let err = NotFoundError {\n identifier: \"foo\".to_string(),\n typ: \"bar\".to_string(),\n };\n assert_eq!(err.to_string(), \"identifier foo of type bar was not found\");\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fmt_gladis_error() {\n let err = GladisError::NotFound(NotFoundError {\n identifier: \"foo\".to_string(),\n typ: \"bar\".to_string(),\n });\n assert_eq!(err.to_string(), \"not found error: identifier foo of type bar was not found\");\n }\n}" ]
f709d0e63b80a3c1b2a379da98bdf3414f921a01
8,198
rs
Rust
openexr-rs/src/keycode.rs
luke-titley/openexr-bind
4cbffb2e38fe084350d15d9e3e4acfb4a280d581
[ "Apache-2.0" ]
null
null
null
openexr-rs/src/keycode.rs
luke-titley/openexr-bind
4cbffb2e38fe084350d15d9e3e4acfb4a280d581
[ "Apache-2.0" ]
null
null
null
openexr-rs/src/keycode.rs
luke-titley/openexr-bind
4cbffb2e38fe084350d15d9e3e4acfb4a280d581
[ "Apache-2.0" ]
null
null
null
use openexr_sys as sys; use crate::Error; type Result<T, E = Error> = std::result::Result<T, E>; /// A KeyCode object uniquely identifies a motion picture film frame. /// The following fields specifiy film manufacturer, film type, film /// roll and the frame's position within the roll. /// /// # Fields /// /// * `film_mfc_code` - Film manufacturer code. /// Range: `[0, 99]` /// * `filmType` - Film type code. /// Range: `[0, 99]` /// * `prefix` - Prefix to identify film roll. /// Range: `[0, 999999]` /// * `count` - Count, increments once every perfs_per_count perforations. /// Range: `[0, 9999]` /// * `perf_offset` - Offset of frame, in perforations from zero-frame reference mark /// Range: `[0, 119]` /// * `perfs_per_frame` - Number of perforations per frame. Typical values are 1 for 16mm film; 3, 4 or 8 for 35mm film; 5, 8 or 15 for 65mm film. /// Range: `[1, 15]` /// * `perfs_per_count` - Number of perforations per count. Typical values are 20 for 16mm film, 64 for 35mm film, 80 or 120 for 65mm film. /// Range: `[20, 120]` /// /// # Further Reading /// For more information about the interpretation of those fields see /// the following standards and recommended practice publications: /// * SMPTE 254 Motion-Picture Film (35-mm) - Manufacturer-Printed /// Latent Image Identification Information /// * SMPTE 268M File Format for Digital Moving-Picture Exchange (DPX) /// (section 6.1) /// * SMPTE 270 Motion-Picture Film (65-mm) - Manufacturer- Printed /// Latent Image Identification Information /// * SMPTE 271 Motion-Picture Film (16-mm) - Manufacturer- Printed /// Latent Image Identification Information /// #[repr(transparent)] pub struct KeyCode(sys::Imf_KeyCode_t); impl KeyCode { /// Get the film manufacturer code. Valid range `[0, 99]` /// pub fn film_mfc_code(&self) -> i32 { let mut v = 0i32; unsafe { sys::Imf_KeyCode_filmMfcCode(&self.0, &mut v) .into_result() .expect("Unexpected exception from Imf_KeyCode_filmMfcCode"); } v } /// Set the film manufacturer code. 
Valid range `[0, 99]` /// /// # Errors /// * [`Error::InvalidArgument`] - If `code` is not in the range `[0, 99]` /// /// TODO: Do we want to implement a bounded integer here to specify the range? pub fn set_film_mfc_code(&mut self, code: i32) -> Result<()> { unsafe { sys::Imf_KeyCode_setFilmMfcCode(&mut self.0, code).into_result()?; } Ok(()) } /// Get the film type code. Valid range `[0, 99]` /// pub fn film_type(&self) -> i32 { let mut v = 0i32; unsafe { sys::Imf_KeyCode_filmType(&self.0, &mut v) .into_result() .expect("Unexpected exception from Imf_KeyCode_filmType"); } v } /// Set the film type code. Valid range `[0, 99]` /// /// # Errors /// * [`Error::InvalidArgument`] - If `code` is not in the range `[0, 99]` /// pub fn set_film_type(&mut self, code: i32) -> Result<()> { unsafe { sys::Imf_KeyCode_setFilmType(&mut self.0, code).into_result()?; } Ok(()) } /// Get the prefix code which identifies the film roll. /// Valid range `[0, 999999]` /// pub fn prefix(&self) -> i32 { let mut v = 0i32; unsafe { sys::Imf_KeyCode_filmType(&self.0, &mut v) .into_result() .expect("Unexpected exception from Imf_KeyCode_filmType"); } v } /// Set the prefix code which identifies the film roll. /// Valid range `[0, 999999]` /// /// # Errors /// * [`Error::InvalidArgument`] - If `code` is not in the range `[0, 999999]` /// pub fn set_prefix(&mut self, v: i32) -> Result<()> { unsafe { sys::Imf_KeyCode_setPrefix(&mut self.0, v).into_result()?; } Ok(()) } /// Get the count, which increments every `perfs_per_count` perforations. /// Valid range [0, 9999] /// pub fn count(&self) -> i32 { let mut v = 0i32; unsafe { sys::Imf_KeyCode_filmType(&self.0, &mut v) .into_result() .expect("Unexpected exception from Imf_KeyCode_filmType"); } v } /// Set the count, which increments every `perfs_per_count` perforations. 
/// Valid range [0, 9999] /// /// # Errors /// * [`Error::InvalidArgument`] - If `count` is not in the range `[0, 9999]` /// pub fn set_count(&mut self, count: i32) -> Result<()> { unsafe { sys::Imf_KeyCode_setCount(&mut self.0, count).into_result()?; } Ok(()) } /// Get the offset of the frame in perforations from the zero-frame reference mark. /// Valid range [0, 119] /// pub fn perf_offset(&self) -> i32 { let mut v = 0i32; unsafe { sys::Imf_KeyCode_filmType(&self.0, &mut v) .into_result() .expect("Unexpected exception from Imf_KeyCode_filmType"); } v } /// Set the offset of the frame in perforations from the zero-frame reference mark. /// Valid range [0, 119] /// /// # Errors /// * [`Error::InvalidArgument`] - If `offset` is not in the range `[0, 119]` /// pub fn set_perf_offset(&mut self, offset: i32) -> Result<()> { unsafe { sys::Imf_KeyCode_setPerfOffset(&mut self.0, offset) .into_result()?; } Ok(()) } /// Get the number of perforations per frame. /// Valid range [1, 15] /// /// Typical values: /// * 1 for 16mm film /// * 3, 4 or 8 for 35mm film /// * 5, 8, or 15 for 65mm film /// pub fn perfs_per_frame(&self) -> i32 { let mut v = 0i32; unsafe { sys::Imf_KeyCode_filmType(&self.0, &mut v) .into_result() .expect("Unexpected exception from Imf_KeyCode_filmType"); } v } /// Set the number of perforations per frame. /// Valid range [1, 15] /// /// Typical values: /// * 1 for 16mm film /// * 3, 4 or 8 for 35mm film /// * 5, 8, or 15 for 65mm film /// /// # Errors /// * [`Error::InvalidArgument`] - If `perfs` is not in the range `[1, 15]` /// pub fn set_perfs_per_frame(&mut self, perfs: i32) -> Result<()> { unsafe { sys::Imf_KeyCode_setPerfsPerFrame(&mut self.0, perfs) .into_result()?; } Ok(()) } /// Get the number of perforations per count. 
/// Valid range [2, 120] /// /// Typical values: /// * 20 for 16mm film /// * 64 for 35mm film /// * 80 or 120 for 65mm film /// pub fn perfs_per_count(&self) -> i32 { let mut v = 0i32; unsafe { sys::Imf_KeyCode_filmType(&self.0, &mut v) .into_result() .expect("Unexpected exception from Imf_KeyCode_filmType"); } v } /// Set the number of perforations per count. /// Valid range [2, 120] /// /// Typical values: /// * 20 for 16mm film /// * 64 for 35mm film /// * 80 or 120 for 65mm film /// /// # Errors /// * [`Error::InvalidArgument`] - If `perfs` is not in the range `[2, 120]` /// pub fn set_perfs_per_count(&mut self, perfs: i32) -> Result<()> { unsafe { sys::Imf_KeyCode_setPerfsPerCount(&mut self.0, perfs) .into_result()?; } Ok(()) } } impl Default for KeyCode { fn default() -> Self { let mut inner = sys::Imf_KeyCode_t::default(); unsafe { sys::Imf_KeyCode_ctor(&mut inner, 0, 0, 0, 0, 0, 4, 64) .into_result() .expect("Unexpected exception from Imf_KeyCode_ctor"); } KeyCode(inner) } } #[cfg(test)] #[test] fn test_keycode() { let mut k = KeyCode::default(); assert!(k.set_film_mfc_code(-1).is_err()); assert!(k.set_film_mfc_code(1).is_ok()); assert_eq!(k.film_mfc_code(), 1); }
30.589552
146
0.552086
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_keycode() {\n let mut k = KeyCode::default();\n\n assert!(k.set_film_mfc_code(-1).is_err());\n assert!(k.set_film_mfc_code(1).is_ok());\n assert_eq!(k.film_mfc_code(), 1);\n}\n}" ]
f709ffce9a2b031f67b061b537005384c94f5db9
37,865
rs
Rust
src/rgb/schema/schema.rs
inbitcoin/rust-lnpbp
380807a81469a72f233c5dcc2129141280e3318a
[ "MIT" ]
null
null
null
src/rgb/schema/schema.rs
inbitcoin/rust-lnpbp
380807a81469a72f233c5dcc2129141280e3318a
[ "MIT" ]
null
null
null
src/rgb/schema/schema.rs
inbitcoin/rust-lnpbp
380807a81469a72f233c5dcc2129141280e3318a
[ "MIT" ]
null
null
null
// LNP/BP Rust Library // Written in 2020 by // Dr. Maxim Orlovsky <orlovsky@pandoracore.com> // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. This software is distributed without // any warranty. // // You should have received a copy of the MIT License // along with this software. // If not, see <https://opensource.org/licenses/MIT>. use std::collections::{BTreeMap, BTreeSet}; use std::io; use bitcoin::hashes::Hash; use super::{ vm, DataFormat, ExtensionSchema, GenesisSchema, OwnedRightType, PublicRightType, SimplicityScript, StateSchema, TransitionSchema, }; use crate::client_side_validation::{ commit_strategy, CommitEncodeWithStrategy, ConsensusCommit, }; use crate::features; // Here we can use usize since encoding/decoding makes sure that it's u16 pub type FieldType = usize; pub type ExtensionType = usize; pub type TransitionType = usize; static MIDSTATE_SHEMA_ID: [u8; 32] = [ 0x81, 0x73, 0x33, 0x7c, 0xcb, 0xc4, 0x8b, 0xd1, 0x24, 0x89, 0x65, 0xcd, 0xd0, 0xcd, 0xb6, 0xc8, 0x7a, 0xa2, 0x14, 0x81, 0x7d, 0x57, 0x39, 0x22, 0x28, 0x90, 0x74, 0x8f, 0x26, 0x75, 0x8e, 0xea, ]; sha256t_hash_newtype!( SchemaId, SchemaIdTag, MIDSTATE_SHEMA_ID, 64, doc = "Commitment-based schema identifier used for committing to the schema type", false ); #[derive(Clone, PartialEq, Debug, Default)] #[cfg_attr( feature = "serde", derive(Serialize, Deserialize), serde(crate = "serde_crate") )] pub struct Schema { #[cfg_attr( feature = "serde", serde(with = "serde_with::rust::display_fromstr") )] pub rgb_features: features::FlagVec, #[cfg_attr( feature = "serde", serde(with = "serde_with::rust::display_fromstr") )] pub root_id: SchemaId, pub field_types: BTreeMap<FieldType, DataFormat>, pub owned_right_types: BTreeMap<OwnedRightType, StateSchema>, pub public_right_types: BTreeSet<PublicRightType>, pub genesis: GenesisSchema, pub extensions: BTreeMap<ExtensionType, 
ExtensionSchema>, pub transitions: BTreeMap<TransitionType, TransitionSchema>, } impl Schema { #[inline] pub fn schema_id(&self) -> SchemaId { self.clone().consensus_commit() } // TODO: Change with the adoption of Simplicity #[inline] pub fn scripts(&self) -> SimplicityScript { vec![] } } impl ConsensusCommit for Schema { type Commitment = SchemaId; } impl CommitEncodeWithStrategy for Schema { type Strategy = commit_strategy::UsingStrict; } mod strict_encoding { use super::*; use crate::strict_encoding::{ strategies, Error, Strategy, StrictDecode, StrictEncode, }; // TODO: Use derive macros and generalized `tagged_hash!` in the future impl Strategy for SchemaId { type Strategy = strategies::HashFixedBytes; } impl StrictEncode for Schema { type Error = Error; fn strict_encode<E: io::Write>( &self, mut e: E, ) -> Result<usize, Self::Error> { Ok(strict_encode_list!(e; self.rgb_features, self.root_id, self.field_types, self.owned_right_types, self.public_right_types, self.genesis, self.extensions, self.transitions, // We keep this parameter for future script extended info (like ABI) Vec::<u8>::new() )) } } impl StrictDecode for Schema { type Error = Error; fn strict_decode<D: io::Read>(mut d: D) -> Result<Self, Self::Error> { let me = Self { rgb_features: features::FlagVec::strict_decode(&mut d)?, root_id: SchemaId::strict_decode(&mut d)?, field_types: BTreeMap::strict_decode(&mut d)?, owned_right_types: BTreeMap::strict_decode(&mut d)?, public_right_types: BTreeSet::strict_decode(&mut d)?, genesis: GenesisSchema::strict_decode(&mut d)?, extensions: BTreeMap::strict_decode(&mut d)?, transitions: BTreeMap::strict_decode(&mut d)?, }; // We keep this parameter for future script extended info (like ABI) let script = Vec::<u8>::strict_decode(&mut d)?; if !script.is_empty() { Err(Error::UnsupportedDataStructure( "Scripting information is not yet supported", )) } else { Ok(me) } } } } mod _validation { use super::*; use std::collections::BTreeSet; use 
crate::client_side_validation::Conceal; use crate::rgb::contract::nodes::PublicRights; use crate::rgb::schema::{ script, MetadataStructure, OwnedRightsStructure, PublicRightsStructure, SchemaVerify, }; use crate::rgb::{ validation, AssignmentAction, Assignments, Metadata, Node, NodeId, OwnedRights, OwnedState, ParentOwnedRights, ParentPublicRights, StateTypes, VirtualMachine, }; impl SchemaVerify for Schema { fn schema_verify(&self, root: &Schema) -> validation::Status { let mut status = validation::Status::new(); if root.root_id != SchemaId::default() { status.add_failure(validation::Failure::SchemaRootHierarchy( root.root_id, )); } for (field_type, data_format) in &self.field_types { match root.field_types.get(field_type) { None => status.add_failure( validation::Failure::SchemaRootNoFieldTypeMatch( *field_type, ), ), Some(root_data_format) if root_data_format != data_format => { status.add_failure( validation::Failure::SchemaRootNoFieldTypeMatch( *field_type, ), ) } _ => &status, }; } for (assignments_type, state_schema) in &self.owned_right_types { match root.owned_right_types.get(assignments_type) { None => status.add_failure( validation::Failure::SchemaRootNoOwnedRightTypeMatch(*assignments_type), ), Some(root_state_schema) if root_state_schema != state_schema => status .add_failure(validation::Failure::SchemaRootNoOwnedRightTypeMatch( *assignments_type, )), _ => &status, }; } for valencies_type in &self.public_right_types { match root.public_right_types.contains(valencies_type) { false => status.add_failure( validation::Failure::SchemaRootNoPublicRightTypeMatch( *valencies_type, ), ), _ => &status, }; } status += self.genesis.schema_verify(&root.genesis); for (transition_type, transition_schema) in &self.transitions { if let Some(root_transition_schema) = root.transitions.get(transition_type) { status += transition_schema.schema_verify(root_transition_schema); } else { status.add_failure( validation::Failure::SchemaRootNoTransitionTypeMatch( 
*transition_type, ), ); } } for (extension_type, extension_schema) in &self.extensions { if let Some(root_extension_schema) = root.extensions.get(extension_type) { status += extension_schema.schema_verify(root_extension_schema); } else { status.add_failure( validation::Failure::SchemaRootNoExtensionTypeMatch( *extension_type, ), ); } } status } } impl Schema { pub fn validate( &self, all_nodes: &BTreeMap<NodeId, &dyn Node>, node: &dyn Node, ) -> validation::Status { let node_id = node.node_id(); let empty_owned_structure = OwnedRightsStructure::default(); let empty_public_structure = PublicRightsStructure::default(); let ( metadata_structure, parent_owned_structure, parent_public_structure, assignments_structure, valencies_structure, ) = match (node.transition_type(), node.extension_type()) { (None, None) => { // Right now we do not have actions to implement; but later // we may have embedded procedures which must be verified // here /* if let Some(procedure) = self.genesis.abi.get(&GenesisAction::NoOp) { } */ ( &self.genesis.metadata, &empty_owned_structure, &empty_public_structure, &self.genesis.owned_rights, &self.genesis.public_rights ) }, (Some(transition_type), None) => { // Right now we do not have actions to implement; but later // we may have embedded procedures which must be verified // here /* if let Some(procedure) = transition_type.abi.get(&TransitionAction::NoOp) { } */ let transition_type = match self.transitions.get(&transition_type) { None => { return validation::Status::with_failure( validation::Failure::SchemaUnknownTransitionType( node_id, transition_type, ), ) } Some(transition_type) => transition_type, }; ( &transition_type.metadata, &transition_type.closes, &empty_public_structure, &transition_type.owned_rights, &transition_type.public_rights, ) } (None, Some(extension_type)) => { // Right now we do not have actions to implement; but later // we may have embedded procedures which must be verified // here /* if let Some(procedure) = 
extension_type.abi.get(&ExtensionAction::NoOp) { } */ let extension_type = match self.extensions.get(&extension_type) { None => { return validation::Status::with_failure( validation::Failure::SchemaUnknownExtensionType( node_id, extension_type, ), ) } Some(extension_type) => extension_type, }; ( &extension_type.metadata, &empty_owned_structure, &extension_type.extends, &extension_type.owned_rights, &extension_type.extends, ) } _ => unreachable!("Node can't be extension and state transition at the same time"), }; let mut status = validation::Status::new(); let parent_owned_rights = extract_parent_owned_rights( all_nodes, node.parent_owned_rights(), &mut status, ); let parent_public_rights = extract_parent_public_rights( all_nodes, node.parent_public_rights(), &mut status, ); status += self.validate_meta( node_id, node.metadata(), metadata_structure, ); status += self.validate_parent_owned_rights( node_id, &parent_owned_rights, parent_owned_structure, ); status += self.validate_parent_public_rights( node_id, &parent_public_rights, parent_public_structure, ); status += self.validate_owned_rights( node_id, node.owned_rights(), assignments_structure, ); status += self.validate_public_rights( node_id, node.public_rights(), valencies_structure, ); status += self.validate_state_evolution( node_id, node.transition_type(), &parent_owned_rights, node.owned_rights(), node.metadata(), ); status } fn validate_meta( &self, node_id: NodeId, metadata: &Metadata, metadata_structure: &MetadataStructure, ) -> validation::Status { let mut status = validation::Status::new(); metadata .keys() .collect::<BTreeSet<_>>() .difference(&metadata_structure.keys().collect()) .for_each(|field_id| { status.add_failure( validation::Failure::SchemaUnknownFieldType( node_id, **field_id, ), ); }); for (field_type_id, occ) in metadata_structure { let set = metadata.get(field_type_id).cloned().unwrap_or(bset!()); // Checking number of field occurrences if let Err(err) = occ.check(set.len() as u16) { 
status.add_failure( validation::Failure::SchemaMetaOccurencesError( node_id, *field_type_id, err, ), ); } let field = self.field_types.get(field_type_id) .expect("If the field were absent, the schema would not be able to pass the internal validation and we would not reach this point"); for data in set { status += field.validate(*field_type_id, &data); } } status } fn validate_parent_owned_rights( &self, node_id: NodeId, owned_rights: &OwnedRights, owned_rights_structure: &OwnedRightsStructure, ) -> validation::Status { let mut status = validation::Status::new(); owned_rights .keys() .collect::<BTreeSet<_>>() .difference(&owned_rights_structure.keys().collect()) .for_each(|owned_type_id| { status.add_failure( validation::Failure::SchemaUnknownOwnedRightType( node_id, **owned_type_id, ), ); }); for (owned_type_id, occ) in owned_rights_structure { let len = owned_rights .get(owned_type_id) .map(Assignments::len) .unwrap_or(0); // Checking number of ancestor's assignment occurrences if let Err(err) = occ.check(len as u16) { status.add_failure( validation::Failure::SchemaParentOwnedRightOccurencesError( node_id, *owned_type_id, err, ), ); } } status } fn validate_parent_public_rights( &self, node_id: NodeId, public_rights: &PublicRights, public_rights_structure: &PublicRightsStructure, ) -> validation::Status { let mut status = validation::Status::new(); public_rights.difference(&public_rights_structure).for_each( |public_type_id| { status.add_failure( validation::Failure::SchemaUnknownPublicRightType( node_id, *public_type_id, ), ); }, ); status } fn validate_owned_rights( &self, node_id: NodeId, owned_rights: &OwnedRights, owned_rights_structure: &OwnedRightsStructure, ) -> validation::Status { let mut status = validation::Status::new(); owned_rights .keys() .collect::<BTreeSet<_>>() .difference(&owned_rights_structure.keys().collect()) .for_each(|assignment_type_id| { status.add_failure( validation::Failure::SchemaUnknownOwnedRightType( node_id, **assignment_type_id, 
), ); }); for (owned_type_id, occ) in owned_rights_structure { let len = owned_rights .get(owned_type_id) .map(Assignments::len) .unwrap_or(0); // Checking number of assignment occurrences if let Err(err) = occ.check(len as u16) { status.add_failure( validation::Failure::SchemaOwnedRightOccurencesError( node_id, *owned_type_id, err, ), ); } let assignment = &self .owned_right_types .get(owned_type_id) .expect("If the assignment were absent, the schema would not be able to pass the internal validation and we would not reach this point") .format; match owned_rights.get(owned_type_id) { None => {} Some(Assignments::Declarative(set)) => { set.into_iter().for_each(|data| { status += assignment.validate( &node_id, *owned_type_id, data, ) }) } Some(Assignments::DiscreteFiniteField(set)) => { set.into_iter().for_each(|data| { status += assignment.validate( &node_id, *owned_type_id, data, ) }) } Some(Assignments::CustomData(set)) => { set.into_iter().for_each(|data| { status += assignment.validate( &node_id, *owned_type_id, data, ) }) } }; } status } fn validate_public_rights( &self, node_id: NodeId, public_rights: &PublicRights, public_rights_structure: &PublicRightsStructure, ) -> validation::Status { let mut status = validation::Status::new(); public_rights.difference(&public_rights_structure).for_each( |public_type_id| { status.add_failure( validation::Failure::SchemaUnknownPublicRightType( node_id, *public_type_id, ), ); }, ); status } fn validate_state_evolution( &self, node_id: NodeId, transition_type: Option<TransitionType>, parent_owned_rights: &OwnedRights, owned_rights: &OwnedRights, metadata: &Metadata, ) -> validation::Status { let mut status = validation::Status::new(); let owned_right_types: BTreeSet<&OwnedRightType> = parent_owned_rights .keys() .chain(owned_rights.keys()) .collect(); for owned_type_id in owned_right_types { let abi = &self .owned_right_types .get(&owned_type_id) .expect("We already passed owned rights type validation, so can be sure that 
the type exists") .abi; // If the procedure is not defined, it means no validation // should be performed if let Some(procedure) = abi.get(&AssignmentAction::Validate) { match procedure { script::Procedure::Embedded(proc) => { let mut vm = vm::Embedded::with( transition_type, parent_owned_rights .get(&owned_type_id) .cloned(), owned_rights.get(&owned_type_id).cloned(), metadata.clone(), ); vm.execute(*proc); match vm.pop_stack().and_then(|x| x.downcast_ref::<u8>().cloned()) { None => panic!("LNP/BP core code is hacked: standard procedure must always return 8-bit value"), Some(0) => { // Nothing to do here: 0 signifies successful script execution }, Some(n) => { status.add_failure(validation::Failure::ScriptFailure(node_id, n)); } } } script::Procedure::Simplicity { .. } => { status.add_failure(validation::Failure::SimplicityIsNotSupportedYet); /* Draft of how this could look like: let mut vm = VirtualMachine::new(); vm.push_stack(previous_state.get(&assignment_type).cloned()); vm.push_stack(current_state.get(&assignment_type).cloned()); vm.push_stack(previous_meta.clone()); vm.push_stack(current_meta.clone()); match vm.execute(code.clone(), offset) { Err(_) => {} Ok => match vm.pop_stack() { None => {} Some(value) => {} }, } */ } } } } // We do not validate public rights, since they do not have an // associated state and there is nothing to validate beyond schema status } } fn extract_parent_owned_rights( nodes: &BTreeMap<NodeId, &dyn Node>, parent_owned_rights: &ParentOwnedRights, status: &mut validation::Status, ) -> OwnedRights { let mut owned_rights = OwnedRights::new(); for (id, details) in parent_owned_rights { let parent_node = match nodes.get(id) { None => { status.add_failure(validation::Failure::TransitionAbsent( *id, )); continue; } Some(node) => node, }; fn filter<STATE>( set: &Vec<OwnedState<STATE>>, indexes: &Vec<u16>, ) -> Vec<OwnedState<STATE>> where STATE: StateTypes + Clone, STATE::Confidential: PartialEq + Eq, STATE::Confidential: 
From<<STATE::Revealed as Conceal>::Confidential>, { set.into_iter() .enumerate() .filter_map(|(index, item)| { if indexes.contains(&(index as u16)) { Some(item.clone()) } else { None } }) .collect() }; for (type_id, indexes) in details { match parent_node.owned_rights_by_type(*type_id) { Some(Assignments::Declarative(set)) => { let set = filter(set, indexes); owned_rights .entry(*type_id) .or_insert(Assignments::Declarative( Default::default(), )) .declarative_state_mut() .map(|state| state.extend(set)); } Some(Assignments::DiscreteFiniteField(set)) => { let set = filter(set, indexes); owned_rights .entry(*type_id) .or_insert(Assignments::DiscreteFiniteField( Default::default(), )) .discrete_state_mut() .map(|state| state.extend(set)); } Some(Assignments::CustomData(set)) => { let set = filter(set, indexes); owned_rights .entry(*type_id) .or_insert(Assignments::CustomData( Default::default(), )) .custom_state_mut() .map(|state| state.extend(set)); } None => { // Presence of the required owned rights type in the // parent node was already validated; we have nothing to // report here } } } } owned_rights } fn extract_parent_public_rights( nodes: &BTreeMap<NodeId, &dyn Node>, parent_public_rights: &ParentPublicRights, status: &mut validation::Status, ) -> PublicRights { let mut public_rights = PublicRights::new(); for (id, public_right_types) in parent_public_rights { if nodes.get(id).is_none() { status.add_failure(validation::Failure::TransitionAbsent(*id)); } else { public_rights.extend(public_right_types); } } public_rights } } #[cfg(test)] pub(crate) mod test { use amplify::Wrapper; use super::*; use crate::bp::tagged_hash; use crate::rgb::schema::*; use crate::strict_encoding::*; pub(crate) fn schema() -> Schema { const FIELD_TICKER: usize = 0; const FIELD_NAME: usize = 1; const FIELD_DESCRIPTION: usize = 2; const FIELD_TOTAL_SUPPLY: usize = 3; const FIELD_ISSUED_SUPPLY: usize = 4; const FIELD_DUST_LIMIT: usize = 5; const FIELD_PRECISION: usize = 6; const 
FIELD_PRUNE_PROOF: usize = 7; const FIELD_TIMESTAMP: usize = 8; const FIELD_PROOF_OF_BURN: usize = 0x10; const ASSIGNMENT_ISSUE: usize = 0; const ASSIGNMENT_ASSETS: usize = 1; const ASSIGNMENT_PRUNE: usize = 2; const TRANSITION_ISSUE: usize = 0; const TRANSITION_TRANSFER: usize = 1; const TRANSITION_PRUNE: usize = 2; const VALENCIES_DECENTRALIZED_ISSUE: usize = 0; const EXTENSION_DECENTRALIZED_ISSUE: usize = 0; Schema { rgb_features: features::FlagVec::default(), root_id: Default::default(), field_types: bmap! { FIELD_TICKER => DataFormat::String(16), FIELD_NAME => DataFormat::String(256), FIELD_DESCRIPTION => DataFormat::String(1024), FIELD_TOTAL_SUPPLY => DataFormat::Unsigned(Bits::Bit64, 0, core::u64::MAX as u128), FIELD_PRECISION => DataFormat::Unsigned(Bits::Bit64, 0, 18u128), FIELD_ISSUED_SUPPLY => DataFormat::Unsigned(Bits::Bit64, 0, core::u64::MAX as u128), FIELD_DUST_LIMIT => DataFormat::Unsigned(Bits::Bit64, 0, core::u64::MAX as u128), FIELD_PRUNE_PROOF => DataFormat::Bytes(core::u16::MAX), FIELD_TIMESTAMP => DataFormat::Unsigned(Bits::Bit64, 0, core::u64::MAX as u128), FIELD_PROOF_OF_BURN => DataFormat::TxOutPoint }, owned_right_types: bmap! { ASSIGNMENT_ISSUE => StateSchema { format: StateFormat::Declarative, abi: bmap! { AssignmentAction::Validate => script::Procedure::Embedded(script::StandardProcedure::FungibleInflation) } }, ASSIGNMENT_ASSETS => StateSchema { format: StateFormat::DiscreteFiniteField(DiscreteFiniteFieldFormat::Unsigned64bit), abi: bmap! { AssignmentAction::Validate => script::Procedure::Embedded(script::StandardProcedure::NoInflationBySum) } }, ASSIGNMENT_PRUNE => StateSchema { format: StateFormat::Declarative, abi: bmap! { AssignmentAction::Validate => script::Procedure::Embedded(script::StandardProcedure::ProofOfBurn) } } }, public_right_types: bset! { VALENCIES_DECENTRALIZED_ISSUE }, genesis: GenesisSchema { metadata: bmap! 
{ FIELD_TICKER => Occurences::Once, FIELD_NAME => Occurences::Once, FIELD_DESCRIPTION => Occurences::NoneOrOnce, FIELD_TOTAL_SUPPLY => Occurences::Once, FIELD_ISSUED_SUPPLY => Occurences::Once, FIELD_DUST_LIMIT => Occurences::NoneOrOnce, FIELD_PRECISION => Occurences::Once, FIELD_TIMESTAMP => Occurences::Once }, owned_rights: bmap! { ASSIGNMENT_ISSUE => Occurences::NoneOrOnce, ASSIGNMENT_ASSETS => Occurences::NoneOrMore, ASSIGNMENT_PRUNE => Occurences::NoneOrMore }, public_rights: bset! { VALENCIES_DECENTRALIZED_ISSUE }, abi: bmap! {}, }, extensions: bmap! { EXTENSION_DECENTRALIZED_ISSUE => ExtensionSchema { extends: bset! { VALENCIES_DECENTRALIZED_ISSUE }, metadata: bmap! { FIELD_ISSUED_SUPPLY => Occurences::Once, FIELD_PROOF_OF_BURN => Occurences::OnceOrMore }, owned_rights: bmap! { ASSIGNMENT_ASSETS => Occurences::NoneOrMore }, public_rights: bset! { }, abi: bmap! {}, } }, transitions: bmap! { TRANSITION_ISSUE => TransitionSchema { closes: bmap! { ASSIGNMENT_ISSUE => Occurences::Once }, metadata: bmap! { FIELD_ISSUED_SUPPLY => Occurences::Once }, owned_rights: bmap! { ASSIGNMENT_ISSUE => Occurences::NoneOrOnce, ASSIGNMENT_PRUNE => Occurences::NoneOrMore, ASSIGNMENT_ASSETS => Occurences::NoneOrMore }, public_rights: bset! {}, abi: bmap! {} }, TRANSITION_TRANSFER => TransitionSchema { closes: bmap! { ASSIGNMENT_ASSETS => Occurences::OnceOrMore }, metadata: bmap! {}, owned_rights: bmap! { ASSIGNMENT_ASSETS => Occurences::NoneOrMore }, public_rights: bset! {}, abi: bmap! {} }, TRANSITION_PRUNE => TransitionSchema { closes: bmap! { ASSIGNMENT_PRUNE => Occurences::OnceOrMore, ASSIGNMENT_ASSETS => Occurences::OnceOrMore }, metadata: bmap! { FIELD_PRUNE_PROOF => Occurences::NoneOrMore }, owned_rights: bmap! { ASSIGNMENT_PRUNE => Occurences::NoneOrMore, ASSIGNMENT_ASSETS => Occurences::NoneOrMore }, public_rights: bset! {}, abi: bmap! 
{} } }, } } #[test] fn test_schema_id_midstate() { let midstate = tagged_hash::Midstate::with(b"rgb:schema"); assert_eq!(midstate.into_inner(), MIDSTATE_SHEMA_ID); } #[test] fn test_schema_encoding_decoding() { let schema = schema(); let encoded = strict_encode(&schema).unwrap(); let encoded_standard: Vec<u8> = vec![ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 4, 16, 0, 1, 0, 4, 0, 1, 2, 0, 4, 0, 4, 3, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 4, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 5, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 6, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 18, 0, 0, 0, 0, 0, 0, 0, 7, 0, 5, 255, 255, 8, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 16, 0, 32, 3, 0, 0, 0, 0, 1, 0, 0, 255, 2, 1, 0, 1, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 1, 0, 0, 255, 1, 2, 0, 0, 1, 0, 0, 255, 16, 1, 0, 0, 0, 8, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 2, 0, 0, 0, 1, 0, 3, 0, 1, 0, 1, 0, 4, 0, 1, 0, 1, 0, 5, 0, 0, 0, 1, 0, 6, 0, 1, 0, 1, 0, 8, 0, 1, 0, 1, 0, 3, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 255, 255, 2, 0, 0, 0, 255, 255, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 4, 0, 1, 0, 1, 0, 16, 0, 1, 0, 255, 255, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1, 0, 4, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 3, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 255, 255, 2, 0, 0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 255, 255, 1, 0, 1, 0, 0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 7, 0, 0, 0, 255, 255, 2, 0, 1, 0, 1, 0, 255, 255, 2, 0, 1, 0, 255, 255, 2, 0, 1, 0, 0, 0, 255, 255, 2, 0, 0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, ]; assert_eq!(encoded, encoded_standard); let decoded = Schema::strict_decode(&encoded[..]).unwrap(); assert_eq!(decoded, schema); } }
38.677222
156
0.458497
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_schema_id_midstate() {\n let midstate = tagged_hash::Midstate::with(b\"rgb:schema\");\n assert_eq!(midstate.into_inner(), MIDSTATE_SHEMA_ID);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_schema_encoding_decoding() {\n let schema = schema();\n let encoded = strict_encode(&schema).unwrap();\n let encoded_standard: Vec<u8> = vec![\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 4, 16, 0, 1, 0, 4,\n 0, 1, 2, 0, 4, 0, 4, 3, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255,\n 255, 255, 255, 255, 255, 255, 4, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0,\n 255, 255, 255, 255, 255, 255, 255, 255, 5, 0, 0, 8, 0, 0, 0, 0, 0,\n 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 6, 0, 0, 8, 0, 0,\n 0, 0, 0, 0, 0, 0, 18, 0, 0, 0, 0, 0, 0, 0, 7, 0, 5, 255, 255, 8, 0,\n 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255,\n 255, 16, 0, 32, 3, 0, 0, 0, 0, 1, 0, 0, 255, 2, 1, 0, 1, 0, 8, 0,\n 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 1, 0,\n 0, 255, 1, 2, 0, 0, 1, 0, 0, 255, 16, 1, 0, 0, 0, 8, 0, 0, 0, 1, 0,\n 1, 0, 1, 0, 1, 0, 1, 0, 2, 0, 0, 0, 1, 0, 3, 0, 1, 0, 1, 0, 4, 0,\n 1, 0, 1, 0, 5, 0, 0, 0, 1, 0, 6, 0, 1, 0, 1, 0, 8, 0, 1, 0, 1, 0,\n 3, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 255, 255, 2, 0, 0, 0, 255, 255,\n 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 4, 0, 1, 0, 1, 0, 16, 0,\n 1, 0, 255, 255, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 255, 255, 0, 0, 0, 0,\n 0, 0, 3, 0, 0, 0, 1, 0, 4, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0,\n 3, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 255, 255, 2, 0, 0, 0, 255, 255,\n 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 255, 255, 1, 0, 1,\n 0, 0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 7, 0, 0, 0, 255,\n 255, 2, 0, 1, 0, 1, 0, 255, 255, 2, 0, 1, 0, 255, 255, 2, 0, 1, 0,\n 0, 0, 255, 255, 2, 0, 0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0,\n ];\n assert_eq!(encoded, encoded_standard);\n\n let decoded = 
Schema::strict_decode(&encoded[..]).unwrap();\n assert_eq!(decoded, schema);\n }\n}" ]
f70a030305a1f4a3dfe65533d51a15ceff801d46
5,763
rs
Rust
src/format/ixf.rs
lion328/sc3ktools
0cf3305e51d1d02113c211435776f1c699381cbe
[ "Apache-2.0" ]
2
2019-05-14T01:36:21.000Z
2019-08-18T05:32:56.000Z
src/format/ixf.rs
lion328/sc3ktools
0cf3305e51d1d02113c211435776f1c699381cbe
[ "Apache-2.0" ]
null
null
null
src/format/ixf.rs
lion328/sc3ktools
0cf3305e51d1d02113c211435776f1c699381cbe
[ "Apache-2.0" ]
1
2018-12-02T19:14:09.000Z
2018-12-02T19:14:09.000Z
use std::io::{self, Read, Write, Cursor}; use error::*; use byteorder::{ReadBytesExt, WriteBytesExt, LE}; pub const IXF_FILE_HEADER_IDENTIFIER: &[u8] = &[0xD7, 0x81, 0xC3, 0x80]; pub const IXF_FILE_RECORD_LENGTH: usize = 20; pub const IXF_FILE_NULL_RECORD: &[u8] = &[0u8; IXF_FILE_RECORD_LENGTH]; #[derive(Debug, PartialEq)] pub struct IXFFile { pub records: Vec<IXFRecord>, } #[derive(Debug, PartialEq)] pub struct IXFRecord { pub type_id: u32, pub group_id: u32, pub instance_id: u32, pub body: Vec<u8>, } impl IXFFile { pub fn parse(data: &[u8], skip_bad: bool) -> Result<IXFFile> { let mut ident = [0u8; 4]; let mut stream = io::Cursor::new(data); stream.read_exact(&mut ident)?; if ident != IXF_FILE_HEADER_IDENTIFIER { return Err(Error::IXFFile(format!("invalid header: {:x?}", ident))); } let mut records = Vec::new(); loop { let type_id = stream.read_u32::<LE>()?; let group_id = stream.read_u32::<LE>()?; let instance_id = stream.read_u32::<LE>()?; if type_id == 0 && group_id == 0 && instance_id == 0 { break } let address = stream.read_u32::<LE>()? as usize; let length = stream.read_u32::<LE>()? 
as usize; if address >= data.len() || address > data.len() - length { if skip_bad { continue } return Err(Error::IXFFile( format!("record out of bounds: address 0x{:X?}, length 0x{:X?}, max: 0x{:X?}", address, length, data.len() - 1))); } records.push(IXFRecord { type_id: type_id, group_id: group_id, instance_id: instance_id, body: data[address..address + length].to_vec(), }); } Ok(IXFFile { records: records, }) } pub fn as_vec(&self) -> Result<Vec<u8>> { let mut cursor = Cursor::new(Vec::new()); cursor.write_all(IXF_FILE_HEADER_IDENTIFIER)?; let mut offset = IXF_FILE_HEADER_IDENTIFIER.len() + IXF_FILE_RECORD_LENGTH * (self.records.len() + 1); for elem in self.records.iter() { cursor.write_u32::<LE>(elem.type_id)?; cursor.write_u32::<LE>(elem.group_id)?; cursor.write_u32::<LE>(elem.instance_id)?; cursor.write_u32::<LE>(offset as u32)?; cursor.write_u32::<LE>(elem.body.len() as u32)?; offset += elem.body.len(); } cursor.write_all(IXF_FILE_NULL_RECORD)?; for elem in self.records.iter() { cursor.write_all(&elem.body)?; } Ok(cursor.into_inner()) } } #[cfg(test)] mod tests { use super::*; #[test] #[should_panic] fn invalid_header() { let data = [0xDE, 0xAD, 0xBA, 0xBE, 0xFA, 0x11]; IXFFile::parse(&data, true).unwrap(); } #[test] fn normal() { let data = [ 0xD7, 0x81, 0xC3, 0x80, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0, 0x29, 0x99, 0x79, 0x24, 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xBE, 0xEF, 0xCA, 0xCE, ]; let record = { let mut records = IXFFile::parse(&data, false).unwrap().records; assert_eq!(records.len(), 1); records.pop().unwrap() }; assert_eq!(record.type_id, 0x78563412); assert_eq!(record.group_id, 0xF0DEBC9A); assert_eq!(record.instance_id, 0x24799929); assert_eq!(record.body, &data[0x28..]); } #[test] #[should_panic] fn bad_record() { let data = [ 0xD7, 0x81, 0xC3, 0x80, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0, 0x29, 0x99, 0x79, 0x24, 0x28, 
0xFF, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; IXFFile::parse(&data, false).unwrap(); } #[test] fn bad_record_ignore() { let data = [ 0xD7, 0x81, 0xC3, 0x80, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0, 0x29, 0x99, 0x79, 0x24, 0x28, 0xFF, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xBE, 0xEF, 0xCA, 0xCE, ]; let records = IXFFile::parse(&data, true).unwrap().records; assert_eq!(records.len(), 0); } #[test] fn reencode() { let data = [ 0xD7, 0x81, 0xC3, 0x80, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0, 0x29, 0x99, 0x79, 0x24, 0x2C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Yes, I missing this one. Still, they can be overlapped. I think the last 2*4 bytes of null record // can be removed. 0x00, 0x00, 0x00, 0x00, 0xBE, 0xEF, 0xCA, 0xCE, ]; let parsed = IXFFile::parse(&data, false).unwrap(); assert_eq!(parsed.as_vec().unwrap(), data.to_vec()); } }
27.975728
115
0.50321
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn invalid_header() {\n let data = [0xDE, 0xAD, 0xBA, 0xBE, 0xFA, 0x11];\n IXFFile::parse(&data, true).unwrap();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn normal() {\n let data = [\n 0xD7, 0x81, 0xC3, 0x80,\n 0x12, 0x34, 0x56, 0x78,\n 0x9A, 0xBC, 0xDE, 0xF0,\n 0x29, 0x99, 0x79, 0x24,\n 0x28, 0x00, 0x00, 0x00,\n 0x04, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n\n 0xBE, 0xEF, 0xCA, 0xCE,\n ];\n \n let record = {\n let mut records = IXFFile::parse(&data, false).unwrap().records;\n assert_eq!(records.len(), 1);\n records.pop().unwrap()\n };\n\n assert_eq!(record.type_id, 0x78563412);\n assert_eq!(record.group_id, 0xF0DEBC9A);\n assert_eq!(record.instance_id, 0x24799929);\n assert_eq!(record.body, &data[0x28..]);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn bad_record() {\n let data = [\n 0xD7, 0x81, 0xC3, 0x80,\n 0x12, 0x34, 0x56, 0x78,\n 0x9A, 0xBC, 0xDE, 0xF0,\n 0x29, 0x99, 0x79, 0x24,\n 0x28, 0xFF, 0x00, 0x00,\n 0x04, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n ];\n \n IXFFile::parse(&data, false).unwrap();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn bad_record_ignore() {\n let data = [\n 0xD7, 0x81, 0xC3, 0x80,\n 0x12, 0x34, 0x56, 0x78,\n 0x9A, 0xBC, 0xDE, 0xF0,\n 0x29, 0x99, 0x79, 0x24,\n 0x28, 0xFF, 0x00, 0x00,\n 0x04, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n\n 0xBE, 0xEF, 0xCA, 0xCE,\n ];\n \n let records = IXFFile::parse(&data, true).unwrap().records;\n assert_eq!(records.len(), 0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn reencode() {\n let data = [\n 0xD7, 0x81, 0xC3, 0x80,\n 0x12, 0x34, 0x56, 0x78,\n 0x9A, 0xBC, 0xDE, 0xF0,\n 0x29, 0x99, 0x79, 0x24,\n 0x2C, 0x00, 
0x00, 0x00,\n 0x04, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00,\n // Yes, I missing this one. Still, they can be overlapped. I think the last 2*4 bytes of null record\n // can be removed.\n 0x00, 0x00, 0x00, 0x00,\n\n 0xBE, 0xEF, 0xCA, 0xCE,\n ];\n\n let parsed = IXFFile::parse(&data, false).unwrap();\n\n assert_eq!(parsed.as_vec().unwrap(), data.to_vec());\n }\n}" ]
f70a14fc758518f48f6f5ea2bc43e4a385188151
33,096
rs
Rust
src/gen.rs
gudjonragnar/rsgen-avro
c46fd1eb73239e151f7cb38e607c68a096ea3d62
[ "MIT" ]
null
null
null
src/gen.rs
gudjonragnar/rsgen-avro
c46fd1eb73239e151f7cb38e607c68a096ea3d62
[ "MIT" ]
null
null
null
src/gen.rs
gudjonragnar/rsgen-avro
c46fd1eb73239e151f7cb38e607c68a096ea3d62
[ "MIT" ]
null
null
null
use std::collections::{HashMap, VecDeque}; use std::fs; use std::io::prelude::*; use avro_rs::{schema::RecordField, Schema}; use crate::error::{Error, Result}; use crate::templates::*; /// Represents a schema input source. pub enum Source<'a> { /// An Avro schema enum from `avro-rs` crate. Schema(&'a Schema), /// An Avro schema string in json format. SchemaStr(&'a str), /// Pattern for files containing Avro schemas in json format. GlobPattern(&'a str), } /// The main component of this library. /// It is stateless and can be reused many times. pub struct Generator { templater: Templater, } impl Generator { /// Create a new `Generator` through a builder with default config. pub fn new() -> Result<Generator> { GeneratorBuilder::new().build() } /// Returns a fluid builder for custom `Generator` instantiation. pub fn builder() -> GeneratorBuilder { GeneratorBuilder::new() } /// Generates Rust code from an Avro schema `Source`. /// Writes all generated types to the ouput. pub fn gen(&self, source: &Source, output: &mut impl Write) -> Result<()> { if self.templater.nullable { output.write_all(DESER_NULLABLE.as_bytes())?; } match source { Source::Schema(schema) => { let mut deps = deps_stack(schema, vec![]); self.gen_in_order(&mut deps, output)?; } Source::SchemaStr(raw_schema) => { let schema = Schema::parse_str(&raw_schema)?; let mut deps = deps_stack(&schema, vec![]); self.gen_in_order(&mut deps, output)?; } Source::GlobPattern(pattern) => { let mut raw_schemas = vec![]; for entry in glob::glob(pattern)? 
{ let path = entry.map_err(|e| e.into_error())?; if !path.is_dir() { raw_schemas.push(fs::read_to_string(path)?); } } let schemas = &raw_schemas.iter().map(|s| s.as_str()).collect::<Vec<_>>(); let schemas = Schema::parse_list(&schemas)?; let mut deps = schemas .iter() .fold(vec![], |deps, schema| deps_stack(&schema, deps)); self.gen_in_order(&mut deps, output)?; } } Ok(()) } /// Given an Avro `schema`: /// * Find its ordered, nested dependencies with `deps_stack(schema)` /// * Pops sub-schemas and generate appropriate Rust types /// * Keeps tracks of nested schema->name with `GenState` mapping /// * Appends generated Rust types to the output fn gen_in_order(&self, deps: &mut Vec<Schema>, output: &mut impl Write) -> Result<()> { let mut gs = GenState::new(); while let Some(s) = deps.pop() { match s { // Simply generate code Schema::Fixed { .. } => { let code = &self.templater.str_fixed(&s)?; output.write_all(code.as_bytes())? } Schema::Enum { .. } => { let code = &self.templater.str_enum(&s)?; output.write_all(code.as_bytes())? } // Generate code with potentially nested types Schema::Record { .. } => { let code = &self.templater.str_record(&s, &gs)?; output.write_all(code.as_bytes())? } // Register inner type for it to be used as a nested type later Schema::Array(ref inner) => { let type_str = array_type(inner, &gs)?; gs.put_type(&s, type_str) } Schema::Map(ref inner) => { let type_str = map_type(inner, &gs)?; gs.put_type(&s, type_str) } Schema::Union(ref union) => { // Generate custom enum with potentially nested types if (union.is_nullable() && union.variants().len() > 2) || (!union.is_nullable() && union.variants().len() > 1) { let code = &self.templater.str_union_enum(&s, &gs)?; output.write_all(code.as_bytes())? 
} // Register inner union for it to be used as a nested type later let type_str = union_type(union, &gs, true)?; gs.put_type(&s, type_str) } _ => Err(Error::Schema(format!("Not a valid root schema: {:?}", s)))?, } } Ok(()) } } /// Utility function to find the ordered, nested dependencies of an Avro `schema`. /// Explores nested `schema`s in a breadth-first fashion, pushing them on a stack /// at the same time in order to have them ordered. /// It is similar to traversing the `schema` tree in a post-order fashion. fn deps_stack(schema: &Schema, mut deps: Vec<Schema>) -> Vec<Schema> { fn push_unique(deps: &mut Vec<Schema>, s: Schema) { if !deps.contains(&s) { deps.push(s); } } let mut q = VecDeque::new(); q.push_back(schema); while !q.is_empty() { let s = q.pop_front().unwrap(); match s { // No nested schemas, add them to the result stack Schema::Enum { .. } => push_unique(&mut deps, s.clone()), Schema::Fixed { .. } => push_unique(&mut deps, s.clone()), Schema::Decimal { inner, .. } if matches!(**inner, Schema::Fixed { .. }) => { push_unique(&mut deps, s.clone()) } // Explore the record fields for potentially nested schemas Schema::Record { fields, .. } => { push_unique(&mut deps, s.clone()); let by_pos = fields .iter() .map(|f| (f.position, f)) .collect::<HashMap<_, _>>(); let mut i = 0; while let Some(RecordField { schema: sr, .. }) = by_pos.get(&i) { match sr { // No nested schemas, add them to the result stack Schema::Fixed { .. } => push_unique(&mut deps, sr.clone()), Schema::Enum { .. } => push_unique(&mut deps, sr.clone()), // Push to the exploration queue for further checks Schema::Record { .. } => q.push_back(sr), // Push to the exploration queue, depending on the inner schema format Schema::Map(sc) | Schema::Array(sc) => match &**sc { Schema::Fixed { .. } | Schema::Enum { .. } | Schema::Record { .. } | Schema::Map(..) | Schema::Array(..) | Schema::Union(..) 
=> q.push_back(&**sc), _ => (), }, Schema::Union(union) => { if (union.is_nullable() && union.variants().len() > 2) || (!union.is_nullable() && union.variants().len() > 1) { push_unique(&mut deps, sr.clone()); } union.variants().iter().for_each(|sc| match sc { Schema::Fixed { .. } | Schema::Enum { .. } | Schema::Record { .. } | Schema::Map(..) | Schema::Array(..) | Schema::Union(..) => { q.push_back(sc); push_unique(&mut deps, sc.clone()); } _ => (), }); } _ => (), }; i += 1; } } // Depending on the inner schema type ... Schema::Map(sc) | Schema::Array(sc) => match &**sc { // ... Needs further checks, push to the exploration queue Schema::Fixed { .. } | Schema::Enum { .. } | Schema::Record { .. } | Schema::Map(..) | Schema::Array(..) | Schema::Union(..) => q.push_back(&**sc), // ... Not nested, can be pushed to the result stack _ => push_unique(&mut deps, s.clone()), }, Schema::Union(union) => { if (union.is_nullable() && union.variants().len() > 2) || (!union.is_nullable() && union.variants().len() > 1) { push_unique(&mut deps, s.clone()); } union.variants().iter().for_each(|sc| match sc { // ... Needs further checks, push to the exploration queue Schema::Fixed { .. } | Schema::Enum { .. } | Schema::Record { .. } | Schema::Map(..) | Schema::Array(..) | Schema::Union(..) => q.push_back(sc), // ... Not nested, can be pushed to the result stack _ => push_unique(&mut deps, s.clone()), }); } // Ignore all other schema formats _ => (), } } deps } /// A builder class to customize `Generator`. pub struct GeneratorBuilder { precision: usize, nullable: bool, use_variant_access: bool, use_avro_rs_unions: bool, } impl GeneratorBuilder { /// Creates a new `GeneratorBuilder`. pub fn new() -> GeneratorBuilder { GeneratorBuilder { precision: 3, nullable: false, use_variant_access: false, use_avro_rs_unions: false, } } /// Sets the precision for default values of f32/f64 fields. 
pub fn precision(mut self, precision: usize) -> GeneratorBuilder { self.precision = precision; self } /// Puts default value when deserializing `null` field. /// Doesn't apply to union fields ["null", "Foo"], which are `Option<Foo>`. pub fn nullable(mut self, nullable: bool) -> GeneratorBuilder { self.nullable = nullable; self } /// Adds variant_access_derive to the enums generated from union types. pub fn use_variant_access(mut self, use_variant_access: bool) -> GeneratorBuilder { self.use_variant_access = use_variant_access; self } /// Adds support for deserializing union types from the `avro-rs` crate. /// Only necessary for unions of 3 or more types or 2-type unions without "null". /// Note that only int, long, float, double, and boolean values are currently supported. pub fn use_avro_rs_unions(mut self, use_avro_rs_unions: bool) -> GeneratorBuilder { self.use_avro_rs_unions = use_avro_rs_unions; self } /// Create a `Generator` with the builder parameters. pub fn build(self) -> Result<Generator> { let mut templater = Templater::new()?; templater.precision = self.precision; templater.nullable = self.nullable; templater.use_variant_access = self.use_variant_access; templater.use_avro_rs_unions = self.use_avro_rs_unions; Ok(Generator { templater }) } } #[cfg(test)] mod tests { use avro_rs::schema::Name; use super::*; macro_rules! 
assert_schema_gen ( ($generator:expr, $expected:expr, $raw_schema:expr) => ( let schema = Schema::parse_str($raw_schema).unwrap(); let source = Source::Schema(&schema); let mut buf = vec![]; $generator.gen(&source, &mut buf).unwrap(); let res = String::from_utf8(buf).unwrap(); assert_eq!($expected, &res); ); ); #[test] fn simple() { let raw_schema = r#" { "type": "record", "name": "test", "fields": [ {"name": "a", "type": "long", "default": 42}, {"name": "b", "type": "string"} ] } "#; let expected = " #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Test { pub a: i64, pub b: String, } impl Default for Test { fn default() -> Test { Test { a: 42, b: String::default(), } } } "; let g = Generator::new().unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn complex() { let raw_schema = r#" { "type": "record", "name": "User", "doc": "Hi there.", "fields": [ {"name": "name", "type": "string", "default": ""}, {"name": "favorite_number", "type": "int", "default": 7}, {"name": "likes_pizza", "type": "boolean", "default": false}, {"name": "oye", "type": "float", "default": 1.1}, {"name": "aa-i32", "type": {"type": "array", "items": {"type": "array", "items": "int"}}, "default": [[0], [12, -1]]} ] } "#; let expected = r#" /// Hi there. 
#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct User { pub name: String, pub favorite_number: i32, pub likes_pizza: bool, pub oye: f32, #[serde(rename = "aa-i32")] pub aa_i32: Vec<Vec<i32>>, } impl Default for User { fn default() -> User { User { name: "".to_owned(), favorite_number: 7, likes_pizza: false, oye: 1.100, aa_i32: vec![vec![0], vec![12, -1]], } } } "#; let g = Generator::new().unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn optional_array() { let raw_schema = r#" { "name": "Snmp", "type": "record", "fields": [ { "name": "v1", "type": [ "null", { "name": "V1", "type": "record", "fields": [ { "name": "pdu", "type": [ "null", { "name": "TrapV1", "type": "record", "fields": [ { "name": "var", "type": ["null", { "type": "array", "items": { "name": "Variable", "type": "record", "fields": [ { "name": "oid", "type": ["null", { "type":"array", "items": "long" } ], "default": null }, { "name": "val", "type": ["null", "string"], "default": null } ], "default": {} } } ], "default": null } ] } ], "default": null } ] } ], "default": null } ], "default": {} } "#; let expected = r#" #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Variable { pub oid: Option<Vec<i64>>, pub val: Option<String>, } impl Default for Variable { fn default() -> Variable { Variable { oid: None, val: None, } } } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct TrapV1 { pub var: Option<Vec<Variable>>, } impl Default for TrapV1 { fn default() -> TrapV1 { TrapV1 { var: None, } } } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct V1 { pub pdu: Option<TrapV1>, } impl Default for V1 { fn default() -> V1 { V1 { pdu: None, } } } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Snmp { pub v1: Option<V1>, } impl Default for 
Snmp { fn default() -> Snmp { Snmp { v1: None, } } } "#; let g = Generator::new().unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn optional_arrays() { let raw_schema = r#" { "type": "record", "name": "KsqlDataSourceSchema", "namespace": "io.confluent.ksql.avro_schemas", "fields": [ { "name": "ID", "type": ["null", "string"], "default": null }, { "name": "GROUP_IDS", "type": ["null", { "type": "array", "items": ["null", "string"] } ], "default": null }, { "name": "GROUP_NAMES", "type": ["null", { "type": "array", "items": ["null", "string"] } ], "default": null } ] } "#; let expected = r#" #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct KsqlDataSourceSchema { #[serde(rename = "ID")] pub id: Option<String>, #[serde(rename = "GROUP_IDS")] pub group_ids: Option<Vec<Option<String>>>, #[serde(rename = "GROUP_NAMES")] pub group_names: Option<Vec<Option<String>>>, } impl Default for KsqlDataSourceSchema { fn default() -> KsqlDataSourceSchema { KsqlDataSourceSchema { id: None, group_ids: None, group_names: None, } } } "#; let g = Generator::new().unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn multi_valued_union() { let raw_schema = r#" { "type": "record", "name": "Contact", "namespace": "com.test", "fields": [ { "name": "extra", "type": "map", "values" : [ "null", "string", "long", "double", "boolean" ] } ] } "#; let expected = r#" /// Auto-generated type for unnamed Avro union variants. 
#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] pub enum UnionStringLongDoubleBoolean { String(String), Long(i64), Double(f64), Boolean(bool), } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Contact { pub extra: ::std::collections::HashMap<String, Option<UnionStringLongDoubleBoolean>>, } impl Default for Contact { fn default() -> Contact { Contact { extra: ::std::collections::HashMap::new(), } } } "#; let g = Generator::new().unwrap(); assert_schema_gen!(g, expected, raw_schema); let raw_schema = r#" { "type": "record", "name": "AvroFileId", "fields": [ { "name": "id", "type": [ "string", { "type": "record", "name": "AvroShortUUID", "fields": [ { "name": "mostBits", "type": "long" }, { "name": "leastBits", "type": "long" } ] } ] } ] } "#; let expected = r#" #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct AvroShortUuid { #[serde(rename = "mostBits")] pub most_bits: i64, #[serde(rename = "leastBits")] pub least_bits: i64, } impl Default for AvroShortUuid { fn default() -> AvroShortUuid { AvroShortUuid { most_bits: 0, least_bits: 0, } } } /// Auto-generated type for unnamed Avro union variants. 
#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] pub enum UnionStringAvroShortUuid { String(String), AvroShortUuid(AvroShortUuid), } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct AvroFileId { pub id: UnionStringAvroShortUuid, } impl Default for AvroFileId { fn default() -> AvroFileId { AvroFileId { id: UnionStringAvroShortUuid::String(String::default()), } } } "#; let g = Generator::new().unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn multi_valued_union_with_variant_access() { let raw_schema = r#" { "type": "record", "name": "Contact", "namespace": "com.test", "fields": [ { "name": "extra", "type": "map", "values" : [ "null", "string", "long", "double", "boolean" ] } ] } "#; let expected = r#" /// Auto-generated type for unnamed Avro union variants. #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize, variant_access_derive::VariantAccess)] pub enum UnionStringLongDoubleBoolean { String(String), Long(i64), Double(f64), Boolean(bool), } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Contact { pub extra: ::std::collections::HashMap<String, Option<UnionStringLongDoubleBoolean>>, } impl Default for Contact { fn default() -> Contact { Contact { extra: ::std::collections::HashMap::new(), } } } "#; let g = Generator::builder() .use_variant_access(true) .build() .unwrap(); assert_schema_gen!(g, expected, raw_schema); let raw_schema = r#" { "type": "record", "name": "AvroFileId", "fields": [ { "name": "id", "type": [ "string", { "type": "record", "name": "AvroShortUUID", "fields": [ { "name": "mostBits", "type": "long" }, { "name": "leastBits", "type": "long" } ] } ] } ] } "#; let expected = r#" #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct AvroShortUuid { #[serde(rename = "mostBits")] pub most_bits: i64, #[serde(rename = "leastBits")] pub 
least_bits: i64, } impl Default for AvroShortUuid { fn default() -> AvroShortUuid { AvroShortUuid { most_bits: 0, least_bits: 0, } } } /// Auto-generated type for unnamed Avro union variants. #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize, variant_access_derive::VariantAccess)] pub enum UnionStringAvroShortUuid { String(String), AvroShortUuid(AvroShortUuid), } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct AvroFileId { pub id: UnionStringAvroShortUuid, } impl Default for AvroFileId { fn default() -> AvroFileId { AvroFileId { id: UnionStringAvroShortUuid::String(String::default()), } } } "#; let g = Generator::builder() .use_variant_access(true) .build() .unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn multi_valued_union_with_avro_rs_unions() { let raw_schema = r#" { "type": "record", "name": "Contact", "namespace": "com.test", "fields": [ { "name": "extra", "type": "map", "values" : [ "null", "string", "long", "double", "boolean" ] } ] } "#; let expected = r#" /// Auto-generated type for unnamed Avro union variants. #[derive(Debug, PartialEq, Clone, serde::Serialize)] pub enum UnionStringLongDoubleBoolean { String(String), Long(i64), Double(f64), Boolean(bool), } impl<'de> serde::Deserialize<'de> for UnionStringLongDoubleBoolean { fn deserialize<D>(deserializer: D) -> Result<UnionStringLongDoubleBoolean, D::Error> where D: serde::Deserializer<'de>, { /// Serde visitor for the auto-generated unnamed Avro union type. 
struct UnionStringLongDoubleBooleanVisitor; impl<'de> serde::de::Visitor<'de> for UnionStringLongDoubleBooleanVisitor { type Value = UnionStringLongDoubleBoolean; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a UnionStringLongDoubleBoolean") } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(UnionStringLongDoubleBoolean::Long(value)) } fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(UnionStringLongDoubleBoolean::Double(value)) } fn visit_bool<E>(self, value: bool) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(UnionStringLongDoubleBoolean::Boolean(value)) } } deserializer.deserialize_any(UnionStringLongDoubleBooleanVisitor) } } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Contact { pub extra: ::std::collections::HashMap<String, Option<UnionStringLongDoubleBoolean>>, } impl Default for Contact { fn default() -> Contact { Contact { extra: ::std::collections::HashMap::new(), } } } "#; let g = Generator::builder() .use_avro_rs_unions(true) .build() .unwrap(); assert_schema_gen!(g, expected, raw_schema); let raw_schema = r#" { "type": "record", "name": "AvroFileId", "fields": [ { "name": "id", "type": [ "string", { "type": "record", "name": "AvroShortUUID", "fields": [ { "name": "mostBits", "type": "long" }, { "name": "leastBits", "type": "long" } ] } ] } ] } "#; let expected = r#" #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct AvroShortUuid { #[serde(rename = "mostBits")] pub most_bits: i64, #[serde(rename = "leastBits")] pub least_bits: i64, } impl Default for AvroShortUuid { fn default() -> AvroShortUuid { AvroShortUuid { most_bits: 0, least_bits: 0, } } } /// Auto-generated type for unnamed Avro union variants. 
#[derive(Debug, PartialEq, Clone, serde::Serialize)] pub enum UnionStringAvroShortUuid { String(String), AvroShortUuid(AvroShortUuid), } impl<'de> serde::Deserialize<'de> for UnionStringAvroShortUuid { fn deserialize<D>(deserializer: D) -> Result<UnionStringAvroShortUuid, D::Error> where D: serde::Deserializer<'de>, { /// Serde visitor for the auto-generated unnamed Avro union type. struct UnionStringAvroShortUuidVisitor; impl<'de> serde::de::Visitor<'de> for UnionStringAvroShortUuidVisitor { type Value = UnionStringAvroShortUuid; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a UnionStringAvroShortUuid") } } deserializer.deserialize_any(UnionStringAvroShortUuidVisitor) } } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct AvroFileId { pub id: UnionStringAvroShortUuid, } impl Default for AvroFileId { fn default() -> AvroFileId { AvroFileId { id: UnionStringAvroShortUuid::String(String::default()), } } } "#; let g = Generator::builder() .use_avro_rs_unions(true) .build() .unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn nullable_gen() { let raw_schema = r#" { "type": "record", "name": "test", "fields": [ {"name": "a", "type": "long", "default": 42}, {"name": "b-b", "type": "string", "default": "na"}, {"name": "c", "type": ["null", "int"], "default": null} ] } "#; let expected = r#" macro_rules! 
deser( ($name:ident, $rtype:ty, $val:expr) => ( fn $name<'de, D>(deserializer: D) -> Result<$rtype, D::Error> where D: serde::Deserializer<'de>, { let opt = Option::deserialize(deserializer)?; Ok(opt.unwrap_or_else(|| $val)) } ); ); #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Test { #[serde(deserialize_with = "nullable_test_a")] pub a: i64, #[serde(rename = "b-b", deserialize_with = "nullable_test_b_b")] pub b_b: String, pub c: Option<i32>, } deser!(nullable_test_a, i64, 42); deser!(nullable_test_b_b, String, "na".to_owned()); impl Default for Test { fn default() -> Test { Test { a: 42, b_b: "na".to_owned(), c: None, } } } "#; let g = Generator::builder().nullable(true).build().unwrap(); assert_schema_gen!(g, expected, raw_schema); } #[test] fn nullable_code() { use serde::{Deserialize, Deserializer}; macro_rules! deser( ($name:ident, $rtype:ty, $val:expr) => ( fn $name<'de, D>(deserializer: D) -> std::result::Result<$rtype, D::Error> where D: Deserializer<'de>, { let opt = Option::deserialize(deserializer)?; Ok(opt.unwrap_or_else(|| $val)) } ); ); #[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct Test { #[serde(deserialize_with = "nullable_test_a")] pub a: i64, #[serde(rename = "b-b", deserialize_with = "nullable_test_b_b")] pub b_b: String, pub c: Option<i32>, } deser!(nullable_test_a, i64, 42); deser!(nullable_test_b_b, String, "na".to_owned()); impl Default for Test { fn default() -> Test { Test { a: 42, b_b: "na".to_owned(), c: None, } } } let json = r#"{"a": null, "b-b": null, "c": null}"#; let res: Test = serde_json::from_str(json).unwrap(); assert_eq!(Test::default(), res); } #[test] fn deps() { let raw_schema = r#" { "type": "record", "name": "User", "fields": [ {"name": "name", "type": "string", "default": "unknown"}, {"name": "address", "type": { "type": "record", "name": "Address", "fields": [ {"name": "city", "type": "string", "default": "unknown"}, 
{"name": "country", "type": {"type": "enum", "name": "Country", "symbols": ["FR", "JP"]} } ] } } ] } "#; let schema = Schema::parse_str(&raw_schema).unwrap(); let mut deps = deps_stack(&schema, vec![]); let s = deps.pop().unwrap(); assert!(matches!(s, Schema::Enum{ name: Name { ref name, ..}, ..} if name == "Country")); let s = deps.pop().unwrap(); assert!(matches!(s, Schema::Record{ name: Name { ref name, ..}, ..} if name == "Address")); let s = deps.pop().unwrap(); assert!(matches!(s, Schema::Record{ name: Name { ref name, ..}, ..} if name == "User")); let s = deps.pop(); assert!(matches!(s, None)); } #[test] fn cross_deps() -> std::result::Result<(), Box<dyn std::error::Error>> { use std::fs::File; use std::io::Write; use tempfile::tempdir; let dir = tempdir()?; let mut schema_a_file = File::create(dir.path().join("schema_a.avsc"))?; let schema_a_str = r#" { "name": "A", "type": "record", "fields": [ {"name": "field_one", "type": "float"} ] } "#; schema_a_file.write_all(schema_a_str.as_bytes())?; let mut schema_b_file = File::create(dir.path().join("schema_b.avsc"))?; let schema_b_str = r#" { "name": "B", "type": "record", "fields": [ {"name": "field_one", "type": "A"} ] } "#; schema_b_file.write_all(schema_b_str.as_bytes())?; let expected = r#" #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct B { pub field_one: A, } impl Default for B { fn default() -> B { B { field_one: A::default(), } } } #[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)] #[serde(default)] pub struct A { pub field_one: f32, } impl Default for A { fn default() -> A { A { field_one: 0.0, } } } "#; let pattern = format!("{}/*.avsc", dir.path().display()); let source = Source::GlobPattern(pattern.as_str()); let g = Generator::new()?; let mut buf = vec![]; g.gen(&source, &mut buf)?; let res = String::from_utf8(buf)?; println!("{}", res); assert_eq!(expected, res); drop(schema_a_file); drop(schema_b_file); dir.close()?; 
Ok(()) } }
27.58
110
0.511965
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn simple() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"test\",\n \"fields\": [\n {\"name\": \"a\", \"type\": \"long\", \"default\": 42},\n {\"name\": \"b\", \"type\": \"string\"}\n ]\n}\n\"#;\n\n let expected = \"\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct Test {\n pub a: i64,\n pub b: String,\n}\n\nimpl Default for Test {\n fn default() -> Test {\n Test {\n a: 42,\n b: String::default(),\n }\n }\n}\n\";\n\n let g = Generator::new().unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn complex() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"User\",\n \"doc\": \"Hi there.\",\n \"fields\": [\n {\"name\": \"name\", \"type\": \"string\", \"default\": \"\"},\n {\"name\": \"favorite_number\", \"type\": \"int\", \"default\": 7},\n {\"name\": \"likes_pizza\", \"type\": \"boolean\", \"default\": false},\n {\"name\": \"oye\", \"type\": \"float\", \"default\": 1.1},\n {\"name\": \"aa-i32\",\n \"type\": {\"type\": \"array\", \"items\": {\"type\": \"array\", \"items\": \"int\"}},\n \"default\": [[0], [12, -1]]}\n ]\n}\n\"#;\n\n let expected = r#\"\n/// Hi there.\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct User {\n pub name: String,\n pub favorite_number: i32,\n pub likes_pizza: bool,\n pub oye: f32,\n #[serde(rename = \"aa-i32\")]\n pub aa_i32: Vec<Vec<i32>>,\n}\n\nimpl Default for User {\n fn default() -> User {\n User {\n name: \"\".to_owned(),\n favorite_number: 7,\n likes_pizza: false,\n oye: 1.100,\n aa_i32: vec![vec![0], vec![12, -1]],\n }\n }\n}\n\"#;\n\n let g = Generator::new().unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn optional_array() {\n let raw_schema = r#\"\n{\n \"name\": \"Snmp\",\n \"type\": 
\"record\",\n \"fields\": [ {\n \"name\": \"v1\",\n \"type\": [ \"null\", {\n \"name\": \"V1\",\n \"type\": \"record\",\n \"fields\": [ {\n \"name\": \"pdu\",\n \"type\": [ \"null\", {\n \"name\": \"TrapV1\",\n \"type\": \"record\",\n \"fields\": [ {\n \"name\": \"var\",\n \"type\": [\"null\", {\n \"type\": \"array\",\n \"items\": {\n \"name\": \"Variable\",\n \"type\": \"record\",\n \"fields\": [ {\n \"name\": \"oid\",\n \"type\": [\"null\", {\n \"type\":\"array\",\n \"items\": \"long\"\n } ],\n \"default\": null\n }, {\n \"name\": \"val\",\n \"type\": [\"null\", \"string\"],\n \"default\": null\n } ],\n \"default\": {}\n }\n } ],\n \"default\": null\n } ]\n } ],\n \"default\": null\n } ]\n } ],\n \"default\": null\n } ],\n \"default\": {}\n}\n\"#;\n\n let expected = r#\"\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct Variable {\n pub oid: Option<Vec<i64>>,\n pub val: Option<String>,\n}\n\nimpl Default for Variable {\n fn default() -> Variable {\n Variable {\n oid: None,\n val: None,\n }\n }\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct TrapV1 {\n pub var: Option<Vec<Variable>>,\n}\n\nimpl Default for TrapV1 {\n fn default() -> TrapV1 {\n TrapV1 {\n var: None,\n }\n }\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct V1 {\n pub pdu: Option<TrapV1>,\n}\n\nimpl Default for V1 {\n fn default() -> V1 {\n V1 {\n pdu: None,\n }\n }\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct Snmp {\n pub v1: Option<V1>,\n}\n\nimpl Default for Snmp {\n fn default() -> Snmp {\n Snmp {\n v1: None,\n }\n }\n}\n\"#;\n\n let g = Generator::new().unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn optional_arrays() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": 
\"KsqlDataSourceSchema\",\n \"namespace\": \"io.confluent.ksql.avro_schemas\",\n \"fields\": [ {\n \"name\": \"ID\",\n \"type\": [\"null\", \"string\"],\n \"default\": null\n }, {\n \"name\": \"GROUP_IDS\",\n \"type\": [\"null\", {\n \"type\": \"array\",\n \"items\": [\"null\", \"string\"]\n } ],\n \"default\": null\n }, {\n \"name\": \"GROUP_NAMES\",\n \"type\": [\"null\", {\n \"type\": \"array\",\n \"items\": [\"null\", \"string\"]\n } ],\n \"default\": null\n } ]\n}\n\"#;\n\n let expected = r#\"\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct KsqlDataSourceSchema {\n #[serde(rename = \"ID\")]\n pub id: Option<String>,\n #[serde(rename = \"GROUP_IDS\")]\n pub group_ids: Option<Vec<Option<String>>>,\n #[serde(rename = \"GROUP_NAMES\")]\n pub group_names: Option<Vec<Option<String>>>,\n}\n\nimpl Default for KsqlDataSourceSchema {\n fn default() -> KsqlDataSourceSchema {\n KsqlDataSourceSchema {\n id: None,\n group_ids: None,\n group_names: None,\n }\n }\n}\n\"#;\n\n let g = Generator::new().unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn multi_valued_union() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"Contact\",\n \"namespace\": \"com.test\",\n \"fields\": [ {\n \"name\": \"extra\",\n \"type\": \"map\",\n \"values\" : [ \"null\", \"string\", \"long\", \"double\", \"boolean\" ]\n } ]\n}\n\"#;\n\n let expected = r#\"\n/// Auto-generated type for unnamed Avro union variants.\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\npub enum UnionStringLongDoubleBoolean {\n String(String),\n Long(i64),\n Double(f64),\n Boolean(bool),\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct Contact {\n pub extra: ::std::collections::HashMap<String, Option<UnionStringLongDoubleBoolean>>,\n}\n\nimpl Default for Contact {\n fn default() -> Contact {\n 
Contact {\n extra: ::std::collections::HashMap::new(),\n }\n }\n}\n\"#;\n\n let g = Generator::new().unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"AvroFileId\",\n \"fields\": [ {\n \"name\": \"id\",\n \"type\": [\n \"string\", {\n \"type\": \"record\",\n \"name\": \"AvroShortUUID\",\n \"fields\": [ {\n \"name\": \"mostBits\",\n \"type\": \"long\"\n }, {\n \"name\": \"leastBits\",\n \"type\": \"long\"\n } ]\n } ]\n } ]\n}\n\"#;\n\n let expected = r#\"\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct AvroShortUuid {\n #[serde(rename = \"mostBits\")]\n pub most_bits: i64,\n #[serde(rename = \"leastBits\")]\n pub least_bits: i64,\n}\n\nimpl Default for AvroShortUuid {\n fn default() -> AvroShortUuid {\n AvroShortUuid {\n most_bits: 0,\n least_bits: 0,\n }\n }\n}\n\n/// Auto-generated type for unnamed Avro union variants.\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\npub enum UnionStringAvroShortUuid {\n String(String),\n AvroShortUuid(AvroShortUuid),\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct AvroFileId {\n pub id: UnionStringAvroShortUuid,\n}\n\nimpl Default for AvroFileId {\n fn default() -> AvroFileId {\n AvroFileId {\n id: UnionStringAvroShortUuid::String(String::default()),\n }\n }\n}\n\"#;\n\n let g = Generator::new().unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn multi_valued_union_with_variant_access() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"Contact\",\n \"namespace\": \"com.test\",\n \"fields\": [ {\n \"name\": \"extra\",\n \"type\": \"map\",\n \"values\" : [ \"null\", \"string\", \"long\", \"double\", \"boolean\" ]\n } ]\n}\n\"#;\n\n let expected = r#\"\n/// Auto-generated type for unnamed Avro union variants.\n#[derive(Debug, 
PartialEq, Clone, serde::Deserialize, serde::Serialize, variant_access_derive::VariantAccess)]\npub enum UnionStringLongDoubleBoolean {\n String(String),\n Long(i64),\n Double(f64),\n Boolean(bool),\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct Contact {\n pub extra: ::std::collections::HashMap<String, Option<UnionStringLongDoubleBoolean>>,\n}\n\nimpl Default for Contact {\n fn default() -> Contact {\n Contact {\n extra: ::std::collections::HashMap::new(),\n }\n }\n}\n\"#;\n\n let g = Generator::builder()\n .use_variant_access(true)\n .build()\n .unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"AvroFileId\",\n \"fields\": [ {\n \"name\": \"id\",\n \"type\": [\n \"string\", {\n \"type\": \"record\",\n \"name\": \"AvroShortUUID\",\n \"fields\": [ {\n \"name\": \"mostBits\",\n \"type\": \"long\"\n }, {\n \"name\": \"leastBits\",\n \"type\": \"long\"\n } ]\n } ]\n } ]\n}\n\"#;\n\n let expected = r#\"\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct AvroShortUuid {\n #[serde(rename = \"mostBits\")]\n pub most_bits: i64,\n #[serde(rename = \"leastBits\")]\n pub least_bits: i64,\n}\n\nimpl Default for AvroShortUuid {\n fn default() -> AvroShortUuid {\n AvroShortUuid {\n most_bits: 0,\n least_bits: 0,\n }\n }\n}\n\n/// Auto-generated type for unnamed Avro union variants.\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize, variant_access_derive::VariantAccess)]\npub enum UnionStringAvroShortUuid {\n String(String),\n AvroShortUuid(AvroShortUuid),\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct AvroFileId {\n pub id: UnionStringAvroShortUuid,\n}\n\nimpl Default for AvroFileId {\n fn default() -> AvroFileId {\n AvroFileId {\n id: UnionStringAvroShortUuid::String(String::default()),\n }\n }\n}\n\"#;\n\n 
let g = Generator::builder()\n .use_variant_access(true)\n .build()\n .unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn multi_valued_union_with_avro_rs_unions() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"Contact\",\n \"namespace\": \"com.test\",\n \"fields\": [ {\n \"name\": \"extra\",\n \"type\": \"map\",\n \"values\" : [ \"null\", \"string\", \"long\", \"double\", \"boolean\" ]\n } ]\n}\n\"#;\n\n let expected = r#\"\n/// Auto-generated type for unnamed Avro union variants.\n#[derive(Debug, PartialEq, Clone, serde::Serialize)]\npub enum UnionStringLongDoubleBoolean {\n String(String),\n Long(i64),\n Double(f64),\n Boolean(bool),\n}\n\nimpl<'de> serde::Deserialize<'de> for UnionStringLongDoubleBoolean {\n fn deserialize<D>(deserializer: D) -> Result<UnionStringLongDoubleBoolean, D::Error>\n where\n D: serde::Deserializer<'de>,\n {\n /// Serde visitor for the auto-generated unnamed Avro union type.\n struct UnionStringLongDoubleBooleanVisitor;\n\n impl<'de> serde::de::Visitor<'de> for UnionStringLongDoubleBooleanVisitor {\n type Value = UnionStringLongDoubleBoolean;\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n formatter.write_str(\"a UnionStringLongDoubleBoolean\")\n }\n\n fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E>\n where\n E: serde::de::Error,\n {\n Ok(UnionStringLongDoubleBoolean::Long(value))\n }\n\n fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>\n where\n E: serde::de::Error,\n {\n Ok(UnionStringLongDoubleBoolean::Double(value))\n }\n\n fn visit_bool<E>(self, value: bool) -> Result<Self::Value, E>\n where\n E: serde::de::Error,\n {\n Ok(UnionStringLongDoubleBoolean::Boolean(value))\n }\n }\n\n deserializer.deserialize_any(UnionStringLongDoubleBooleanVisitor)\n }\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct Contact {\n pub extra: 
::std::collections::HashMap<String, Option<UnionStringLongDoubleBoolean>>,\n}\n\nimpl Default for Contact {\n fn default() -> Contact {\n Contact {\n extra: ::std::collections::HashMap::new(),\n }\n }\n}\n\"#;\n\n let g = Generator::builder()\n .use_avro_rs_unions(true)\n .build()\n .unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"AvroFileId\",\n \"fields\": [ {\n \"name\": \"id\",\n \"type\": [\n \"string\", {\n \"type\": \"record\",\n \"name\": \"AvroShortUUID\",\n \"fields\": [ {\n \"name\": \"mostBits\",\n \"type\": \"long\"\n }, {\n \"name\": \"leastBits\",\n \"type\": \"long\"\n } ]\n } ]\n } ]\n}\n\"#;\n\n let expected = r#\"\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct AvroShortUuid {\n #[serde(rename = \"mostBits\")]\n pub most_bits: i64,\n #[serde(rename = \"leastBits\")]\n pub least_bits: i64,\n}\n\nimpl Default for AvroShortUuid {\n fn default() -> AvroShortUuid {\n AvroShortUuid {\n most_bits: 0,\n least_bits: 0,\n }\n }\n}\n\n/// Auto-generated type for unnamed Avro union variants.\n#[derive(Debug, PartialEq, Clone, serde::Serialize)]\npub enum UnionStringAvroShortUuid {\n String(String),\n AvroShortUuid(AvroShortUuid),\n}\n\nimpl<'de> serde::Deserialize<'de> for UnionStringAvroShortUuid {\n fn deserialize<D>(deserializer: D) -> Result<UnionStringAvroShortUuid, D::Error>\n where\n D: serde::Deserializer<'de>,\n {\n /// Serde visitor for the auto-generated unnamed Avro union type.\n struct UnionStringAvroShortUuidVisitor;\n\n impl<'de> serde::de::Visitor<'de> for UnionStringAvroShortUuidVisitor {\n type Value = UnionStringAvroShortUuid;\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n formatter.write_str(\"a UnionStringAvroShortUuid\")\n }\n }\n\n deserializer.deserialize_any(UnionStringAvroShortUuidVisitor)\n }\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, 
serde::Serialize)]\n#[serde(default)]\npub struct AvroFileId {\n pub id: UnionStringAvroShortUuid,\n}\n\nimpl Default for AvroFileId {\n fn default() -> AvroFileId {\n AvroFileId {\n id: UnionStringAvroShortUuid::String(String::default()),\n }\n }\n}\n\"#;\n\n let g = Generator::builder()\n .use_avro_rs_unions(true)\n .build()\n .unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn nullable_gen() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"test\",\n \"fields\": [\n {\"name\": \"a\", \"type\": \"long\", \"default\": 42},\n {\"name\": \"b-b\", \"type\": \"string\", \"default\": \"na\"},\n {\"name\": \"c\", \"type\": [\"null\", \"int\"], \"default\": null}\n ]\n}\n\"#;\n\n let expected = r#\"\nmacro_rules! deser(\n ($name:ident, $rtype:ty, $val:expr) => (\n fn $name<'de, D>(deserializer: D) -> Result<$rtype, D::Error>\n where\n D: serde::Deserializer<'de>,\n {\n let opt = Option::deserialize(deserializer)?;\n Ok(opt.unwrap_or_else(|| $val))\n }\n );\n);\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct Test {\n #[serde(deserialize_with = \"nullable_test_a\")]\n pub a: i64,\n #[serde(rename = \"b-b\", deserialize_with = \"nullable_test_b_b\")]\n pub b_b: String,\n pub c: Option<i32>,\n}\ndeser!(nullable_test_a, i64, 42);\ndeser!(nullable_test_b_b, String, \"na\".to_owned());\n\nimpl Default for Test {\n fn default() -> Test {\n Test {\n a: 42,\n b_b: \"na\".to_owned(),\n c: None,\n }\n }\n}\n\"#;\n let g = Generator::builder().nullable(true).build().unwrap();\n assert_schema_gen!(g, expected, raw_schema);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn nullable_code() {\n use serde::{Deserialize, Deserializer};\n\n macro_rules! 
deser(\n ($name:ident, $rtype:ty, $val:expr) => (\n fn $name<'de, D>(deserializer: D) -> std::result::Result<$rtype, D::Error>\n where\n D: Deserializer<'de>,\n {\n let opt = Option::deserialize(deserializer)?;\n Ok(opt.unwrap_or_else(|| $val))\n }\n );\n );\n\n #[derive(Debug, PartialEq, serde::Deserialize, serde::Serialize)]\n #[serde(default)]\n pub struct Test {\n #[serde(deserialize_with = \"nullable_test_a\")]\n pub a: i64,\n #[serde(rename = \"b-b\", deserialize_with = \"nullable_test_b_b\")]\n pub b_b: String,\n pub c: Option<i32>,\n }\n deser!(nullable_test_a, i64, 42);\n deser!(nullable_test_b_b, String, \"na\".to_owned());\n\n impl Default for Test {\n fn default() -> Test {\n Test {\n a: 42,\n b_b: \"na\".to_owned(),\n c: None,\n }\n }\n }\n\n let json = r#\"{\"a\": null, \"b-b\": null, \"c\": null}\"#;\n let res: Test = serde_json::from_str(json).unwrap();\n assert_eq!(Test::default(), res);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn deps() {\n let raw_schema = r#\"\n{\n \"type\": \"record\",\n \"name\": \"User\",\n \"fields\": [\n {\"name\": \"name\", \"type\": \"string\", \"default\": \"unknown\"},\n {\"name\": \"address\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Address\",\n \"fields\": [\n {\"name\": \"city\", \"type\": \"string\", \"default\": \"unknown\"},\n {\"name\": \"country\",\n \"type\": {\"type\": \"enum\", \"name\": \"Country\", \"symbols\": [\"FR\", \"JP\"]}\n }\n ]\n }\n }\n ]\n}\n\"#;\n\n let schema = Schema::parse_str(&raw_schema).unwrap();\n let mut deps = deps_stack(&schema, vec![]);\n\n let s = deps.pop().unwrap();\n assert!(matches!(s, Schema::Enum{ name: Name { ref name, ..}, ..} if name == \"Country\"));\n\n let s = deps.pop().unwrap();\n assert!(matches!(s, Schema::Record{ name: Name { ref name, ..}, ..} if name == \"Address\"));\n\n let s = deps.pop().unwrap();\n assert!(matches!(s, Schema::Record{ name: Name { ref name, ..}, ..} if name == \"User\"));\n\n let s = deps.pop();\n 
assert!(matches!(s, None));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn cross_deps() -> std::result::Result<(), Box<dyn std::error::Error>> {\n use std::fs::File;\n use std::io::Write;\n use tempfile::tempdir;\n\n let dir = tempdir()?;\n\n let mut schema_a_file = File::create(dir.path().join(\"schema_a.avsc\"))?;\n let schema_a_str = r#\"\n{\n \"name\": \"A\",\n \"type\": \"record\",\n \"fields\": [ {\"name\": \"field_one\", \"type\": \"float\"} ]\n}\n\"#;\n schema_a_file.write_all(schema_a_str.as_bytes())?;\n\n let mut schema_b_file = File::create(dir.path().join(\"schema_b.avsc\"))?;\n let schema_b_str = r#\"\n{\n \"name\": \"B\",\n \"type\": \"record\",\n \"fields\": [ {\"name\": \"field_one\", \"type\": \"A\"} ]\n}\n\"#;\n schema_b_file.write_all(schema_b_str.as_bytes())?;\n\n let expected = r#\"\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct B {\n pub field_one: A,\n}\n\nimpl Default for B {\n fn default() -> B {\n B {\n field_one: A::default(),\n }\n }\n}\n\n#[derive(Debug, PartialEq, Clone, serde::Deserialize, serde::Serialize)]\n#[serde(default)]\npub struct A {\n pub field_one: f32,\n}\n\nimpl Default for A {\n fn default() -> A {\n A {\n field_one: 0.0,\n }\n }\n}\n\"#;\n\n let pattern = format!(\"{}/*.avsc\", dir.path().display());\n let source = Source::GlobPattern(pattern.as_str());\n let g = Generator::new()?;\n let mut buf = vec![];\n g.gen(&source, &mut buf)?;\n let res = String::from_utf8(buf)?;\n println!(\"{}\", res);\n\n assert_eq!(expected, res);\n\n drop(schema_a_file);\n drop(schema_b_file);\n dir.close()?;\n Ok(())\n }\n}" ]
f70a3e216f29d7a2a06ddf436f4868ffebf9601f
19,775
rs
Rust
src/state.rs
carlad/amethyst
8f026945c60dceba2b383e86cb46703e0ee8f362
[ "MIT" ]
null
null
null
src/state.rs
carlad/amethyst
8f026945c60dceba2b383e86cb46703e0ee8f362
[ "MIT" ]
null
null
null
src/state.rs
carlad/amethyst
8f026945c60dceba2b383e86cb46703e0ee8f362
[ "MIT" ]
null
null
null
//! Utilities for game state management. use amethyst_input::is_close_requested; use crate::{ecs::prelude::World, GameData, StateEvent}; use std::fmt::Result as FmtResult; use std::fmt::{Display, Formatter}; /// Error type for errors occurring in StateMachine #[derive(Debug)] pub enum StateError { NoStatesPresent, } impl Display for StateError { fn fmt(&self, fmt: &mut Formatter<'_>) -> FmtResult { match *self { StateError::NoStatesPresent => write!( fmt, "Tried to start state machine without any states present" ), } } } /// State data encapsulates the data sent to all state functions from the application main loop. pub struct StateData<'a, T> where T: 'a, { /// Main `World` pub world: &'a mut World, /// User defined game data pub data: &'a mut T, } impl<'a, T> StateData<'a, T> where T: 'a, { /// Create a new state data pub fn new(world: &'a mut World, data: &'a mut T) -> Self { StateData { world, data } } } /// Types of state transitions. /// T is the type of shared data between states. /// E is the type of events pub enum Trans<T, E> { /// Continue as normal. None, /// Remove the active state and resume the next state on the stack or stop /// if there are none. Pop, /// Pause the active state and push a new state onto the stack. Push(Box<dyn State<T, E>>), /// Remove the current state on the stack and insert a different one. Switch(Box<dyn State<T, E>>), /// Stop and remove all states and shut down the engine. Quit, } /// Event queue to trigger state `Trans` from other places than a `State`'s methods. /// # Example: /// ```rust, ignore /// world.write_resource::<EventChannel<TransEvent<MyGameData, StateEvent>>>().single_write(Box::new(|| Trans::Quit)); /// ``` /// /// Transitions will be executed sequentially by Amethyst's `CoreApplication` update loop. pub type TransEvent<T, E> = Box<dyn Fn() -> Trans<T, E> + Send + Sync + 'static>; /// An empty `Trans`. Made to be used with `EmptyState`. pub type EmptyTrans = Trans<(), StateEvent>; /// A simple default `Trans`. 
Made to be used with `SimpleState`. /// By default it contains a `GameData` as its `StateData` and doesn't have a custom event type. pub type SimpleTrans<'a, 'b> = Trans<GameData<'a, 'b>, StateEvent>; /// A trait which defines game states that can be used by the state machine. pub trait State<T, E: Send + Sync + 'static> { /// Executed when the game state begins. fn on_start(&mut self, _data: StateData<'_, T>) {} /// Executed when the game state exits. fn on_stop(&mut self, _data: StateData<'_, T>) {} /// Executed when a different game state is pushed onto the stack. fn on_pause(&mut self, _data: StateData<'_, T>) {} /// Executed when the application returns to this game state once again. fn on_resume(&mut self, _data: StateData<'_, T>) {} /// Executed on every frame before updating, for use in reacting to events. fn handle_event(&mut self, _data: StateData<'_, T>, _event: E) -> Trans<T, E> { Trans::None } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default), /// if this is the active state. fn fixed_update(&mut self, _data: StateData<'_, T>) -> Trans<T, E> { Trans::None } /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit), /// if this is the active state. fn update(&mut self, _data: StateData<'_, T>) -> Trans<T, E> { Trans::None } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_fixed_update(&mut self, _data: StateData<'_, T>) {} /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_update(&mut self, _data: StateData<'_, T>) {} } /// An empty `State` trait. 
It contains no `StateData` or custom `StateEvent`. pub trait EmptyState { /// Executed when the game state begins. fn on_start(&mut self, _data: StateData<'_, ()>) {} /// Executed when the game state exits. fn on_stop(&mut self, _data: StateData<'_, ()>) {} /// Executed when a different game state is pushed onto the stack. fn on_pause(&mut self, _data: StateData<'_, ()>) {} /// Executed when the application returns to this game state once again. fn on_resume(&mut self, _data: StateData<'_, ()>) {} /// Executed on every frame before updating, for use in reacting to events. fn handle_event(&mut self, _data: StateData<'_, ()>, event: StateEvent) -> EmptyTrans { if let StateEvent::Window(event) = &event { if is_close_requested(&event) { Trans::Quit } else { Trans::None } } else { Trans::None } } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default). fn fixed_update(&mut self, _data: StateData<'_, ()>) -> EmptyTrans { Trans::None } /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit). fn update(&mut self, _data: StateData<'_, ()>) -> EmptyTrans { Trans::None } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_fixed_update(&mut self, _data: StateData<'_, ()>) {} /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_update(&mut self, _data: StateData<'_, ()>) {} } impl<T: EmptyState> State<(), StateEvent> for T { /// Executed when the game state begins. fn on_start(&mut self, data: StateData<'_, ()>) { self.on_start(data) } /// Executed when the game state exits. 
fn on_stop(&mut self, data: StateData<'_, ()>) { self.on_stop(data) } /// Executed when a different game state is pushed onto the stack. fn on_pause(&mut self, data: StateData<'_, ()>) { self.on_pause(data) } /// Executed when the application returns to this game state once again. fn on_resume(&mut self, data: StateData<'_, ()>) { self.on_resume(data) } /// Executed on every frame before updating, for use in reacting to events. fn handle_event(&mut self, data: StateData<'_, ()>, event: StateEvent) -> EmptyTrans { self.handle_event(data, event) } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default). fn fixed_update(&mut self, data: StateData<'_, ()>) -> EmptyTrans { self.fixed_update(data) } /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit). fn update(&mut self, data: StateData<'_, ()>) -> EmptyTrans { self.update(data) } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_fixed_update(&mut self, data: StateData<'_, ()>) { self.shadow_fixed_update(data); } /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_update(&mut self, data: StateData<'_, ()>) { self.shadow_update(data); } } /// A simple `State` trait. It contains `GameData` as its `StateData` and no custom `StateEvent`. pub trait SimpleState<'a, 'b> { /// Executed when the game state begins. fn on_start(&mut self, _data: StateData<'_, GameData<'_, '_>>) {} /// Executed when the game state exits. 
fn on_stop(&mut self, _data: StateData<'_, GameData<'_, '_>>) {} /// Executed when a different game state is pushed onto the stack. fn on_pause(&mut self, _data: StateData<'_, GameData<'_, '_>>) {} /// Executed when the application returns to this game state once again. fn on_resume(&mut self, _data: StateData<'_, GameData<'_, '_>>) {} /// Executed on every frame before updating, for use in reacting to events. fn handle_event( &mut self, _data: StateData<'_, GameData<'_, '_>>, event: StateEvent, ) -> SimpleTrans<'a, 'b> { if let StateEvent::Window(event) = &event { if is_close_requested(&event) { Trans::Quit } else { Trans::None } } else { Trans::None } } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default). fn fixed_update(&mut self, _data: StateData<'_, GameData<'_, '_>>) -> SimpleTrans<'a, 'b> { Trans::None } /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit). fn update(&mut self, _data: &mut StateData<'_, GameData<'_, '_>>) -> SimpleTrans<'a, 'b> { Trans::None } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_fixed_update(&mut self, _data: StateData<'_, GameData<'_, '_>>) {} /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_update(&mut self, _data: StateData<'_, GameData<'_, '_>>) {} } impl<'a, 'b, T: SimpleState<'a, 'b>> State<GameData<'a, 'b>, StateEvent> for T { //pub trait SimpleState<'a,'b>: State<GameData<'a,'b>,()> { /// Executed when the game state begins. 
fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) { self.on_start(data) } /// Executed when the game state exits. fn on_stop(&mut self, data: StateData<'_, GameData<'_, '_>>) { self.on_stop(data) } /// Executed when a different game state is pushed onto the stack. fn on_pause(&mut self, data: StateData<'_, GameData<'_, '_>>) { self.on_pause(data) } /// Executed when the application returns to this game state once again. fn on_resume(&mut self, data: StateData<'_, GameData<'_, '_>>) { self.on_resume(data) } /// Executed on every frame before updating, for use in reacting to events. fn handle_event( &mut self, data: StateData<'_, GameData<'_, '_>>, event: StateEvent, ) -> SimpleTrans<'a, 'b> { self.handle_event(data, event) } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default). fn fixed_update(&mut self, data: StateData<'_, GameData<'_, '_>>) -> SimpleTrans<'a, 'b> { self.fixed_update(data) } /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit). fn update(&mut self, mut data: StateData<'_, GameData<'_, '_>>) -> SimpleTrans<'a, 'b> { let r = self.update(&mut data); data.data.update(&data.world); r } /// Executed repeatedly at stable, predictable intervals (1/60th of a second /// by default), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_fixed_update(&mut self, data: StateData<'_, GameData<'_, '_>>) { self.shadow_fixed_update(data); } /// Executed on every frame immediately, as fast as the engine will allow (taking into account the frame rate limit), /// even when this is not the active state, /// as long as this state is on the [StateMachine](struct.StateMachine.html)'s state-stack. fn shadow_update(&mut self, data: StateData<'_, GameData<'_, '_>>) { self.shadow_update(data); } } /// A simple stack-based state machine (pushdown automaton). 
#[derive(Derivative)] #[derivative(Debug)] pub struct StateMachine<'a, T, E> { running: bool, #[derivative(Debug = "ignore")] state_stack: Vec<Box<dyn State<T, E> + 'a>>, } impl<'a, T, E: Send + Sync + 'static> StateMachine<'a, T, E> { /// Creates a new state machine with the given initial state. pub fn new<S: State<T, E> + 'a>(initial_state: S) -> StateMachine<'a, T, E> { StateMachine { running: false, state_stack: vec![Box::new(initial_state)], } } /// Checks whether the state machine is running. pub fn is_running(&self) -> bool { self.running } /// Initializes the state machine. pub fn start(&mut self, data: StateData<'_, T>) -> Result<(), StateError> { if !self.running { let state = self .state_stack .last_mut() .ok_or(StateError::NoStatesPresent)?; state.on_start(data); self.running = true; } Ok(()) } /// Passes a single event to the active state to handle. pub fn handle_event(&mut self, data: StateData<'_, T>, event: E) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.handle_event(StateData { world, data }, event), None => Trans::None, }; self.transition(trans, StateData { world, data }); } } /// Updates the currently active state at a steady, fixed interval. pub fn fixed_update(&mut self, data: StateData<'_, T>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.fixed_update(StateData { world, data }), None => Trans::None, }; for state in self.state_stack.iter_mut() { state.shadow_fixed_update(StateData { world, data }); } self.transition(trans, StateData { world, data }); } } /// Updates the currently active state immediately. 
pub fn update(&mut self, data: StateData<'_, T>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.update(StateData { world, data }), None => Trans::None, }; for state in self.state_stack.iter_mut() { state.shadow_update(StateData { world, data }); } self.transition(trans, StateData { world, data }); } } /// Performs a state transition. /// Usually called by update or fixed_update by the user's defined `State`. /// This method can also be called when there are one or multiple `Trans` stored in the /// global `EventChannel<TransEvent<T, E>>`. Such `Trans` will be passed to this method /// sequentially in the order of insertion. pub fn transition(&mut self, request: Trans<T, E>, data: StateData<'_, T>) { if self.running { match request { Trans::None => (), Trans::Pop => self.pop(data), Trans::Push(state) => self.push(state, data), Trans::Switch(state) => self.switch(state, data), Trans::Quit => self.stop(data), } } } /// Removes the current state on the stack and inserts a different one. fn switch(&mut self, state: Box<dyn State<T, E>>, data: StateData<'_, T>) { if self.running { let StateData { world, data } = data; if let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } self.state_stack.push(state); //State was just pushed, thus pop will always succeed let state = self.state_stack.last_mut().unwrap(); state.on_start(StateData { world, data }); } } /// Pauses the active state and pushes a new state onto the state stack. 
fn push(&mut self, state: Box<dyn State<T, E>>, data: StateData<'_, T>) { if self.running { let StateData { world, data } = data; if let Some(state) = self.state_stack.last_mut() { state.on_pause(StateData { world, data }); } self.state_stack.push(state); //State was just pushed, thus pop will always succeed let state = self.state_stack.last_mut().unwrap(); state.on_start(StateData { world, data }); } } /// Stops and removes the active state and un-pauses the next state on the /// stack (if any). fn pop(&mut self, data: StateData<'_, T>) { if self.running { let StateData { world, data } = data; if let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } if let Some(state) = self.state_stack.last_mut() { state.on_resume(StateData { world, data }); } else { self.running = false; } } } /// Shuts the state machine down. pub(crate) fn stop(&mut self, data: StateData<'_, T>) { if self.running { let StateData { world, data } = data; while let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } self.running = false; } } } #[cfg(test)] mod tests { use super::*; struct State1(u8); struct State2; impl State<(), ()> for State1 { fn update(&mut self, _: StateData<'_, ()>) -> Trans<(), ()> { if self.0 > 0 { self.0 -= 1; Trans::None } else { Trans::Switch(Box::new(State2)) } } } impl State<(), ()> for State2 { fn update(&mut self, _: StateData<'_, ()>) -> Trans<(), ()> { Trans::Pop } } #[test] fn switch_pop() { use crate::ecs::prelude::World; let mut world = World::new(); let mut sm = StateMachine::new(State1(7)); // Unwrap here is fine because start can only fail when there are no states in the machine. sm.start(StateData::new(&mut world, &mut ())).unwrap(); for _ in 0..8 { sm.update(StateData::new(&mut world, &mut ())); assert!(sm.is_running()); } sm.update(StateData::new(&mut world, &mut ())); assert!(!sm.is_running()); } }
36.217949
121
0.598281
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn switch_pop() {\n use crate::ecs::prelude::World;\n\n let mut world = World::new();\n\n let mut sm = StateMachine::new(State1(7));\n // Unwrap here is fine because start can only fail when there are no states in the machine.\n sm.start(StateData::new(&mut world, &mut ())).unwrap();\n\n for _ in 0..8 {\n sm.update(StateData::new(&mut world, &mut ()));\n assert!(sm.is_running());\n }\n\n sm.update(StateData::new(&mut world, &mut ()));\n assert!(!sm.is_running());\n }\n}" ]
f70a86cf4eddd096740db19ebf13c998e75da57b
6,256
rs
Rust
src/lib.rs
pirogoeth/unix-named-pipe
78bd325c82d13451027bc0b5c686f972358b8e9a
[ "MIT" ]
6
2018-10-15T00:00:13.000Z
2019-12-24T18:30:29.000Z
src/lib.rs
pirogoeth/unix-named-pipe
78bd325c82d13451027bc0b5c686f972358b8e9a
[ "MIT" ]
1
2018-10-13T19:30:57.000Z
2018-10-18T05:14:40.000Z
src/lib.rs
pirogoeth/unix-named-pipe
78bd325c82d13451027bc0b5c686f972358b8e9a
[ "MIT" ]
3
2018-12-02T19:10:19.000Z
2021-12-26T14:50:16.000Z
//! Provides utilities for working with Unix named pipes / FIFOs. extern crate errno; extern crate libc; use libc::{c_int, mkfifo, mode_t, EACCES, EEXIST, ENOENT}; use std::ffi::CString; use std::fs::{File, OpenOptions}; use std::io; use std::os::unix::fs::OpenOptionsExt; use std::path::Path; mod ext; pub use self::ext::*; /// Creates a new named pipe at the path given as `path`. /// Pipe will be created with mode `mode` if given, else `0o644` will be used. /// /// # Examples /// /// Without an explicit mode: /// /// ``` /// # extern crate unix_named_pipe; /// # use std::fs; /// # let file_name = "/tmp/fifo.0"; /// unix_named_pipe::create(file_name, None).expect("could not create fifo"); /// # fs::remove_file(file_name).expect("could not remove fifo"); /// ``` /// /// With an explicit mode: /// /// ``` /// # extern crate unix_named_pipe; /// # use std::fs; /// # let file_name = "/tmp/fifo.1"; /// unix_named_pipe::create(file_name, Some(0o740)).expect("could not create fifo"); /// # fs::remove_file(file_name).unwrap(); /// ``` pub fn create<P: AsRef<Path>>(path: P, mode: Option<u32>) -> io::Result<()> { let path = CString::new(path.as_ref().to_str().unwrap())?; let mode = mode.unwrap_or(0o644); let result: c_int = unsafe { mkfifo(path.as_ptr(), mode as mode_t) }; let result: i32 = result.into(); if result == 0 { return Ok(()); } let error = errno::errno(); match error.0 { EACCES => { return Err(io::Error::new( io::ErrorKind::PermissionDenied, format!("could not open {:?}: {}", path, error), )); } EEXIST => { return Err(io::Error::new( io::ErrorKind::AlreadyExists, format!("could not open {:?}: {}", path, error), )); } ENOENT => { return Err(io::Error::new( io::ErrorKind::NotFound, format!("could not open {:?}: {}", path, error), )); } _ => { return Err(io::Error::new( io::ErrorKind::Other, format!("could not open {:?}: {}", path, error), )); } } } /// Opens a named pipe for reading. The file is opened for non-blocking reads /// a la `libc`'s `O_NONBLOCK`. 
/// /// # Examples /// /// ``` /// # extern crate unix_named_pipe; /// # use std::fs; /// # let file_name = "/tmp/fifo.2"; /// # unix_named_pipe::create(file_name, None).unwrap(); /// let file = unix_named_pipe::open_read(file_name).expect("could not open fifo for reading"); /// # fs::remove_file(file_name).unwrap(); /// ``` pub fn open_read<P: AsRef<Path>>(path: P) -> io::Result<File> { OpenOptions::new() .read(true) .custom_flags(libc::O_NONBLOCK) .open(path) } /// Opens a named pipe for writing. The file is opened for non-blocking writes /// a la `libc`'s `O_NONBLOCK`. /// /// # Examples /// /// ``` /// # extern crate unix_named_pipe; /// # use std::fs; /// # let file_name = "/tmp/fifo.3"; /// # unix_named_pipe::create(file_name, Some(0o777)).unwrap();; /// # let read = unix_named_pipe::open_read(file_name).unwrap(); /// let file = unix_named_pipe::open_write(file_name).expect("could not open fifo for writing"); /// # fs::remove_file(file_name).unwrap(); /// ``` /// /// # Errors /// /// - If there is no pipe receiver configured when `open_write` is called, /// `Err(io::ErrorKind::Other)` will be returned with /// `code = 6, message = "Device not configured"`. 
pub fn open_write<P: AsRef<Path>>(path: P) -> io::Result<File> { OpenOptions::new() .write(true) .append(true) .custom_flags(libc::O_NONBLOCK) .open(path) } #[cfg(test)] mod tests { extern crate fs2; use super::*; use fs2::FileExt; use std::fs; use std::io::{self, Error, ErrorKind, Read, Write}; fn lock_active_test() -> io::Result<fs::File> { let file = File::create("/tmp/unix-named-pipe_tests.lock")?; file.lock_exclusive()?; Ok(file) } #[test] fn create_new_pipe() { let lock = lock_active_test().unwrap(); let filename = "/tmp/pipe"; let _ = create(filename, None).expect("could not create pipe"); fs::remove_file(filename).expect("could not remove test pipe"); lock.unlock().unwrap(); } #[test] fn create_pipe_eexists() { let lock = lock_active_test().unwrap(); let filename = "/tmp/pipe"; fs::write(filename, "").expect("could not write test file"); let pipe = create(filename, None); assert_eq!(pipe.is_err(), true); let err: Error = pipe.unwrap_err(); assert_eq!(err.kind(), ErrorKind::AlreadyExists); fs::remove_file(filename).expect("could not remove test file"); lock.unlock().unwrap(); } #[test] fn create_pipe_enoent() { let filename = "/notadir/pipe"; let pipe = create(filename, None); assert_eq!(pipe.is_err(), true); let err: Error = pipe.unwrap_err(); assert_eq!(err.kind(), ErrorKind::NotFound); } #[test] fn open_pipe_read() { let lock = lock_active_test().unwrap(); let filename = "/tmp/test.pipe"; let _ = create(filename, None).expect("could not make test pipe"); let contents: [u8; 4] = [0xca, 0xfe, 0xba, 0xbe]; let mut actual: [u8; 4] = [0; 4]; // Create a reader first let mut read_file = open_read(filename).expect("could not open test pipe for reading"); // Write some data to the pipe { let mut write_file = open_write(filename).expect("could not open test pipe for writing"); write_file .write(&contents) .expect("could not write test data to pipe"); write_file.flush().expect("could not flush test pipe"); } // Read some data from the pipe read_file 
.read_exact(&mut actual) .expect("could not read test data from pipe"); assert_eq!(contents, actual); fs::remove_file(filename).expect("could not remove test file"); lock.unlock().unwrap(); } }
29.370892
96
0.570013
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn create_new_pipe() {\n let lock = lock_active_test().unwrap();\n\n let filename = \"/tmp/pipe\";\n let _ = create(filename, None).expect(\"could not create pipe\");\n\n fs::remove_file(filename).expect(\"could not remove test pipe\");\n lock.unlock().unwrap();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn create_pipe_eexists() {\n let lock = lock_active_test().unwrap();\n\n let filename = \"/tmp/pipe\";\n fs::write(filename, \"\").expect(\"could not write test file\");\n\n let pipe = create(filename, None);\n assert_eq!(pipe.is_err(), true);\n\n let err: Error = pipe.unwrap_err();\n assert_eq!(err.kind(), ErrorKind::AlreadyExists);\n\n fs::remove_file(filename).expect(\"could not remove test file\");\n lock.unlock().unwrap();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn create_pipe_enoent() {\n let filename = \"/notadir/pipe\";\n let pipe = create(filename, None);\n assert_eq!(pipe.is_err(), true);\n\n let err: Error = pipe.unwrap_err();\n assert_eq!(err.kind(), ErrorKind::NotFound);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn open_pipe_read() {\n let lock = lock_active_test().unwrap();\n\n let filename = \"/tmp/test.pipe\";\n let _ = create(filename, None).expect(\"could not make test pipe\");\n\n let contents: [u8; 4] = [0xca, 0xfe, 0xba, 0xbe];\n let mut actual: [u8; 4] = [0; 4];\n\n // Create a reader first\n let mut read_file = open_read(filename).expect(\"could not open test pipe for reading\");\n\n // Write some data to the pipe\n {\n let mut write_file =\n open_write(filename).expect(\"could not open test pipe for writing\");\n write_file\n .write(&contents)\n .expect(\"could not write test data to pipe\");\n write_file.flush().expect(\"could not flush test pipe\");\n }\n\n // Read some data from the pipe\n read_file\n .read_exact(&mut actual)\n .expect(\"could not read test data from pipe\");\n assert_eq!(contents, actual);\n\n 
fs::remove_file(filename).expect(\"could not remove test file\");\n lock.unlock().unwrap();\n }\n}" ]
f70ac7c6614c85b2df4167ffe78066d0e898d8cc
14,794
rs
Rust
compiler/src/util.rs
aviansie-ben/yet-another-static-java-compiler
9207c1579e0f555ff1cd83a09b380f7415270d36
[ "MIT" ]
2
2020-09-28T18:13:22.000Z
2020-10-14T20:32:45.000Z
compiler/src/util.rs
aviansie-ben/yet-another-static-java-compiler
9207c1579e0f555ff1cd83a09b380f7415270d36
[ "MIT" ]
null
null
null
compiler/src/util.rs
aviansie-ben/yet-another-static-java-compiler
9207c1579e0f555ff1cd83a09b380f7415270d36
[ "MIT" ]
null
null
null
use std::cell::UnsafeCell; use std::collections::HashMap; use std::hash::Hash; use std::iter; use std::marker::PhantomData; use std::ops::{Deref, DerefMut}; fn into_pos(i: usize) -> (usize, u8) { (i >> 3, (i & 0x7) as u8) } fn from_pos(byte: usize, bit: u8) -> usize { (byte << 3) + (bit as usize) } pub trait BitVecIndex { fn into_index(self) -> usize; fn from_index(i: usize) -> Self; } impl BitVecIndex for usize { fn into_index(self) -> usize { self } fn from_index(i: usize) -> Self { i } } #[derive(PartialEq, Eq)] struct BitVecInternal { bits: Vec<u8> } impl BitVecInternal { pub fn new() -> Self { BitVecInternal { bits: vec![] } } pub fn clear(&mut self) { for b in self.bits.iter_mut() { *b = 0; }; } pub fn get(&self, i: usize) -> bool { let (byte, bit) = into_pos(i); ((self.bits.get(byte).copied().unwrap_or(0) >> bit) & 1) != 0 } pub fn set(&mut self, i: usize, val: bool) -> bool { let (byte, bit) = into_pos(i); if self.bits.len() <= byte { self.bits.extend(iter::repeat(0).take(byte + 1 - self.bits.len())); }; let old = self.bits[byte]; let new = if val { old | (1 << bit) } else { old & !(1 << bit) }; self.bits[byte] = new; ((old >> bit) & 1) != 0 } pub fn union(&mut self, other: &Self) -> bool { let mut modified = false; for (b1, b2) in self.bits.iter_mut().zip(other.bits.iter()) { let b = *b1 | *b2; modified = modified || b != *b1; *b1 = b; }; if other.bits.len() > self.bits.len() { self.bits.reserve(other.bits.len() - self.bits.len()); for b in other.bits[self.bits.len()..].iter() { modified = modified || *b != 0; self.bits.push(*b); }; }; modified } pub fn intersect(&mut self, other: &Self) -> bool { let mut modified = false; for (b1, b2) in self.bits.iter_mut().zip(other.bits.iter()) { let b = *b1 & *b2; modified = modified || b != *b1; *b1 = b; }; if self.bits.len() > other.bits.len() { for b in self.bits[other.bits.len()..].iter_mut() { modified = modified || *b != 0; *b = 0; }; }; modified } pub fn difference(&mut self, other: &Self) -> bool { let mut 
modified = false; for (b1, b2) in self.bits.iter_mut().zip(other.bits.iter()) { let b = *b1 & !*b2; modified = modified || b != *b1; *b1 = b; }; modified } pub fn iter<'a>(&'a self) -> impl Iterator<Item=usize> + 'a { BitVecIter(self, 0) } } impl Clone for BitVecInternal { fn clone(&self) -> Self { BitVecInternal { bits: self.bits.clone() } } fn clone_from(&mut self, other: &BitVecInternal) { self.bits.clone_from(&other.bits); } } struct BitVecIter<'a>(&'a BitVecInternal, usize); impl <'a> Iterator for BitVecIter<'a> { type Item = usize; fn next(&mut self) -> Option<usize> { let (mut byte, bit) = into_pos(self.1); if byte >= self.0.bits.len() { return None; }; let tz = (self.0.bits[byte] >> bit).trailing_zeros(); if tz != 8 { self.1 += (tz + 1) as usize; return Some(self.1 - 1); }; for b in self.0.bits[(byte + 1)..].iter().copied() { byte += 1; if b != 0 { let tz = b.trailing_zeros(); let i = from_pos(byte, tz as u8); self.1 = i + 1; return Some(i); }; }; self.1 = !0; None } } #[derive(PartialEq, Eq)] pub struct BitVec<T: BitVecIndex> { bits: BitVecInternal, _data: PhantomData<fn (T) -> ()> } impl <T: BitVecIndex> BitVec<T> { pub fn new() -> Self { Self { bits: BitVecInternal::new(), _data: PhantomData } } pub fn clear(&mut self) { self.bits.clear(); } pub fn get(&self, i: T) -> bool { self.bits.get(i.into_index()) } pub fn set(&mut self, i: T, val: bool) -> bool { self.bits.set(i.into_index(), val) } pub fn union(&mut self, other: &Self) -> bool { self.bits.union(&other.bits) } pub fn intersect(&mut self, other: &Self) -> bool { self.bits.intersect(&other.bits) } pub fn difference(&mut self, other: &Self) -> bool { self.bits.difference(&other.bits) } pub fn iter<'a>(&'a self) -> impl Iterator<Item=T> + 'a { self.bits.iter().map(BitVecIndex::from_index) } } impl <T: BitVecIndex> Clone for BitVec<T> { fn clone(&self) -> Self { BitVec { bits: self.bits.clone(), _data: PhantomData } } fn clone_from(&mut self, other: &BitVec<T>) { self.bits.clone_from(&other.bits); } } 
enum LazyImpl<T, F: FnOnce () -> T> { Initialized(T), Uninitialized(F), Poisoned } pub struct Lazy<T, F: FnOnce () -> T>(UnsafeCell<LazyImpl<T, F>>); impl <T, F: FnOnce () -> T> Lazy<T, F> { pub fn new(f: F) -> Lazy<T, F> { Lazy(UnsafeCell::new(LazyImpl::Uninitialized(f))) } pub fn unwrap(l: Lazy<T, F>) -> Option<T> { match l.0.into_inner() { LazyImpl::Initialized(t) => Some(t), LazyImpl::Uninitialized(_) => None, LazyImpl::Poisoned => None } } pub fn force_init(l: &Lazy<T, F>) { unsafe { match *l.0.get() { LazyImpl::Initialized(_) => {}, LazyImpl::Uninitialized(_) => match std::mem::replace(&mut *l.0.get(), LazyImpl::Poisoned) { LazyImpl::Uninitialized(f) => { *l.0.get() = LazyImpl::Initialized(f()); }, _ => unreachable!() }, LazyImpl::Poisoned => panic!("Lazy constructor previously panicked") } } } } impl <T, F: FnOnce() -> T> Deref for Lazy<T, F> { type Target = T; fn deref(&self) -> &T { Lazy::force_init(self); unsafe { match *self.0.get() { LazyImpl::Initialized(ref t) => t, _ => unreachable!() } } } } impl <T, F: FnOnce() -> T> DerefMut for Lazy<T, F> { fn deref_mut(&mut self) -> &mut T { Lazy::force_init(self); unsafe { match *self.0.get() { LazyImpl::Initialized(ref mut t) => t, _ => unreachable!() } } } } impl <T, F: FnOnce() -> T> std::panic::UnwindSafe for Lazy<T, F> {} impl <T, F: FnOnce() -> T> std::panic::RefUnwindSafe for Lazy<T, F> {} pub struct FuncCache<T: Clone + Eq + Hash, U, F: FnMut (T) -> U> { func: F, cache: HashMap<T, U> } impl <T: Clone + Eq + Hash, U, F: FnMut (T) -> U> FuncCache<T, U, F> { pub fn new(func: F) -> Self { FuncCache { func, cache: HashMap::new() } } pub fn clear(&mut self) { self.cache.clear() } pub fn get(&mut self, t: T) -> &U { let func = &mut self.func; self.cache.entry(t.clone()).or_insert_with(|| { func(t) }) } } #[cfg(test)] mod tests { use std::cell::Cell; use std::ops::{Deref, DerefMut}; use itertools::Itertools; use super::{BitVec, FuncCache, Lazy}; #[test] fn test_get_set() { let mut bv: BitVec<usize> = 
BitVec::new(); assert!(!bv.get(0)); assert!(!bv.get(7)); assert!(!bv.get(8)); bv.set(7, true); assert!(!bv.get(0)); assert!(bv.get(7)); assert!(!bv.get(8)); bv.set(8, false); assert!(!bv.get(0)); assert!(bv.get(7)); assert!(!bv.get(8)); bv.set(8, true); assert!(!bv.get(0)); assert!(bv.get(7)); assert!(bv.get(8)); bv.set(7, false); assert!(!bv.get(0)); assert!(!bv.get(7)); assert!(bv.get(8)); bv.set(0, true); bv.set(8, false); assert!(bv.get(0)); assert!(!bv.get(7)); assert!(!bv.get(8)); } #[test] fn test_clear() { let mut bv: BitVec<usize> = BitVec::new(); bv.set(0, true); bv.set(8, true); bv.set(9, true); bv.clear(); assert!(!bv.get(0)); assert!(!bv.get(1)); assert!(!bv.get(8)); assert!(!bv.get(9)); } #[test] fn test_union() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(0, true); bv1.set(1, true); bv2.set(1, true); bv2.set(3, true); bv1.union(&bv2); assert!(bv1.get(0)); assert!(bv1.get(1)); assert!(!bv1.get(2)); assert!(bv1.get(3)); } #[test] fn test_union_bv1_longer() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(8, true); bv2.set(0, true); bv1.union(&bv2); assert!(bv1.get(0)); assert!(bv1.get(8)); } #[test] fn test_union_bv2_longer() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(0, true); bv2.set(8, true); bv1.union(&bv2); assert!(bv1.get(0)); assert!(bv1.get(8)); } #[test] fn test_intersect() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(0, true); bv1.set(1, true); bv2.set(1, true); bv2.set(3, true); bv1.intersect(&bv2); assert!(!bv1.get(0)); assert!(bv1.get(1)); assert!(!bv1.get(2)); assert!(!bv1.get(3)); } #[test] fn test_intersect_bv1_longer() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(8, true); bv2.set(0, true); bv1.intersect(&bv2); assert!(!bv1.get(0)); assert!(!bv1.get(8)); } #[test] fn 
test_intersect_bv2_longer() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(0, true); bv2.set(8, true); bv1.intersect(&bv2); assert!(!bv1.get(0)); assert!(!bv1.get(8)); } #[test] fn test_difference() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(0, true); bv1.set(1, true); bv2.set(1, true); bv2.set(3, true); bv1.union(&bv2); assert!(bv1.get(0)); assert!(bv1.get(1)); assert!(!bv1.get(2)); assert!(bv1.get(3)); } #[test] fn test_difference_bv1_longer() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(8, true); bv2.set(0, true); bv1.difference(&bv2); assert!(!bv1.get(0)); assert!(bv1.get(8)); } #[test] fn test_difference_bv2_longer() { let mut bv1: BitVec<usize> = BitVec::new(); let mut bv2: BitVec<usize> = BitVec::new(); bv1.set(0, true); bv2.set(8, true); bv1.difference(&bv2); assert!(bv1.get(0)); assert!(!bv1.get(8)); } #[test] fn test_iter() { let mut bv: BitVec<usize> = BitVec::new(); bv.set(0, true); bv.set(7, true); bv.set(8, true); bv.set(9, true); bv.set(16, true); assert_eq!( bv.iter().collect_vec(), vec![0, 7, 8, 9, 16] ); } #[test] fn test_lazy_unused_no_side_effects() { let mut run = false; let _ = Lazy::new(|| { run = true; () }); assert!(!run); } #[test] fn test_lazy_deref() { let lazy = Lazy::new(|| 100i32); assert_eq!(100, *Deref::deref(&lazy)); assert_eq!(100, *Deref::deref(&lazy)); } #[test] fn test_lazy_deref_mut() { let mut lazy = Lazy::new(|| 100i32); assert_eq!(100, *DerefMut::deref_mut(&mut lazy)); assert_eq!(100, *DerefMut::deref_mut(&mut lazy)); *lazy = 200i32; assert_eq!(200, *Deref::deref(&lazy)); assert_eq!(200, *DerefMut::deref_mut(&mut lazy)); } fn poisoned_lazy() -> Lazy<i32, impl FnOnce() -> i32> { let lazy = Lazy::new(|| panic!("Fake panic for testing")); let _ = std::panic::catch_unwind(|| Lazy::force_init(&lazy)); lazy } #[test] #[should_panic(expected = "Lazy constructor previously 
panicked")] fn test_lazy_poison_deref() { let _ = Deref::deref(&poisoned_lazy()); } #[test] #[should_panic(expected = "Lazy constructor previously panicked")] fn test_lazy_poison_deref_mut() { let _ = DerefMut::deref_mut(&mut poisoned_lazy()); } #[test] fn test_lazy_unwrap() { assert_eq!(None, Lazy::unwrap(Lazy::new(|| 100i32))); assert_eq!(Some(100), Lazy::unwrap({ let lazy = Lazy::new(|| 100i32); Lazy::force_init(&lazy); lazy })); assert_eq!(None, Lazy::unwrap(poisoned_lazy())); } #[test] fn test_cache_basic() { let mut cache = FuncCache::new(|x| x); assert_eq!(*cache.get(1), 1); assert_eq!(*cache.get(2), 2); assert_eq!(*cache.get(3), 3); } #[test] fn test_cache_multicall() { let num_calls = Cell::new(0); let mut cache = FuncCache::new(|_| { num_calls.set(num_calls.get() + 1); num_calls.get() - 1 }); assert_eq!(*cache.get(1), 0); assert_eq!(*cache.get(1), 0); assert_eq!(num_calls.get(), 1); assert_eq!(*cache.get(0), 1); assert_eq!(*cache.get(0), 1); assert_eq!(num_calls.get(), 2); assert_eq!(*cache.get(1), 0); assert_eq!(num_calls.get(), 2); } #[test] fn test_cache_clear() { let num_calls = Cell::new(0); let mut cache = FuncCache::new(|_| { num_calls.set(num_calls.get() + 1); num_calls.get() - 1 }); assert_eq!(*cache.get(1), 0); assert_eq!(num_calls.get(), 1); cache.clear(); assert_eq!(*cache.get(1), 1); assert_eq!(num_calls.get(), 2); } }
23.371248
112
0.485873
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_get_set() {\n let mut bv: BitVec<usize> = BitVec::new();\n\n assert!(!bv.get(0));\n assert!(!bv.get(7));\n assert!(!bv.get(8));\n\n bv.set(7, true);\n\n assert!(!bv.get(0));\n assert!(bv.get(7));\n assert!(!bv.get(8));\n\n bv.set(8, false);\n\n assert!(!bv.get(0));\n assert!(bv.get(7));\n assert!(!bv.get(8));\n\n bv.set(8, true);\n\n assert!(!bv.get(0));\n assert!(bv.get(7));\n assert!(bv.get(8));\n\n bv.set(7, false);\n\n assert!(!bv.get(0));\n assert!(!bv.get(7));\n assert!(bv.get(8));\n\n bv.set(0, true);\n bv.set(8, false);\n\n assert!(bv.get(0));\n assert!(!bv.get(7));\n assert!(!bv.get(8));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_clear() {\n let mut bv: BitVec<usize> = BitVec::new();\n\n bv.set(0, true);\n bv.set(8, true);\n bv.set(9, true);\n bv.clear();\n\n assert!(!bv.get(0));\n assert!(!bv.get(1));\n assert!(!bv.get(8));\n assert!(!bv.get(9));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_union() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(0, true);\n bv1.set(1, true);\n bv2.set(1, true);\n bv2.set(3, true);\n\n bv1.union(&bv2);\n\n assert!(bv1.get(0));\n assert!(bv1.get(1));\n assert!(!bv1.get(2));\n assert!(bv1.get(3));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_union_bv1_longer() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(8, true);\n bv2.set(0, true);\n\n bv1.union(&bv2);\n\n assert!(bv1.get(0));\n assert!(bv1.get(8));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_union_bv2_longer() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(0, true);\n bv2.set(8, true);\n\n bv1.union(&bv2);\n\n assert!(bv1.get(0));\n assert!(bv1.get(8));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn 
test_intersect() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(0, true);\n bv1.set(1, true);\n bv2.set(1, true);\n bv2.set(3, true);\n\n bv1.intersect(&bv2);\n\n assert!(!bv1.get(0));\n assert!(bv1.get(1));\n assert!(!bv1.get(2));\n assert!(!bv1.get(3));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_intersect_bv1_longer() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(8, true);\n bv2.set(0, true);\n\n bv1.intersect(&bv2);\n\n assert!(!bv1.get(0));\n assert!(!bv1.get(8));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_intersect_bv2_longer() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(0, true);\n bv2.set(8, true);\n\n bv1.intersect(&bv2);\n\n assert!(!bv1.get(0));\n assert!(!bv1.get(8));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_difference() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(0, true);\n bv1.set(1, true);\n bv2.set(1, true);\n bv2.set(3, true);\n\n bv1.union(&bv2);\n\n assert!(bv1.get(0));\n assert!(bv1.get(1));\n assert!(!bv1.get(2));\n assert!(bv1.get(3));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_difference_bv1_longer() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(8, true);\n bv2.set(0, true);\n\n bv1.difference(&bv2);\n\n assert!(!bv1.get(0));\n assert!(bv1.get(8));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_difference_bv2_longer() {\n let mut bv1: BitVec<usize> = BitVec::new();\n let mut bv2: BitVec<usize> = BitVec::new();\n\n bv1.set(0, true);\n bv2.set(8, true);\n\n bv1.difference(&bv2);\n\n assert!(bv1.get(0));\n assert!(!bv1.get(8));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_iter() 
{\n let mut bv: BitVec<usize> = BitVec::new();\n\n bv.set(0, true);\n bv.set(7, true);\n bv.set(8, true);\n bv.set(9, true);\n bv.set(16, true);\n\n assert_eq!(\n bv.iter().collect_vec(),\n vec![0, 7, 8, 9, 16]\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_unused_no_side_effects() {\n let mut run = false;\n let _ = Lazy::new(|| { run = true; () });\n\n assert!(!run);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_deref() {\n let lazy = Lazy::new(|| 100i32);\n\n assert_eq!(100, *Deref::deref(&lazy));\n assert_eq!(100, *Deref::deref(&lazy));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_deref_mut() {\n let mut lazy = Lazy::new(|| 100i32);\n\n assert_eq!(100, *DerefMut::deref_mut(&mut lazy));\n assert_eq!(100, *DerefMut::deref_mut(&mut lazy));\n\n *lazy = 200i32;\n\n assert_eq!(200, *Deref::deref(&lazy));\n assert_eq!(200, *DerefMut::deref_mut(&mut lazy));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_poison_deref() {\n let _ = Deref::deref(&poisoned_lazy());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_poison_deref_mut() {\n let _ = DerefMut::deref_mut(&mut poisoned_lazy());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_lazy_unwrap() {\n assert_eq!(None, Lazy::unwrap(Lazy::new(|| 100i32)));\n assert_eq!(Some(100), Lazy::unwrap({ let lazy = Lazy::new(|| 100i32); Lazy::force_init(&lazy); lazy }));\n assert_eq!(None, Lazy::unwrap(poisoned_lazy()));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_cache_basic() {\n let mut cache = FuncCache::new(|x| x);\n\n assert_eq!(*cache.get(1), 1);\n assert_eq!(*cache.get(2), 2);\n assert_eq!(*cache.get(3), 3);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_cache_multicall() {\n let num_calls = Cell::new(0);\n let mut cache = FuncCache::new(|_| {\n num_calls.set(num_calls.get() + 1);\n num_calls.get() - 
1\n });\n\n assert_eq!(*cache.get(1), 0);\n assert_eq!(*cache.get(1), 0);\n assert_eq!(num_calls.get(), 1);\n\n assert_eq!(*cache.get(0), 1);\n assert_eq!(*cache.get(0), 1);\n assert_eq!(num_calls.get(), 2);\n\n assert_eq!(*cache.get(1), 0);\n assert_eq!(num_calls.get(), 2);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_cache_clear() {\n let num_calls = Cell::new(0);\n let mut cache = FuncCache::new(|_| {\n num_calls.set(num_calls.get() + 1);\n num_calls.get() - 1\n });\n\n assert_eq!(*cache.get(1), 0);\n assert_eq!(num_calls.get(), 1);\n\n cache.clear();\n\n assert_eq!(*cache.get(1), 1);\n assert_eq!(num_calls.get(), 2);\n }\n}" ]
f70ada07f6383a0db7eb2213219007814f903710
317
rs
Rust
tests/integration_test.rs
HaoXuan40404/num-primes
36e83873f89c8f082a3c11ecb6cc2c6b52487a45
[ "Apache-2.0", "MIT" ]
3
2021-09-27T07:47:46.000Z
2021-12-26T14:44:27.000Z
tests/integration_test.rs
HaoXuan40404/num-primes
36e83873f89c8f082a3c11ecb6cc2c6b52487a45
[ "Apache-2.0", "MIT" ]
3
2021-09-15T13:29:37.000Z
2021-12-23T08:52:37.000Z
tests/integration_test.rs
HaoXuan40404/num-primes
36e83873f89c8f082a3c11ecb6cc2c6b52487a45
[ "Apache-2.0", "MIT" ]
6
2021-09-15T20:54:22.000Z
2021-12-20T04:07:51.000Z
use num_primes::{Generator,Verification}; #[cfg(test)] #[test] fn generate_all(){ let prime = Generator::new_prime(512); let _uint = Generator::new_uint(1024); // p = 2q + 1 || where p is safe prime let _safe_prime = Generator::safe_prime(64); let _ver: bool = Verification::is_prime(&prime); }
24.384615
52
0.659306
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn generate_all(){\n let prime = Generator::new_prime(512);\n let _uint = Generator::new_uint(1024);\n\n // p = 2q + 1 || where p is safe prime\n let _safe_prime = Generator::safe_prime(64);\n\n let _ver: bool = Verification::is_prime(&prime);\n}\n}" ]
f70ae028a39cf6a8029824ff7af69f52ace580c5
12,705
rs
Rust
vulkano/src/command_buffer/submit/queue_submit.rs
stephan-cr/vulkano
65254ed10c81694e45555e6044851b718ae200c0
[ "Apache-2.0", "MIT" ]
null
null
null
vulkano/src/command_buffer/submit/queue_submit.rs
stephan-cr/vulkano
65254ed10c81694e45555e6044851b718ae200c0
[ "Apache-2.0", "MIT" ]
null
null
null
vulkano/src/command_buffer/submit/queue_submit.rs
stephan-cr/vulkano
65254ed10c81694e45555e6044851b718ae200c0
[ "Apache-2.0", "MIT" ]
null
null
null
// Copyright (c) 2017 The vulkano developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or // https://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or https://opensource.org/licenses/MIT>, // at your option. All files in the project carrying such // notice may not be copied, modified, or distributed except // according to those terms. use smallvec::SmallVec; use std::error; use std::fmt; use std::marker::PhantomData; use std::ptr; use command_buffer::sys::UnsafeCommandBuffer; use device::Queue; use sync::Fence; use sync::PipelineStages; use sync::Semaphore; use check_errors; use vk; use Error; use OomError; use SynchronizedVulkanObject; use VulkanObject; /// Prototype for a submission that executes command buffers. // TODO: example here #[derive(Debug)] pub struct SubmitCommandBufferBuilder<'a> { wait_semaphores: SmallVec<[vk::Semaphore; 16]>, destination_stages: SmallVec<[vk::PipelineStageFlags; 8]>, signal_semaphores: SmallVec<[vk::Semaphore; 16]>, command_buffers: SmallVec<[vk::CommandBuffer; 4]>, fence: vk::Fence, marker: PhantomData<&'a ()>, } impl<'a> SubmitCommandBufferBuilder<'a> { /// Builds a new empty `SubmitCommandBufferBuilder`. #[inline] pub fn new() -> SubmitCommandBufferBuilder<'a> { SubmitCommandBufferBuilder { wait_semaphores: SmallVec::new(), destination_stages: SmallVec::new(), signal_semaphores: SmallVec::new(), command_buffers: SmallVec::new(), fence: 0, marker: PhantomData, } } /// Returns true if this builder will signal a fence when submitted. 
/// /// # Example /// /// ``` /// use vulkano::command_buffer::submit::SubmitCommandBufferBuilder; /// use vulkano::sync::Fence; /// # let device: std::sync::Arc<vulkano::device::Device> = return; /// /// unsafe { /// let fence = Fence::from_pool(device.clone()).unwrap(); /// /// let mut builder = SubmitCommandBufferBuilder::new(); /// assert!(!builder.has_fence()); /// builder.set_fence_signal(&fence); /// assert!(builder.has_fence()); /// } /// ``` #[inline] pub fn has_fence(&self) -> bool { self.fence != 0 } /// Adds an operation that signals a fence after this submission ends. /// /// # Example /// /// ``` /// use std::time::Duration; /// use vulkano::command_buffer::submit::SubmitCommandBufferBuilder; /// use vulkano::sync::Fence; /// # let device: std::sync::Arc<vulkano::device::Device> = return; /// # let queue: std::sync::Arc<vulkano::device::Queue> = return; /// /// unsafe { /// let fence = Fence::from_pool(device.clone()).unwrap(); /// /// let mut builder = SubmitCommandBufferBuilder::new(); /// builder.set_fence_signal(&fence); /// /// builder.submit(&queue).unwrap(); /// /// // We must not destroy the fence before it is signaled. /// fence.wait(Some(Duration::from_secs(5))).unwrap(); /// } /// ``` /// /// # Safety /// /// - The fence must not be signaled at the time when you call `submit()`. /// /// - If you use the fence for multiple submissions, only one at a time must be executed by the /// GPU. In other words, you must submit one, wait for the fence to be signaled, then reset /// the fence, and then only submit the second. /// /// - If you submit this builder, the fence must be kept alive until it is signaled by the GPU. /// Destroying the fence earlier is an undefined behavior. /// /// - The fence, command buffers, and semaphores must all belong to the same device. 
/// #[inline] pub unsafe fn set_fence_signal(&mut self, fence: &'a Fence) { self.fence = fence.internal_object(); } /// Adds a semaphore to be waited upon before the command buffers are executed. /// /// Only the given `stages` of the command buffers added afterwards will wait upon /// the semaphore. Other stages not included in `stages` can execute before waiting. /// /// # Safety /// /// - The stages must be supported by the device. /// /// - If you submit this builder, the semaphore must be kept alive until you are guaranteed /// that the GPU has at least started executing the command buffers. /// /// - If you submit this builder, no other queue must be waiting on these semaphores. In other /// words, each semaphore signal can only correspond to one semaphore wait. /// /// - If you submit this builder, the semaphores must be signaled when the queue execution /// reaches this submission, or there must be one or more submissions in queues that are /// going to signal these semaphores. In other words, you must not block the queue with /// semaphores that can't get signaled. /// /// - The fence, command buffers, and semaphores must all belong to the same device. /// #[inline] pub unsafe fn add_wait_semaphore(&mut self, semaphore: &'a Semaphore, stages: PipelineStages) { debug_assert!(stages.into_vulkan_bits() != 0); // TODO: debug assert that the device supports the stages self.wait_semaphores.push(semaphore.internal_object()); self.destination_stages.push(stages.into_vulkan_bits()); } /// Adds a command buffer that is executed as part of this command. /// /// The command buffers are submitted in the order in which they are added. /// /// # Safety /// /// - If you submit this builder, the command buffer must be kept alive until you are /// guaranteed that the GPU has finished executing it. 
/// /// - Any calls to vkCmdSetEvent, vkCmdResetEvent or vkCmdWaitEvents that have been recorded /// into the command buffer must not reference any VkEvent that is referenced by any of /// those commands that is pending execution on another queue. /// TODO: rephrase ^ ? /// /// - The fence, command buffers, and semaphores must all belong to the same device. /// /// TODO: more here /// #[inline] pub unsafe fn add_command_buffer(&mut self, command_buffer: &'a UnsafeCommandBuffer) { self.command_buffers.push(command_buffer.internal_object()); } /// Returns the number of semaphores to signal. /// /// In other words, this is the number of times `add_signal_semaphore` has been called. #[inline] pub fn num_signal_semaphores(&self) -> usize { self.signal_semaphores.len() } /// Adds a semaphore that is going to be signaled at the end of the submission. /// /// # Safety /// /// - If you submit this builder, the semaphore must be kept alive until you are guaranteed /// that the GPU has finished executing this submission. /// /// - The semaphore must be in the unsignaled state when queue execution reaches this /// submission. /// /// - The fence, command buffers, and semaphores must all belong to the same device. /// #[inline] pub unsafe fn add_signal_semaphore(&mut self, semaphore: &'a Semaphore) { self.signal_semaphores.push(semaphore.internal_object()); } /// Submits the command buffer to the given queue. /// /// > **Note**: This is an expensive operation, so you may want to merge as many builders as /// > possible together and avoid submitting them one by one. 
/// pub fn submit(self, queue: &Queue) -> Result<(), SubmitCommandBufferError> { unsafe { let vk = queue.device().pointers(); let queue = queue.internal_object_guard(); debug_assert_eq!(self.wait_semaphores.len(), self.destination_stages.len()); let batch = vk::SubmitInfo { sType: vk::STRUCTURE_TYPE_SUBMIT_INFO, pNext: ptr::null(), waitSemaphoreCount: self.wait_semaphores.len() as u32, pWaitSemaphores: self.wait_semaphores.as_ptr(), pWaitDstStageMask: self.destination_stages.as_ptr(), commandBufferCount: self.command_buffers.len() as u32, pCommandBuffers: self.command_buffers.as_ptr(), signalSemaphoreCount: self.signal_semaphores.len() as u32, pSignalSemaphores: self.signal_semaphores.as_ptr(), }; check_errors(vk.QueueSubmit(*queue, 1, &batch, self.fence))?; Ok(()) } } /// Merges this builder with another builder. /// /// # Panic /// /// Panics if both builders have a fence already set. // TODO: create multiple batches instead pub fn merge(mut self, other: Self) -> Self { assert!( self.fence == 0 || other.fence == 0, "Can't merge two queue submits that both have a fence" ); self.wait_semaphores.extend(other.wait_semaphores); self.destination_stages.extend(other.destination_stages); // TODO: meh? will be solved if we submit multiple batches self.signal_semaphores.extend(other.signal_semaphores); self.command_buffers.extend(other.command_buffers); if self.fence == 0 { self.fence = other.fence; } self } } /// Error that can happen when submitting the prototype. #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[repr(u32)] pub enum SubmitCommandBufferError { /// Not enough memory. OomError(OomError), /// The connection to the device has been lost. 
DeviceLost, } impl error::Error for SubmitCommandBufferError { #[inline] fn cause(&self) -> Option<&dyn error::Error> { match *self { SubmitCommandBufferError::OomError(ref err) => Some(err), _ => None, } } } impl fmt::Display for SubmitCommandBufferError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!( fmt, "{}", match *self { SubmitCommandBufferError::OomError(_) => "not enough memory", SubmitCommandBufferError::DeviceLost => "the connection to the device has been lost", } ) } } impl From<Error> for SubmitCommandBufferError { #[inline] fn from(err: Error) -> SubmitCommandBufferError { match err { err @ Error::OutOfHostMemory => SubmitCommandBufferError::OomError(OomError::from(err)), err @ Error::OutOfDeviceMemory => { SubmitCommandBufferError::OomError(OomError::from(err)) } Error::DeviceLost => SubmitCommandBufferError::DeviceLost, _ => panic!("unexpected error: {:?}", err), } } } #[cfg(test)] mod tests { use super::*; use std::time::Duration; use sync::Fence; #[test] fn empty_submit() { let (device, queue) = gfx_dev_and_queue!(); let builder = SubmitCommandBufferBuilder::new(); builder.submit(&queue).unwrap(); } #[test] fn signal_fence() { unsafe { let (device, queue) = gfx_dev_and_queue!(); let fence = Fence::alloc(device.clone()).unwrap(); assert!(!fence.ready().unwrap()); let mut builder = SubmitCommandBufferBuilder::new(); builder.set_fence_signal(&fence); builder.submit(&queue).unwrap(); fence.wait(Some(Duration::from_secs(5))).unwrap(); assert!(fence.ready().unwrap()); } } #[test] fn has_fence() { unsafe { let (device, queue) = gfx_dev_and_queue!(); let fence = Fence::alloc(device.clone()).unwrap(); let mut builder = SubmitCommandBufferBuilder::new(); assert!(!builder.has_fence()); builder.set_fence_signal(&fence); assert!(builder.has_fence()); } } #[test] fn merge_both_have_fences() { unsafe { let (device, _) = gfx_dev_and_queue!(); let fence1 = Fence::alloc(device.clone()).unwrap(); let fence2 = 
Fence::alloc(device.clone()).unwrap(); let mut builder1 = SubmitCommandBufferBuilder::new(); builder1.set_fence_signal(&fence1); let mut builder2 = SubmitCommandBufferBuilder::new(); builder2.set_fence_signal(&fence2); assert_should_panic!("Can't merge two queue submits that both have a fence", { let _ = builder1.merge(builder2); }); } } }
34.808219
124
0.612908
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn empty_submit() {\n let (device, queue) = gfx_dev_and_queue!();\n let builder = SubmitCommandBufferBuilder::new();\n builder.submit(&queue).unwrap();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn signal_fence() {\n unsafe {\n let (device, queue) = gfx_dev_and_queue!();\n\n let fence = Fence::alloc(device.clone()).unwrap();\n assert!(!fence.ready().unwrap());\n\n let mut builder = SubmitCommandBufferBuilder::new();\n builder.set_fence_signal(&fence);\n\n builder.submit(&queue).unwrap();\n fence.wait(Some(Duration::from_secs(5))).unwrap();\n assert!(fence.ready().unwrap());\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn has_fence() {\n unsafe {\n let (device, queue) = gfx_dev_and_queue!();\n\n let fence = Fence::alloc(device.clone()).unwrap();\n\n let mut builder = SubmitCommandBufferBuilder::new();\n assert!(!builder.has_fence());\n builder.set_fence_signal(&fence);\n assert!(builder.has_fence());\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn merge_both_have_fences() {\n unsafe {\n let (device, _) = gfx_dev_and_queue!();\n\n let fence1 = Fence::alloc(device.clone()).unwrap();\n let fence2 = Fence::alloc(device.clone()).unwrap();\n\n let mut builder1 = SubmitCommandBufferBuilder::new();\n builder1.set_fence_signal(&fence1);\n let mut builder2 = SubmitCommandBufferBuilder::new();\n builder2.set_fence_signal(&fence2);\n\n assert_should_panic!(\"Can't merge two queue submits that both have a fence\", {\n let _ = builder1.merge(builder2);\n });\n }\n }\n}" ]
f70aefdc6ff193308fc2c53b58c13d38ef53b42b
4,752
rs
Rust
src/bls12_381/fq12.rs
nearprotocol/pairing
f009a9f54c1c1149cea4ee3e6e58ed71d72bb2e9
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/bls12_381/fq12.rs
nearprotocol/pairing
f009a9f54c1c1149cea4ee3e6e58ed71d72bb2e9
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/bls12_381/fq12.rs
nearprotocol/pairing
f009a9f54c1c1149cea4ee3e6e58ed71d72bb2e9
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
use rand::{Rng}; use {Field, Rand}; use super::fq6::Fq6; use super::fq2::Fq2; use super::fq::FROBENIUS_COEFF_FQ12_C1; /// An element of Fq12, represented by c0 + c1 * w. #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct Fq12 { pub c0: Fq6, pub c1: Fq6, } impl ::std::fmt::Display for Fq12 { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "Fq12({} + {} * w)", self.c0, self.c1) } } impl Rand for Fq12 { fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self { Fq12 { c0: rng.gen(), c1: rng.gen(), } } } impl ::rand::distributions::Distribution<Fq12> for ::rand::distributions::Standard { fn sample<R: ::rand::Rng + ?Sized>(&self, rng: &mut R) -> Fq12 { Fq12::rand(rng) } } impl Fq12 { pub fn conjugate(&mut self) { self.c1.negate(); } pub fn mul_by_014(&mut self, c0: &Fq2, c1: &Fq2, c4: &Fq2) { let mut aa = self.c0; aa.mul_by_01(c0, c1); let mut bb = self.c1; bb.mul_by_1(c4); let mut o = *c1; o.add_assign(c4); self.c1.add_assign(&self.c0); self.c1.mul_by_01(c0, &o); self.c1.sub_assign(&aa); self.c1.sub_assign(&bb); self.c0 = bb; self.c0.mul_by_nonresidue(); self.c0.add_assign(&aa); } } impl Field for Fq12 { fn zero() -> Self { Fq12 { c0: Fq6::zero(), c1: Fq6::zero(), } } fn one() -> Self { Fq12 { c0: Fq6::one(), c1: Fq6::zero(), } } fn is_zero(&self) -> bool { self.c0.is_zero() && self.c1.is_zero() } fn double(&mut self) { self.c0.double(); self.c1.double(); } fn negate(&mut self) { self.c0.negate(); self.c1.negate(); } fn add_assign(&mut self, other: &Self) { self.c0.add_assign(&other.c0); self.c1.add_assign(&other.c1); } fn sub_assign(&mut self, other: &Self) { self.c0.sub_assign(&other.c0); self.c1.sub_assign(&other.c1); } fn frobenius_map(&mut self, power: usize) { self.c0.frobenius_map(power); self.c1.frobenius_map(power); self.c1.c0.mul_assign(&FROBENIUS_COEFF_FQ12_C1[power % 12]); self.c1.c1.mul_assign(&FROBENIUS_COEFF_FQ12_C1[power % 12]); self.c1.c2.mul_assign(&FROBENIUS_COEFF_FQ12_C1[power % 12]); } fn square(&mut self) { let mut ab = 
self.c0; ab.mul_assign(&self.c1); let mut c0c1 = self.c0; c0c1.add_assign(&self.c1); let mut c0 = self.c1; c0.mul_by_nonresidue(); c0.add_assign(&self.c0); c0.mul_assign(&c0c1); c0.sub_assign(&ab); self.c1 = ab; self.c1.add_assign(&ab); ab.mul_by_nonresidue(); c0.sub_assign(&ab); self.c0 = c0; } fn mul_assign(&mut self, other: &Self) { let mut aa = self.c0; aa.mul_assign(&other.c0); let mut bb = self.c1; bb.mul_assign(&other.c1); let mut o = other.c0; o.add_assign(&other.c1); self.c1.add_assign(&self.c0); self.c1.mul_assign(&o); self.c1.sub_assign(&aa); self.c1.sub_assign(&bb); self.c0 = bb; self.c0.mul_by_nonresidue(); self.c0.add_assign(&aa); } fn inverse(&self) -> Option<Self> { let mut c0s = self.c0; c0s.square(); let mut c1s = self.c1; c1s.square(); c1s.mul_by_nonresidue(); c0s.sub_assign(&c1s); c0s.inverse().map(|t| { let mut tmp = Fq12 { c0: t, c1: t }; tmp.c0.mul_assign(&self.c0); tmp.c1.mul_assign(&self.c1); tmp.c1.negate(); tmp }) } } #[cfg(test)] use rand::{SeedableRng}; #[cfg(test)] use rand_xorshift::{XorShiftRng}; #[test] fn test_fq12_mul_by_014() { let mut rng = XorShiftRng::seed_from_u64(0x5dbe62598d313d76); for _ in 0..1000 { let c0 = Fq2::rand(&mut rng); let c1 = Fq2::rand(&mut rng); let c5 = Fq2::rand(&mut rng); let mut a = Fq12::rand(&mut rng); let mut b = a; a.mul_by_014(&c0, &c1, &c5); b.mul_assign(&Fq12 { c0: Fq6 { c0: c0, c1: c1, c2: Fq2::zero(), }, c1: Fq6 { c0: Fq2::zero(), c1: c5, c2: Fq2::zero(), }, }); assert_eq!(a, b); } } #[test] fn fq12_field_tests() { use PrimeField; ::tests::field::random_field_tests::<Fq12>(); ::tests::field::random_frobenius_tests::<Fq12, _>(super::fq::Fq::char(), 13); }
23.879397
84
0.506103
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_fq12_mul_by_014() {\n let mut rng = XorShiftRng::seed_from_u64(0x5dbe62598d313d76);\n\n for _ in 0..1000 {\n let c0 = Fq2::rand(&mut rng);\n let c1 = Fq2::rand(&mut rng);\n let c5 = Fq2::rand(&mut rng);\n let mut a = Fq12::rand(&mut rng);\n let mut b = a;\n\n a.mul_by_014(&c0, &c1, &c5);\n b.mul_assign(&Fq12 {\n c0: Fq6 {\n c0: c0,\n c1: c1,\n c2: Fq2::zero(),\n },\n c1: Fq6 {\n c0: Fq2::zero(),\n c1: c5,\n c2: Fq2::zero(),\n },\n });\n\n assert_eq!(a, b);\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn fq12_field_tests() {\n use PrimeField;\n\n ::tests::field::random_field_tests::<Fq12>();\n ::tests::field::random_frobenius_tests::<Fq12, _>(super::fq::Fq::char(), 13);\n}\n}" ]
f70b28bac903cff32e47bb56f114b95321666e07
637
rs
Rust
leetcode/src/math/leetcode1362.rs
SmiteWindows/leetcode
010d7e714c5a960dbb23a07f4aba85bba3450aad
[ "MIT" ]
1
2021-05-13T16:15:20.000Z
2021-05-13T16:15:20.000Z
leetcode/src/math/leetcode1362.rs
SmiteWindows/leetcode
010d7e714c5a960dbb23a07f4aba85bba3450aad
[ "MIT" ]
null
null
null
leetcode/src/math/leetcode1362.rs
SmiteWindows/leetcode
010d7e714c5a960dbb23a07f4aba85bba3450aad
[ "MIT" ]
null
null
null
// https://leetcode-cn.com/problems/closest-divisors/ // Runtime: 4 ms // Memory Usage: 2.1 MB pub fn closest_divisors(num: i32) -> Vec<i32> { for i in (0..=((num + 2) as f64).sqrt() as i32).rev() { if (num + 1) % i == 0 { return vec![(num + 1) / i, i]; } if (num + 2) % i == 0 { return vec![(num + 2) / i, i]; } } vec![] } // math #[test] fn test1_1362() { assert_eq!(closest_divisors(8), vec![3, 3]); // assert_eq!(closest_divisors(123), vec![5, 25]); assert_eq!(closest_divisors(123), vec![25, 5]); assert_eq!(closest_divisors(999), vec![40, 25]); }
27.695652
59
0.518053
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test1_1362() {\n assert_eq!(closest_divisors(8), vec![3, 3]);\n // assert_eq!(closest_divisors(123), vec![5, 25]);\n assert_eq!(closest_divisors(123), vec![25, 5]);\n assert_eq!(closest_divisors(999), vec![40, 25]);\n}\n}" ]
f70b448c233bcf5e81dc8f8c610124120e9ff078
4,515
rs
Rust
git-attributes/tests/match_group/mod.rs
Byron/grit
61abb0b006292d2122784b032e198cc716fb7b92
[ "Apache-2.0", "MIT" ]
149
2020-07-07T09:56:14.000Z
2020-07-30T15:12:14.000Z
git-attributes/tests/match_group/mod.rs
Byron/grit
61abb0b006292d2122784b032e198cc716fb7b92
[ "Apache-2.0", "MIT" ]
4
2020-06-29T06:53:11.000Z
2020-07-25T04:04:14.000Z
git-attributes/tests/match_group/mod.rs
Byron/grit
61abb0b006292d2122784b032e198cc716fb7b92
[ "Apache-2.0", "MIT" ]
2
2020-07-12T18:25:01.000Z
2020-07-24T08:45:22.000Z
mod ignore { use std::io::Read; use bstr::{BStr, ByteSlice}; use git_attributes::{Ignore, Match, MatchGroup}; use git_glob::pattern::Case; struct Expectations<'a> { lines: bstr::Lines<'a>, } impl<'a> Iterator for Expectations<'a> { type Item = (&'a BStr, Option<(&'a BStr, usize, &'a BStr)>); fn next(&mut self) -> Option<Self::Item> { let line = self.lines.next()?; let (left, value) = line.split_at(line.find_byte(b'\t').unwrap()); let value = value[1..].as_bstr(); let source_and_line = if left == b"::" { None } else { let mut tokens = left.split(|b| *b == b':'); let source = tokens.next().unwrap().as_bstr(); let line_number: usize = tokens.next().unwrap().to_str_lossy().parse().ok().unwrap(); let pattern = tokens.next().unwrap().as_bstr(); Some((source, line_number, pattern)) }; Some((value, source_and_line)) } } #[test] fn from_git_dir() -> crate::Result { let dir = git_testtools::scripted_fixture_repo_read_only("make_global_and_external_and_dir_ignores.sh")?; let repo_dir = dir.join("repo"); let git_dir = repo_dir.join(".git"); let baseline = std::fs::read(git_dir.parent().unwrap().join("git-check-ignore.baseline"))?; let mut buf = Vec::new(); let mut group = MatchGroup::from_git_dir(git_dir, Some(dir.join("user.exclude")), &mut buf)?; assert!( !group.add_patterns_file("not-a-file", false, None, &mut buf)?, "missing files are no problem and cause a negative response" ); assert!( group.add_patterns_file(repo_dir.join(".gitignore"), true, repo_dir.as_path().into(), &mut buf)?, "existing files return true" ); buf.clear(); let ignore_file = repo_dir.join("dir-with-ignore").join(".gitignore"); std::fs::File::open(&ignore_file)?.read_to_end(&mut buf)?; group.add_patterns_buffer(&buf, ignore_file, repo_dir.as_path().into()); for (path, source_and_line) in (Expectations { lines: baseline.lines(), }) { let actual = group.pattern_matching_relative_path( path, repo_dir .join(path.to_str_lossy().as_ref()) .metadata() .ok() .map(|m| m.is_dir()), Case::Sensitive, ); match 
(actual, source_and_line) { ( Some(Match { sequence_number, pattern: _, source, value: _, }), Some((expected_source, line, _expected_pattern)), ) => { assert_eq!(sequence_number, line, "our counting should match the one used in git"); assert_eq!( source.map(|p| p.canonicalize().unwrap()), Some(repo_dir.join(expected_source.to_str_lossy().as_ref()).canonicalize()?) ); } (None, None) => {} (actual, expected) => panic!("actual {:?} should match {:?} with path '{}'", actual, expected, path), } } Ok(()) } #[test] fn from_overrides() { let input = ["simple", "pattern/"]; let group = git_attributes::MatchGroup::<Ignore>::from_overrides(input); assert_eq!( group.pattern_matching_relative_path("Simple", None, git_glob::pattern::Case::Fold), Some(pattern_to_match(&git_glob::parse("simple").unwrap(), 0)) ); assert_eq!( group.pattern_matching_relative_path("pattern", Some(true), git_glob::pattern::Case::Sensitive), Some(pattern_to_match(&git_glob::parse("pattern/").unwrap(), 1)) ); assert_eq!(group.patterns.len(), 1); assert_eq!( git_attributes::PatternList::<Ignore>::from_overrides(input), group.patterns.into_iter().next().unwrap() ); } fn pattern_to_match(pattern: &git_glob::Pattern, sequence_number: usize) -> Match<'_, ()> { Match { pattern, value: &(), source: None, sequence_number, } } }
37.941176
117
0.512957
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn from_git_dir() -> crate::Result {\n let dir = git_testtools::scripted_fixture_repo_read_only(\"make_global_and_external_and_dir_ignores.sh\")?;\n let repo_dir = dir.join(\"repo\");\n let git_dir = repo_dir.join(\".git\");\n let baseline = std::fs::read(git_dir.parent().unwrap().join(\"git-check-ignore.baseline\"))?;\n let mut buf = Vec::new();\n let mut group = MatchGroup::from_git_dir(git_dir, Some(dir.join(\"user.exclude\")), &mut buf)?;\n\n assert!(\n !group.add_patterns_file(\"not-a-file\", false, None, &mut buf)?,\n \"missing files are no problem and cause a negative response\"\n );\n assert!(\n group.add_patterns_file(repo_dir.join(\".gitignore\"), true, repo_dir.as_path().into(), &mut buf)?,\n \"existing files return true\"\n );\n\n buf.clear();\n let ignore_file = repo_dir.join(\"dir-with-ignore\").join(\".gitignore\");\n std::fs::File::open(&ignore_file)?.read_to_end(&mut buf)?;\n group.add_patterns_buffer(&buf, ignore_file, repo_dir.as_path().into());\n\n for (path, source_and_line) in (Expectations {\n lines: baseline.lines(),\n }) {\n let actual = group.pattern_matching_relative_path(\n path,\n repo_dir\n .join(path.to_str_lossy().as_ref())\n .metadata()\n .ok()\n .map(|m| m.is_dir()),\n Case::Sensitive,\n );\n match (actual, source_and_line) {\n (\n Some(Match {\n sequence_number,\n pattern: _,\n source,\n value: _,\n }),\n Some((expected_source, line, _expected_pattern)),\n ) => {\n assert_eq!(sequence_number, line, \"our counting should match the one used in git\");\n assert_eq!(\n source.map(|p| p.canonicalize().unwrap()),\n Some(repo_dir.join(expected_source.to_str_lossy().as_ref()).canonicalize()?)\n );\n }\n (None, None) => {}\n (actual, expected) => panic!(\"actual {:?} should match {:?} with path '{}'\", actual, expected, path),\n }\n }\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn from_overrides() {\n let input = [\"simple\", \"pattern/\"];\n let group = 
git_attributes::MatchGroup::<Ignore>::from_overrides(input);\n assert_eq!(\n group.pattern_matching_relative_path(\"Simple\", None, git_glob::pattern::Case::Fold),\n Some(pattern_to_match(&git_glob::parse(\"simple\").unwrap(), 0))\n );\n assert_eq!(\n group.pattern_matching_relative_path(\"pattern\", Some(true), git_glob::pattern::Case::Sensitive),\n Some(pattern_to_match(&git_glob::parse(\"pattern/\").unwrap(), 1))\n );\n assert_eq!(group.patterns.len(), 1);\n assert_eq!(\n git_attributes::PatternList::<Ignore>::from_overrides(input),\n group.patterns.into_iter().next().unwrap()\n );\n }\n}" ]
f70b5d5bfc40fed77af51f7f59c346a1b188c2f4
17,545
rs
Rust
russell_sparse/src/sparse_triplet.rs
cpmech/russell
4297f524182b88a384232277293cbb1324d1cfb7
[ "MIT" ]
7
2021-07-13T00:47:29.000Z
2021-12-12T23:06:01.000Z
russell_sparse/src/sparse_triplet.rs
cpmech/russell
4297f524182b88a384232277293cbb1324d1cfb7
[ "MIT" ]
7
2021-06-21T13:30:22.000Z
2021-09-07T12:49:47.000Z
russell_sparse/src/sparse_triplet.rs
cpmech/russell
4297f524182b88a384232277293cbb1324d1cfb7
[ "MIT" ]
1
2021-06-25T00:12:54.000Z
2021-06-25T00:12:54.000Z
use super::Symmetry; use russell_lab::{Matrix, Vector}; use russell_openblas::to_i32; use std::fmt; /// Holds triples (i,j,aij) representing a sparse matrix /// /// # Remarks /// /// - Only the non-zero values are required /// - Entries with repeated (i,j) indices are allowed /// - Repeated (i,j) entries will have the aij values summed when solving a linear system /// - The repeated (i,j) capability is of great convenience for Finite Element solvers /// - A maximum number of entries must be decided prior to allocating a new Triplet /// - The maximum number of entries includes possible entries with repeated indices /// - See the `to_matrix` method for an example pub struct SparseTriplet { pub(crate) nrow: usize, // [i32] number of rows pub(crate) ncol: usize, // [i32] number of columns pub(crate) pos: usize, // [i32] current index => nnz in the end pub(crate) max: usize, // [i32] max allowed number of entries (may be > nnz) pub(crate) symmetry: Symmetry, // Storage option regarding symmetry pub(crate) indices_i: Vec<i32>, // [nnz] indices i pub(crate) indices_j: Vec<i32>, // [nnz] indices j pub(crate) values_aij: Vec<f64>, // [nnz] values aij } impl SparseTriplet { /// Creates a new SparseTriplet representing a sparse matrix /// /// ```text /// trip := sparse(a) /// (max) (nrow,ncol) /// ``` /// /// # Input /// /// * `nrow` -- The number of rows of the sparse matrix /// * `ncol` -- The number of columns of the sparse matrix /// * `max` -- The maximum number fo non-zero values in the sparse matrix, /// including entries with repeated indices /// * `sym` -- Specifies how the data is stored regarding symmetry pub fn new(nrow: usize, ncol: usize, max: usize, sym: Symmetry) -> Result<Self, &'static str> { if nrow == 0 || ncol == 0 || max == 0 { return Err("nrow, ncol, and max must all be greater than zero"); } Ok(SparseTriplet { nrow, ncol, pos: 0, max, symmetry: sym, indices_i: vec![0; max], indices_j: vec![0; max], values_aij: vec![0.0; max], }) } /// Puts the next 
triple (i,j,aij) into the Triplet pub fn put(&mut self, i: usize, j: usize, aij: f64) { assert!(i < self.nrow); assert!(j < self.ncol); assert!(self.pos < self.max); let i_i32 = to_i32(i); let j_i32 = to_i32(j); self.indices_i[self.pos] = i_i32; self.indices_j[self.pos] = j_i32; self.values_aij[self.pos] = aij; self.pos += 1; } /// Returns the (nrow x ncol) dimensions of the matrix represented by this Triplet /// /// # Output /// /// * `nrow` -- number of rows /// * `ncol` -- number of columns /// /// # Example /// /// ``` /// # fn main() -> Result<(), &'static str> { /// use russell_sparse::*; /// let trip = SparseTriplet::new(2, 2, 1, Symmetry::No)?; /// assert_eq!(trip.dims(), (2, 2)); /// # Ok(()) /// # } /// ``` pub fn dims(&self) -> (usize, usize) { (self.nrow, self.ncol) } /// Converts the triples data to a matrix, up to a limit /// /// # Input /// /// `a` -- (nrow_max, ncol_max) matrix to hold the triples data. Thus, the matrix may have less rows or less columns than the triplet data /// /// # Example /// /// ``` /// # fn main() -> Result<(), &'static str> { /// // import /// use russell_lab::*; /// use russell_sparse::*; /// /// // define (4 x 4) sparse matrix with 6+1 non-zero values /// // (with an extra ij-repeated entry) /// let mut trip = SparseTriplet::new(4, 4, 6+1, Symmetry::No)?; /// trip.put(0, 0, 0.5); // (0, 0, a00/2) /// trip.put(0, 0, 0.5); // (0, 0, a00/2) /// trip.put(0, 1, 2.0); /// trip.put(1, 0, 3.0); /// trip.put(1, 1, 4.0); /// trip.put(2, 2, 5.0); /// trip.put(3, 3, 6.0); /// /// // convert the first (3 x 3) values /// let mut a = Matrix::new(3, 3); /// trip.to_matrix(&mut a)?; /// let correct = "┌ ┐\n\ /// │ 1 2 0 │\n\ /// │ 3 4 0 │\n\ /// │ 0 0 5 │\n\ /// └ ┘"; /// assert_eq!(format!("{}", a), correct); /// /// // convert the first (4 x 4) values /// let mut b = Matrix::new(4, 4); /// trip.to_matrix(&mut b)?; /// let correct = "┌ ┐\n\ /// │ 1 2 0 0 │\n\ /// │ 3 4 0 0 │\n\ /// │ 0 0 5 0 │\n\ /// │ 0 0 0 6 │\n\ /// └ ┘"; /// 
assert_eq!(format!("{}", b), correct); /// # Ok(()) /// # } /// ``` pub fn to_matrix(&self, a: &mut Matrix) -> Result<(), &'static str> { let (m, n) = a.dims(); if m > self.nrow || n > self.ncol { return Err("wrong matrix dimensions"); } let m_i32 = to_i32(m); let n_i32 = to_i32(n); a.fill(0.0); for p in 0..self.pos { if self.indices_i[p] < m_i32 && self.indices_j[p] < n_i32 { let (i, j) = (self.indices_i[p] as usize, self.indices_j[p] as usize); a[i][j] += self.values_aij[p]; } } Ok(()) } /// Performs the matrix-vector multiplication /// /// ```text /// v := a ⋅ u /// (m) (m,n) (n) /// ``` /// /// # Note /// /// This method is not highly efficient but should useful in verifications. /// /// # Example /// /// ``` /// # fn main() -> Result<(), &'static str> { /// // import /// use russell_lab::*; /// use russell_sparse::*; /// /// // set sparse matrix (4 x 3) with 6 non-zeros /// let mut trip = SparseTriplet::new(4, 3, 6, Symmetry::No)?; /// trip.put(0, 0, 1.0); /// trip.put(1, 0, 2.0); /// trip.put(1, 1, 3.0); /// trip.put(2, 0, 4.0); /// trip.put(3, 0, 5.0); /// trip.put(3, 2, 6.0); /// /// // check matrix /// let (m, n) = trip.dims(); /// let mut a = Matrix::new(m, n); /// trip.to_matrix(&mut a)?; /// let correct_a = "┌ ┐\n\ /// │ 1 0 0 │\n\ /// │ 2 3 0 │\n\ /// │ 4 0 0 │\n\ /// │ 5 0 6 │\n\ /// └ ┘"; /// assert_eq!(format!("{}", a), correct_a); /// /// // perform mat-vec-mul /// let u = Vector::from(&[1.0, 1.0, 1.0]); /// let v = trip.mat_vec_mul(&u)?; /// /// // check vector /// let correct_v = "┌ ┐\n\ /// │ 1 │\n\ /// │ 5 │\n\ /// │ 4 │\n\ /// │ 11 │\n\ /// └ ┘"; /// assert_eq!(format!("{}", v), correct_v); /// # Ok(()) /// # } /// ``` pub fn mat_vec_mul(&self, u: &Vector) -> Result<Vector, &'static str> { if u.dim() != self.ncol { return Err("u.ndim must equal a.ncol"); } let sym_tri = match self.symmetry { Symmetry::GeneralTriangular => true, Symmetry::PosDefTriangular => true, _ => false, }; let mut v = Vector::new(self.nrow); for p in 0..self.pos { let i = 
self.indices_i[p] as usize; let j = self.indices_j[p] as usize; let aij = self.values_aij[p]; v[i] += aij * u[j]; if sym_tri && i != j { v[j] += aij * u[i]; } } Ok(v) } } impl fmt::Display for SparseTriplet { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "\x20\x20\x20\x20\"nrow\": {},\n\ \x20\x20\x20\x20\"ncol\": {},\n\ \x20\x20\x20\x20\"pos\": {},\n\ \x20\x20\x20\x20\"max\": {},\n\ \x20\x20\x20\x20\"symmetry\": \"{:?}\"", self.nrow, self.ncol, self.pos, self.max, self.symmetry )?; Ok(()) } } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// #[cfg(test)] mod tests { use super::SparseTriplet; use crate::Symmetry; use russell_chk::assert_vec_approx_eq; use russell_lab::{Matrix, Vector}; #[test] fn new_fails_on_wrong_input() { assert_eq!( SparseTriplet::new(0, 3, 5, Symmetry::No).err(), Some("nrow, ncol, and max must all be greater than zero") ); assert_eq!( SparseTriplet::new(3, 0, 5, Symmetry::No).err(), Some("nrow, ncol, and max must all be greater than zero") ); assert_eq!( SparseTriplet::new(3, 3, 0, Symmetry::No).err(), Some("nrow, ncol, and max must all be greater than zero") ); } #[test] fn new_works() -> Result<(), &'static str> { let trip = SparseTriplet::new(3, 3, 5, Symmetry::No)?; assert_eq!(trip.nrow, 3); assert_eq!(trip.ncol, 3); assert_eq!(trip.pos, 0); assert_eq!(trip.max, 5); assert!(matches!(trip.symmetry, Symmetry::No)); Ok(()) } #[test] #[should_panic] fn put_panics_on_wrong_values_1() { let mut trip = SparseTriplet::new(1, 1, 1, Symmetry::No).unwrap(); trip.put(1, 0, 0.0); } #[test] #[should_panic] fn put_panics_on_wrong_values_2() { let mut trip = SparseTriplet::new(1, 1, 1, Symmetry::No).unwrap(); trip.put(0, 1, 0.0); } #[test] #[should_panic] fn put_panics_on_wrong_values_3() { let mut trip = SparseTriplet::new(1, 1, 1, Symmetry::No).unwrap(); trip.put(0, 0, 0.0); // << all spots occupied trip.put(0, 0, 0.0); } #[test] fn put_works() -> Result<(), 
&'static str> { let mut trip = SparseTriplet::new(3, 3, 5, Symmetry::No)?; trip.put(0, 0, 1.0); assert_eq!(trip.pos, 1); trip.put(0, 1, 2.0); assert_eq!(trip.pos, 2); trip.put(1, 0, 3.0); assert_eq!(trip.pos, 3); trip.put(1, 1, 4.0); assert_eq!(trip.pos, 4); trip.put(2, 2, 5.0); assert_eq!(trip.pos, 5); Ok(()) } #[test] fn dims_works() -> Result<(), &'static str> { let trip = SparseTriplet::new(3, 2, 1, Symmetry::No)?; assert_eq!(trip.dims(), (3, 2)); Ok(()) } #[test] fn to_matrix_fails_on_wrong_dims() -> Result<(), &'static str> { let trip = SparseTriplet::new(1, 1, 1, Symmetry::No)?; let mut a_2x1 = Matrix::new(2, 1); let mut a_1x2 = Matrix::new(1, 2); assert_eq!(trip.to_matrix(&mut a_2x1), Err("wrong matrix dimensions")); assert_eq!(trip.to_matrix(&mut a_1x2), Err("wrong matrix dimensions")); Ok(()) } #[test] fn to_matrix_works() -> Result<(), &'static str> { let mut trip = SparseTriplet::new(3, 3, 5, Symmetry::No)?; trip.put(0, 0, 1.0); trip.put(0, 1, 2.0); trip.put(1, 0, 3.0); trip.put(1, 1, 4.0); trip.put(2, 2, 5.0); let mut a = Matrix::new(3, 3); trip.to_matrix(&mut a)?; assert_eq!(a.get(0, 0), 1.0); assert_eq!(a.get(0, 1), 2.0); assert_eq!(a.get(1, 0), 3.0); assert_eq!(a.get(1, 1), 4.0); assert_eq!(a.get(2, 2), 5.0); let mut b = Matrix::new(2, 1); trip.to_matrix(&mut b)?; assert_eq!(b.get(0, 0), 1.0); assert_eq!(b.get(1, 0), 3.0); Ok(()) } #[test] fn to_matrix_with_duplicates_works() -> Result<(), &'static str> { // allocate a square matrix let mut trip = SparseTriplet::new(5, 5, 13, Symmetry::No)?; trip.put(0, 0, 1.0); // << (0, 0, a00/2) trip.put(0, 0, 1.0); // << (0, 0, a00/2) trip.put(1, 0, 3.0); trip.put(0, 1, 3.0); trip.put(2, 1, -1.0); trip.put(4, 1, 4.0); trip.put(1, 2, 4.0); trip.put(2, 2, -3.0); trip.put(3, 2, 1.0); trip.put(4, 2, 2.0); trip.put(2, 3, 2.0); trip.put(1, 4, 6.0); trip.put(4, 4, 1.0); // print matrix let (m, n) = trip.dims(); let mut a = Matrix::new(m, n); trip.to_matrix(&mut a)?; let correct = "┌ ┐\n\ │ 2 3 0 0 0 │\n\ │ 3 0 4 0 6 
│\n\ │ 0 -1 -3 2 0 │\n\ │ 0 0 1 0 0 │\n\ │ 0 4 2 0 1 │\n\ └ ┘"; assert_eq!(format!("{}", a), correct); Ok(()) } #[test] fn mat_vec_mul_fails_on_wrong_input() -> Result<(), &'static str> { let trip = SparseTriplet::new(2, 2, 1, Symmetry::No)?; let u = Vector::new(3); assert_eq!(trip.mat_vec_mul(&u).err(), Some("u.ndim must equal a.ncol")); Ok(()) } #[test] fn mat_vec_mul_works() -> Result<(), &'static str> { // 1.0 2.0 3.0 4.0 5.0 // 0.1 0.2 0.3 0.4 0.5 // 10.0 20.0 30.0 40.0 50.0 let mut trip = SparseTriplet::new(3, 5, 15, Symmetry::No)?; trip.put(0, 0, 1.0); trip.put(0, 1, 2.0); trip.put(0, 2, 3.0); trip.put(0, 3, 4.0); trip.put(0, 4, 5.0); trip.put(1, 0, 0.1); trip.put(1, 1, 0.2); trip.put(1, 2, 0.3); trip.put(1, 3, 0.4); trip.put(1, 4, 0.5); trip.put(2, 0, 10.0); trip.put(2, 1, 20.0); trip.put(2, 2, 30.0); trip.put(2, 3, 40.0); trip.put(2, 4, 50.0); let u = Vector::from(&[0.1, 0.2, 0.3, 0.4, 0.5]); let correct_v = &[5.5, 0.55, 55.0]; let v = trip.mat_vec_mul(&u)?; assert_vec_approx_eq!(v.as_data(), correct_v, 1e-15); Ok(()) } #[test] fn mat_vec_mul_sym_part_works() -> Result<(), &'static str> { // 2 // 1 2 sym // 1 2 9 // 3 1 1 7 // 2 1 5 1 8 let mut trip = SparseTriplet::new(5, 5, 15, Symmetry::GeneralTriangular)?; trip.put(0, 0, 2.0); trip.put(1, 1, 2.0); trip.put(2, 2, 9.0); trip.put(3, 3, 7.0); trip.put(4, 4, 8.0); trip.put(1, 0, 1.0); trip.put(2, 0, 1.0); trip.put(2, 1, 2.0); trip.put(3, 0, 3.0); trip.put(3, 1, 1.0); trip.put(3, 2, 1.0); trip.put(4, 0, 2.0); trip.put(4, 1, 1.0); trip.put(4, 2, 5.0); trip.put(4, 3, 1.0); let u = Vector::from(&[-629.0 / 98.0, 237.0 / 49.0, -53.0 / 49.0, 62.0 / 49.0, 23.0 / 14.0]); let correct_v = &[-2.0, 4.0, 3.0, -5.0, 1.0]; let v = trip.mat_vec_mul(&u)?; assert_vec_approx_eq!(v.as_data(), correct_v, 1e-14); Ok(()) } #[test] fn mat_vec_mul_sym_full_works() -> Result<(), &'static str> { // 2 1 1 3 2 // 1 2 2 1 1 // 1 2 9 1 5 // 3 1 1 7 1 // 2 1 5 1 8 let mut trip = SparseTriplet::new(5, 5, 25, Symmetry::General)?; trip.put(0, 
0, 2.0); trip.put(1, 1, 2.0); trip.put(2, 2, 9.0); trip.put(3, 3, 7.0); trip.put(4, 4, 8.0); trip.put(1, 0, 1.0); trip.put(0, 1, 1.0); trip.put(2, 0, 1.0); trip.put(0, 2, 1.0); trip.put(2, 1, 2.0); trip.put(1, 2, 2.0); trip.put(3, 0, 3.0); trip.put(0, 3, 3.0); trip.put(3, 1, 1.0); trip.put(1, 3, 1.0); trip.put(3, 2, 1.0); trip.put(2, 3, 1.0); trip.put(4, 0, 2.0); trip.put(0, 4, 2.0); trip.put(4, 1, 1.0); trip.put(1, 4, 1.0); trip.put(4, 2, 5.0); trip.put(2, 4, 5.0); trip.put(4, 3, 1.0); trip.put(3, 4, 1.0); let u = Vector::from(&[-629.0 / 98.0, 237.0 / 49.0, -53.0 / 49.0, 62.0 / 49.0, 23.0 / 14.0]); let correct_v = &[-2.0, 4.0, 3.0, -5.0, 1.0]; let v = trip.mat_vec_mul(&u)?; assert_vec_approx_eq!(v.as_data(), correct_v, 1e-14); Ok(()) } #[test] fn mat_vec_mul_pos_def_works() -> Result<(), &'static str> { // 2 -1 2 ... // -1 2 -1 => -1 2 // -1 2 -1 2 let mut trip = SparseTriplet::new(3, 3, 5, Symmetry::PosDefTriangular)?; trip.put(0, 0, 2.0); trip.put(1, 1, 2.0); trip.put(2, 2, 2.0); trip.put(1, 0, -1.0); trip.put(2, 1, -1.0); let u = Vector::from(&[5.0, 8.0, 7.0]); let correct_v = &[2.0, 4.0, 6.0]; let v = trip.mat_vec_mul(&u)?; assert_vec_approx_eq!(v.as_data(), correct_v, 1e-15); Ok(()) } #[test] fn display_trait_works() -> Result<(), &'static str> { let trip = SparseTriplet::new(3, 3, 1, Symmetry::General)?; let correct: &str = "\x20\x20\x20\x20\"nrow\": 3,\n\ \x20\x20\x20\x20\"ncol\": 3,\n\ \x20\x20\x20\x20\"pos\": 0,\n\ \x20\x20\x20\x20\"max\": 1,\n\ \x20\x20\x20\x20\"symmetry\": \"General\""; assert_eq!(format!("{}", trip), correct); Ok(()) } }
32.430684
142
0.450385
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn new_fails_on_wrong_input() {\n assert_eq!(\n SparseTriplet::new(0, 3, 5, Symmetry::No).err(),\n Some(\"nrow, ncol, and max must all be greater than zero\")\n );\n assert_eq!(\n SparseTriplet::new(3, 0, 5, Symmetry::No).err(),\n Some(\"nrow, ncol, and max must all be greater than zero\")\n );\n assert_eq!(\n SparseTriplet::new(3, 3, 0, Symmetry::No).err(),\n Some(\"nrow, ncol, and max must all be greater than zero\")\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn new_works() -> Result<(), &'static str> {\n let trip = SparseTriplet::new(3, 3, 5, Symmetry::No)?;\n assert_eq!(trip.nrow, 3);\n assert_eq!(trip.ncol, 3);\n assert_eq!(trip.pos, 0);\n assert_eq!(trip.max, 5);\n assert!(matches!(trip.symmetry, Symmetry::No));\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn put_panics_on_wrong_values_1() {\n let mut trip = SparseTriplet::new(1, 1, 1, Symmetry::No).unwrap();\n trip.put(1, 0, 0.0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn put_panics_on_wrong_values_2() {\n let mut trip = SparseTriplet::new(1, 1, 1, Symmetry::No).unwrap();\n trip.put(0, 1, 0.0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn put_panics_on_wrong_values_3() {\n let mut trip = SparseTriplet::new(1, 1, 1, Symmetry::No).unwrap();\n trip.put(0, 0, 0.0); // << all spots occupied\n trip.put(0, 0, 0.0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn put_works() -> Result<(), &'static str> {\n let mut trip = SparseTriplet::new(3, 3, 5, Symmetry::No)?;\n trip.put(0, 0, 1.0);\n assert_eq!(trip.pos, 1);\n trip.put(0, 1, 2.0);\n assert_eq!(trip.pos, 2);\n trip.put(1, 0, 3.0);\n assert_eq!(trip.pos, 3);\n trip.put(1, 1, 4.0);\n assert_eq!(trip.pos, 4);\n trip.put(2, 2, 5.0);\n assert_eq!(trip.pos, 5);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn dims_works() -> Result<(), &'static str> {\n let trip = 
SparseTriplet::new(3, 2, 1, Symmetry::No)?;\n assert_eq!(trip.dims(), (3, 2));\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn to_matrix_fails_on_wrong_dims() -> Result<(), &'static str> {\n let trip = SparseTriplet::new(1, 1, 1, Symmetry::No)?;\n let mut a_2x1 = Matrix::new(2, 1);\n let mut a_1x2 = Matrix::new(1, 2);\n assert_eq!(trip.to_matrix(&mut a_2x1), Err(\"wrong matrix dimensions\"));\n assert_eq!(trip.to_matrix(&mut a_1x2), Err(\"wrong matrix dimensions\"));\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn to_matrix_works() -> Result<(), &'static str> {\n let mut trip = SparseTriplet::new(3, 3, 5, Symmetry::No)?;\n trip.put(0, 0, 1.0);\n trip.put(0, 1, 2.0);\n trip.put(1, 0, 3.0);\n trip.put(1, 1, 4.0);\n trip.put(2, 2, 5.0);\n let mut a = Matrix::new(3, 3);\n trip.to_matrix(&mut a)?;\n assert_eq!(a.get(0, 0), 1.0);\n assert_eq!(a.get(0, 1), 2.0);\n assert_eq!(a.get(1, 0), 3.0);\n assert_eq!(a.get(1, 1), 4.0);\n assert_eq!(a.get(2, 2), 5.0);\n let mut b = Matrix::new(2, 1);\n trip.to_matrix(&mut b)?;\n assert_eq!(b.get(0, 0), 1.0);\n assert_eq!(b.get(1, 0), 3.0);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn to_matrix_with_duplicates_works() -> Result<(), &'static str> {\n // allocate a square matrix\n let mut trip = SparseTriplet::new(5, 5, 13, Symmetry::No)?;\n trip.put(0, 0, 1.0); // << (0, 0, a00/2)\n trip.put(0, 0, 1.0); // << (0, 0, a00/2)\n trip.put(1, 0, 3.0);\n trip.put(0, 1, 3.0);\n trip.put(2, 1, -1.0);\n trip.put(4, 1, 4.0);\n trip.put(1, 2, 4.0);\n trip.put(2, 2, -3.0);\n trip.put(3, 2, 1.0);\n trip.put(4, 2, 2.0);\n trip.put(2, 3, 2.0);\n trip.put(1, 4, 6.0);\n trip.put(4, 4, 1.0);\n\n // print matrix\n let (m, n) = trip.dims();\n let mut a = Matrix::new(m, n);\n trip.to_matrix(&mut a)?;\n let correct = \"┌ ┐\\n\\\n │ 2 3 0 0 0 │\\n\\\n │ 3 0 4 0 6 │\\n\\\n │ 0 -1 -3 2 0 │\\n\\\n │ 0 0 1 0 0 │\\n\\\n │ 0 4 2 0 1 │\\n\\\n └ ┘\";\n assert_eq!(format!(\"{}\", 
a), correct);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn mat_vec_mul_fails_on_wrong_input() -> Result<(), &'static str> {\n let trip = SparseTriplet::new(2, 2, 1, Symmetry::No)?;\n let u = Vector::new(3);\n assert_eq!(trip.mat_vec_mul(&u).err(), Some(\"u.ndim must equal a.ncol\"));\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn mat_vec_mul_works() -> Result<(), &'static str> {\n // 1.0 2.0 3.0 4.0 5.0\n // 0.1 0.2 0.3 0.4 0.5\n // 10.0 20.0 30.0 40.0 50.0\n let mut trip = SparseTriplet::new(3, 5, 15, Symmetry::No)?;\n trip.put(0, 0, 1.0);\n trip.put(0, 1, 2.0);\n trip.put(0, 2, 3.0);\n trip.put(0, 3, 4.0);\n trip.put(0, 4, 5.0);\n trip.put(1, 0, 0.1);\n trip.put(1, 1, 0.2);\n trip.put(1, 2, 0.3);\n trip.put(1, 3, 0.4);\n trip.put(1, 4, 0.5);\n trip.put(2, 0, 10.0);\n trip.put(2, 1, 20.0);\n trip.put(2, 2, 30.0);\n trip.put(2, 3, 40.0);\n trip.put(2, 4, 50.0);\n let u = Vector::from(&[0.1, 0.2, 0.3, 0.4, 0.5]);\n let correct_v = &[5.5, 0.55, 55.0];\n let v = trip.mat_vec_mul(&u)?;\n assert_vec_approx_eq!(v.as_data(), correct_v, 1e-15);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn mat_vec_mul_sym_part_works() -> Result<(), &'static str> {\n // 2\n // 1 2 sym\n // 1 2 9\n // 3 1 1 7\n // 2 1 5 1 8\n let mut trip = SparseTriplet::new(5, 5, 15, Symmetry::GeneralTriangular)?;\n trip.put(0, 0, 2.0);\n trip.put(1, 1, 2.0);\n trip.put(2, 2, 9.0);\n trip.put(3, 3, 7.0);\n trip.put(4, 4, 8.0);\n\n trip.put(1, 0, 1.0);\n\n trip.put(2, 0, 1.0);\n trip.put(2, 1, 2.0);\n\n trip.put(3, 0, 3.0);\n trip.put(3, 1, 1.0);\n trip.put(3, 2, 1.0);\n\n trip.put(4, 0, 2.0);\n trip.put(4, 1, 1.0);\n trip.put(4, 2, 5.0);\n trip.put(4, 3, 1.0);\n let u = Vector::from(&[-629.0 / 98.0, 237.0 / 49.0, -53.0 / 49.0, 62.0 / 49.0, 23.0 / 14.0]);\n let correct_v = &[-2.0, 4.0, 3.0, -5.0, 1.0];\n let v = trip.mat_vec_mul(&u)?;\n assert_vec_approx_eq!(v.as_data(), correct_v, 1e-14);\n Ok(())\n }\n}", 
"#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn mat_vec_mul_sym_full_works() -> Result<(), &'static str> {\n // 2 1 1 3 2\n // 1 2 2 1 1\n // 1 2 9 1 5\n // 3 1 1 7 1\n // 2 1 5 1 8\n let mut trip = SparseTriplet::new(5, 5, 25, Symmetry::General)?;\n trip.put(0, 0, 2.0);\n trip.put(1, 1, 2.0);\n trip.put(2, 2, 9.0);\n trip.put(3, 3, 7.0);\n trip.put(4, 4, 8.0);\n\n trip.put(1, 0, 1.0);\n trip.put(0, 1, 1.0);\n\n trip.put(2, 0, 1.0);\n trip.put(0, 2, 1.0);\n trip.put(2, 1, 2.0);\n trip.put(1, 2, 2.0);\n\n trip.put(3, 0, 3.0);\n trip.put(0, 3, 3.0);\n trip.put(3, 1, 1.0);\n trip.put(1, 3, 1.0);\n trip.put(3, 2, 1.0);\n trip.put(2, 3, 1.0);\n\n trip.put(4, 0, 2.0);\n trip.put(0, 4, 2.0);\n trip.put(4, 1, 1.0);\n trip.put(1, 4, 1.0);\n trip.put(4, 2, 5.0);\n trip.put(2, 4, 5.0);\n trip.put(4, 3, 1.0);\n trip.put(3, 4, 1.0);\n let u = Vector::from(&[-629.0 / 98.0, 237.0 / 49.0, -53.0 / 49.0, 62.0 / 49.0, 23.0 / 14.0]);\n let correct_v = &[-2.0, 4.0, 3.0, -5.0, 1.0];\n let v = trip.mat_vec_mul(&u)?;\n assert_vec_approx_eq!(v.as_data(), correct_v, 1e-14);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn mat_vec_mul_pos_def_works() -> Result<(), &'static str> {\n // 2 -1 2 ...\n // -1 2 -1 => -1 2 \n // -1 2 -1 2\n let mut trip = SparseTriplet::new(3, 3, 5, Symmetry::PosDefTriangular)?;\n trip.put(0, 0, 2.0);\n trip.put(1, 1, 2.0);\n trip.put(2, 2, 2.0);\n trip.put(1, 0, -1.0);\n trip.put(2, 1, -1.0);\n let u = Vector::from(&[5.0, 8.0, 7.0]);\n let correct_v = &[2.0, 4.0, 6.0];\n let v = trip.mat_vec_mul(&u)?;\n assert_vec_approx_eq!(v.as_data(), correct_v, 1e-15);\n Ok(())\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn display_trait_works() -> Result<(), &'static str> {\n let trip = SparseTriplet::new(3, 3, 1, Symmetry::General)?;\n let correct: &str = \"\\x20\\x20\\x20\\x20\\\"nrow\\\": 3,\\n\\\n \\x20\\x20\\x20\\x20\\\"ncol\\\": 3,\\n\\\n \\x20\\x20\\x20\\x20\\\"pos\\\": 0,\\n\\\n \\x20\\x20\\x20\\x20\\\"max\\\": 
1,\\n\\\n \\x20\\x20\\x20\\x20\\\"symmetry\\\": \\\"General\\\"\";\n assert_eq!(format!(\"{}\", trip), correct);\n Ok(())\n }\n}" ]
f70b754f13d0d04e13365ea7b421ccc4528f7c8f
5,560
rs
Rust
src/params.rs
sansare/stripe-rs
ae1df580bc40f46329d80ff85347c376958b0411
[ "Apache-2.0", "MIT" ]
null
null
null
src/params.rs
sansare/stripe-rs
ae1df580bc40f46329d80ff85347c376958b0411
[ "Apache-2.0", "MIT" ]
null
null
null
src/params.rs
sansare/stripe-rs
ae1df580bc40f46329d80ff85347c376958b0411
[ "Apache-2.0", "MIT" ]
3
2019-06-05T23:35:07.000Z
2020-08-01T18:43:37.000Z
use crate::config::{err, ok, Client, Response}; use crate::error::Error; use serde::de::DeserializeOwned; use serde_derive::{Deserialize, Serialize}; use std::collections::HashMap; #[derive(Clone, Default)] pub struct Headers { pub stripe_account: Option<String>, pub client_id: Option<String>, } pub trait Identifiable { fn id(&self) -> &str; } #[derive(Debug, Deserialize, Serialize)] pub struct List<T> { pub data: Vec<T>, pub has_more: bool, pub total_count: Option<u64>, pub url: String, } impl<T: Clone> Clone for List<T> { fn clone(&self) -> Self { List { data: self.data.clone(), has_more: self.has_more, total_count: self.total_count, url: self.url.clone(), } } } impl<T: DeserializeOwned + Send + 'static> List<T> { /// Prefer `List::next` when possible pub fn get_next(client: &Client, url: &str, last_id: &str) -> Response<List<T>> { if url.starts_with("/v1/") { // TODO: Maybe parse the URL? Perhaps `List` should always parse its `url` field. let mut url = url.trim_start_matches("/v1/").to_string(); if url.contains('?') { url.push_str(&format!("&starting_after={}", last_id)); } else { url.push_str(&format!("?starting_after={}", last_id)); } client.get(&url) } else { err(Error::Unsupported("URL for fetching additional data uses different API version")) } } } impl<T: Identifiable + DeserializeOwned + Send + 'static> List<T> { /// Repeatedly queries Stripe for more data until all elements in list are fetched, using /// Stripe's default page size. /// /// Not supported by `stripe::async::Client`. 
#[cfg(not(feature = "async"))] pub fn get_all(self, client: &Client) -> Response<Vec<T>> { let mut data = Vec::new(); let mut next = self; loop { if next.has_more { let resp = next.next(&client)?; data.extend(next.data); next = resp; } else { data.extend(next.data); break; } } Ok(data) } /// Fetch additional page of data from stripe pub fn next(&self, client: &Client) -> Response<List<T>> { if let Some(last_id) = self.data.last().map(|d| d.id()) { List::get_next(client, &self.url, last_id) } else { ok(List { data: Vec::new(), has_more: false, total_count: self.total_count, url: self.url.clone(), }) } } } pub type Metadata = HashMap<String, String>; pub type Timestamp = i64; #[derive(Clone, Debug, Deserialize, Serialize)] #[serde(rename_all = "lowercase")] pub struct RangeBounds<T> { pub gt: Option<T>, pub gte: Option<T>, pub lt: Option<T>, pub lte: Option<T>, } impl<T> Default for RangeBounds<T> { fn default() -> Self { RangeBounds { gt: None, gte: None, lt: None, lte: None } } } /// A set of generic request parameters that can be used on /// list endpoints to filter their results by some timestamp. 
#[derive(Clone, Debug, Deserialize, Serialize)] #[serde(untagged)] pub enum RangeQuery<T> { Exact(T), Bounds(RangeBounds<T>), } impl<T> RangeQuery<T> { /// Filter results to exactly match a given value pub fn eq(value: T) -> RangeQuery<T> { RangeQuery::Exact(value) } /// Filter results to be after a given value pub fn gt(value: T) -> RangeQuery<T> { let mut bounds = RangeBounds::default(); bounds.gt = Some(value); RangeQuery::Bounds(bounds) } /// Filter results to be after or equal to a given value pub fn gte(value: T) -> RangeQuery<T> { let mut bounds = RangeBounds::default(); bounds.gte = Some(value); RangeQuery::Bounds(bounds) } /// Filter results to be before to a given value pub fn lt(value: T) -> RangeQuery<T> { let mut bounds = RangeBounds::default(); bounds.gt = Some(value); RangeQuery::Bounds(bounds) } /// Filter results to be before or equal to a given value pub fn lte(value: T) -> RangeQuery<T> { let mut bounds = RangeBounds::default(); bounds.gte = Some(value); RangeQuery::Bounds(bounds) } } // NOTE: Only intended to handle conversion from ASCII CamelCase to SnakeCase // This function is used to convert static Rust identifiers to snakecase // TODO: pub(crate) fn pub fn to_snakecase(camel: &str) -> String { let mut i = 0; let mut snake = String::new(); let mut chars = camel.chars().peekable(); while let Some(ch) = chars.next() { if ch.is_uppercase() { if i > 0 && !chars.peek().unwrap_or(&'A').is_uppercase() { snake.push('_'); } snake.push(ch.to_lowercase().next().unwrap_or(ch)); } else { snake.push(ch); } i += 1; } snake } #[cfg(test)] mod tests { #[test] fn to_snakecase() { use super::to_snakecase; assert_eq!(to_snakecase("snake_case").as_str(), "snake_case"); assert_eq!(to_snakecase("CamelCase").as_str(), "camel_case"); assert_eq!(to_snakecase("XMLHttpRequest").as_str(), "xml_http_request"); assert_eq!(to_snakecase("UPPER").as_str(), "upper"); assert_eq!(to_snakecase("lower").as_str(), "lower"); } }
29.574468
98
0.572482
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn to_snakecase() {\n use super::to_snakecase;\n\n assert_eq!(to_snakecase(\"snake_case\").as_str(), \"snake_case\");\n assert_eq!(to_snakecase(\"CamelCase\").as_str(), \"camel_case\");\n assert_eq!(to_snakecase(\"XMLHttpRequest\").as_str(), \"xml_http_request\");\n assert_eq!(to_snakecase(\"UPPER\").as_str(), \"upper\");\n assert_eq!(to_snakecase(\"lower\").as_str(), \"lower\");\n }\n}" ]
f70ba1ccc460032fcccf14a03737d60b6aedcb2a
69,906
rs
Rust
r1cs-std/src/bits/boolean.rs
mrmr1993/zexe
04dbf643a8aa4231bb2051ae8f3c0798fdd33f10
[ "Apache-2.0", "MIT" ]
1
2022-03-21T15:44:00.000Z
2022-03-21T15:44:00.000Z
r1cs-std/src/bits/boolean.rs
mrmr1993/zexe
04dbf643a8aa4231bb2051ae8f3c0798fdd33f10
[ "Apache-2.0", "MIT" ]
null
null
null
r1cs-std/src/bits/boolean.rs
mrmr1993/zexe
04dbf643a8aa4231bb2051ae8f3c0798fdd33f10
[ "Apache-2.0", "MIT" ]
null
null
null
use algebra::{BitIterator, Field, FpParameters, PrimeField}; use crate::{prelude::*, Assignment}; use r1cs_core::{ConstraintSystem, LinearCombination, SynthesisError, Variable}; use std::borrow::Borrow; /// Represents a variable in the constraint system which is guaranteed /// to be either zero or one. #[derive(Copy, Clone, Debug)] pub struct AllocatedBit { variable: Variable, value: Option<bool>, } impl AllocatedBit { pub fn get_value(&self) -> Option<bool> { self.value } pub fn get_variable(&self) -> Variable { self.variable } /// Performs an XOR operation over the two operands, returning /// an `AllocatedBit`. pub fn xor<ConstraintF, CS>(mut cs: CS, a: &Self, b: &Self) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { let mut result_value = None; let result_var = cs.alloc( || "xor result", || { if a.value.get()? ^ b.value.get()? { result_value = Some(true); Ok(ConstraintF::one()) } else { result_value = Some(false); Ok(ConstraintF::zero()) } }, )?; // Constrain (a + a) * (b) = (a + b - c) // Given that a and b are boolean constrained, if they // are equal, the only solution for c is 0, and if they // are different, the only solution for c is 1. // // ¬(a ∧ b) ∧ ¬(¬a ∧ ¬b) = c // (1 - (a * b)) * (1 - ((1 - a) * (1 - b))) = c // (1 - ab) * (1 - (1 - a - b + ab)) = c // (1 - ab) * (a + b - ab) = c // a + b - ab - (a^2)b - (b^2)a + (a^2)(b^2) = c // a + b - ab - ab - ab + ab = c // a + b - 2ab = c // -2a * b = c - a - b // 2a * b = a + b - c // (a + a) * b = a + b - c cs.enforce( || "xor constraint", |lc| lc + a.variable + a.variable, |lc| lc + b.variable, |lc| lc + a.variable + b.variable - result_var, ); Ok(AllocatedBit { variable: result_var, value: result_value, }) } /// Performs an AND operation over the two operands, returning /// an `AllocatedBit`. 
pub fn and<ConstraintF, CS>(mut cs: CS, a: &Self, b: &Self) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { let mut result_value = None; let result_var = cs.alloc( || "and result", || { if a.value.get()? & b.value.get()? { result_value = Some(true); Ok(ConstraintF::one()) } else { result_value = Some(false); Ok(ConstraintF::zero()) } }, )?; // Constrain (a) * (b) = (c), ensuring c is 1 iff // a AND b are both 1. cs.enforce( || "and constraint", |lc| lc + a.variable, |lc| lc + b.variable, |lc| lc + result_var, ); Ok(AllocatedBit { variable: result_var, value: result_value, }) } /// Performs an OR operation over the two operands, returning /// an `AllocatedBit`. pub fn or<ConstraintF, CS>(cs: CS, a: &Self, b: &Self) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { Self::conditionally_select(cs, &Boolean::from(*a), a, b) } /// Calculates `a AND (NOT b)`. pub fn and_not<ConstraintF, CS>(mut cs: CS, a: &Self, b: &Self) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { let mut result_value = None; let result_var = cs.alloc( || "and not result", || { if a.value.get()? & !b.value.get()? { result_value = Some(true); Ok(ConstraintF::one()) } else { result_value = Some(false); Ok(ConstraintF::zero()) } }, )?; // Constrain (a) * (1 - b) = (c), ensuring c is 1 iff // a is true and b is false, and otherwise c is 0. cs.enforce( || "and not constraint", |lc| lc + a.variable, |lc| lc + CS::one() - b.variable, |lc| lc + result_var, ); Ok(AllocatedBit { variable: result_var, value: result_value, }) } /// Calculates `(NOT a) AND (NOT b)`. pub fn nor<ConstraintF, CS>(mut cs: CS, a: &Self, b: &Self) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { let mut result_value = None; let result_var = cs.alloc( || "nor result", || { if !a.value.get()? & !b.value.get()? 
{ result_value = Some(true); Ok(ConstraintF::one()) } else { result_value = Some(false); Ok(ConstraintF::zero()) } }, )?; // Constrain (1 - a) * (1 - b) = (c), ensuring c is 1 iff // a and b are both false, and otherwise c is 0. cs.enforce( || "nor constraint", |lc| lc + CS::one() - a.variable, |lc| lc + CS::one() - b.variable, |lc| lc + result_var, ); Ok(AllocatedBit { variable: result_var, value: result_value, }) } } impl PartialEq for AllocatedBit { fn eq(&self, other: &Self) -> bool { self.value.is_some() && other.value.is_some() && self.value == other.value } } impl Eq for AllocatedBit {} impl<ConstraintF: Field> AllocGadget<bool, ConstraintF> for AllocatedBit { fn alloc<F, T, CS: ConstraintSystem<ConstraintF>>( mut cs: CS, value_gen: F, ) -> Result<Self, SynthesisError> where F: FnOnce() -> Result<T, SynthesisError>, T: Borrow<bool>, { let mut value = None; let var = cs.alloc( || "boolean", || { value = Some(*value_gen()?.borrow()); if value.get()? { Ok(ConstraintF::one()) } else { Ok(ConstraintF::zero()) } }, )?; // Constrain: (1 - a) * a = 0 // This constrains a to be either 0 or 1. cs.enforce( || "boolean constraint", |lc| lc + CS::one() - var, |lc| lc + var, |lc| lc, ); Ok(AllocatedBit { variable: var, value, }) } fn alloc_input<F, T, CS: ConstraintSystem<ConstraintF>>( mut cs: CS, value_gen: F, ) -> Result<Self, SynthesisError> where F: FnOnce() -> Result<T, SynthesisError>, T: Borrow<bool>, { let mut value = None; let var = cs.alloc_input( || "boolean", || { value = Some(*value_gen()?.borrow()); if value.get()? { Ok(ConstraintF::one()) } else { Ok(ConstraintF::zero()) } }, )?; // Constrain: (1 - a) * a = 0 // This constrains a to be either 0 or 1. 
cs.enforce( || "boolean constraint", |lc| lc + CS::one() - var, |lc| lc + var, |lc| lc, ); Ok(AllocatedBit { variable: var, value, }) } } impl<ConstraintF: Field> CondSelectGadget<ConstraintF> for AllocatedBit { fn conditionally_select<CS: ConstraintSystem<ConstraintF>>( mut cs: CS, cond: &Boolean, first: &Self, second: &Self, ) -> Result<Self, SynthesisError> { let result = Self::alloc(cs.ns(|| ""), || { cond.get_value() .and_then(|cond| { { if cond { first } else { second } } .get_value() }) .get() })?; // a = self; b = other; c = cond; // // r = c * a + (1 - c) * b // r = b + c * (a - b) // c * (a - b) = r - b let one = CS::one(); cs.enforce( || "conditionally_select", |_| cond.lc(one, ConstraintF::one()), |lc| lc + first.variable - second.variable, |lc| lc + result.variable - second.variable, ); Ok(result) } fn cost() -> usize { 1 } } /// This is a boolean value which may be either a constant or /// an interpretation of an `AllocatedBit`. #[derive(Copy, Clone, Debug)] pub enum Boolean { /// Existential view of the boolean variable Is(AllocatedBit), /// Negated view of the boolean variable Not(AllocatedBit), /// Constant (not an allocated variable) Constant(bool), } impl Boolean { pub fn get_value(&self) -> Option<bool> { match *self { Boolean::Constant(c) => Some(c), Boolean::Is(ref v) => v.get_value(), Boolean::Not(ref v) => v.get_value().map(|b| !b), } } pub fn lc<ConstraintF: Field>( &self, one: Variable, coeff: ConstraintF, ) -> LinearCombination<ConstraintF> { match *self { Boolean::Constant(c) => { if c { (coeff, one).into() } else { LinearCombination::<ConstraintF>::zero() } }, Boolean::Is(ref v) => (coeff, v.get_variable()).into(), Boolean::Not(ref v) => { LinearCombination::<ConstraintF>::zero() + (coeff, one) - (coeff, v.get_variable()) }, } } /// Construct a boolean vector from a vector of u8 pub fn constant_u8_vec<ConstraintF: Field, CS: ConstraintSystem<ConstraintF>>( cs: &mut CS, values: &[u8], ) -> Vec<Self> { let mut input_bits = vec![]; for 
(byte_i, input_byte) in values.iter().enumerate() { for bit_i in (0..8).rev() { let cs = cs.ns(|| format!("input_bit_gadget {} {}", byte_i, bit_i)); input_bits.push( AllocatedBit::alloc(cs, || Ok((input_byte >> bit_i) & 1u8 == 1u8)) .unwrap() .into(), ); } } input_bits } /// Construct a boolean from a known constant pub fn constant(b: bool) -> Self { Boolean::Constant(b) } /// Return a negated interpretation of this boolean. pub fn not(&self) -> Self { match *self { Boolean::Constant(c) => Boolean::Constant(!c), Boolean::Is(ref v) => Boolean::Not(*v), Boolean::Not(ref v) => Boolean::Is(*v), } } /// Perform XOR over two boolean operands pub fn xor<'a, ConstraintF, CS>( cs: CS, a: &'a Self, b: &'a Self, ) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { match (a, b) { (&Boolean::Constant(false), x) | (x, &Boolean::Constant(false)) => Ok(*x), (&Boolean::Constant(true), x) | (x, &Boolean::Constant(true)) => Ok(x.not()), // a XOR (NOT b) = NOT(a XOR b) (is @ &Boolean::Is(_), not @ &Boolean::Not(_)) | (not @ &Boolean::Not(_), is @ &Boolean::Is(_)) => { Ok(Boolean::xor(cs, is, &not.not())?.not()) }, // a XOR b = (NOT a) XOR (NOT b) (&Boolean::Is(ref a), &Boolean::Is(ref b)) | (&Boolean::Not(ref a), &Boolean::Not(ref b)) => { Ok(Boolean::Is(AllocatedBit::xor(cs, a, b)?)) }, } } /// Perform OR over two boolean operands pub fn or<'a, ConstraintF, CS>(cs: CS, a: &'a Self, b: &'a Self) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { match (a, b) { (&Boolean::Constant(false), x) | (x, &Boolean::Constant(false)) => Ok(*x), (&Boolean::Constant(true), _) | (_, &Boolean::Constant(true)) => { Ok(Boolean::Constant(true)) }, // a OR b = NOT ((NOT a) AND b) (a @ &Boolean::Is(_), b @ &Boolean::Not(_)) | (b @ &Boolean::Not(_), a @ &Boolean::Is(_)) | (b @ &Boolean::Not(_), a @ &Boolean::Not(_)) => { Ok(Boolean::and(cs, &a.not(), &b.not())?.not()) }, (&Boolean::Is(ref a), &Boolean::Is(ref b)) => { 
AllocatedBit::or(cs, a, b).map(Boolean::from) }, } } /// Perform AND over two boolean operands pub fn and<'a, ConstraintF, CS>( cs: CS, a: &'a Self, b: &'a Self, ) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { match (a, b) { // false AND x is always false (&Boolean::Constant(false), _) | (_, &Boolean::Constant(false)) => { Ok(Boolean::Constant(false)) }, // true AND x is always x (&Boolean::Constant(true), x) | (x, &Boolean::Constant(true)) => Ok(*x), // a AND (NOT b) (&Boolean::Is(ref is), &Boolean::Not(ref not)) | (&Boolean::Not(ref not), &Boolean::Is(ref is)) => { Ok(Boolean::Is(AllocatedBit::and_not(cs, is, not)?)) }, // (NOT a) AND (NOT b) = a NOR b (&Boolean::Not(ref a), &Boolean::Not(ref b)) => { Ok(Boolean::Is(AllocatedBit::nor(cs, a, b)?)) }, // a AND b (&Boolean::Is(ref a), &Boolean::Is(ref b)) => { Ok(Boolean::Is(AllocatedBit::and(cs, a, b)?)) }, } } pub fn kary_and<ConstraintF, CS>(mut cs: CS, bits: &[Self]) -> Result<Self, SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { assert!(!bits.is_empty()); let mut bits = bits.iter(); let mut cur: Self = *bits.next().unwrap(); for (i, next) in bits.enumerate() { cur = Boolean::and(cs.ns(|| format!("AND {}", i)), &cur, next)?; } Ok(cur) } /// Asserts that at least one operand is false. 
pub fn enforce_nand<ConstraintF, CS>(mut cs: CS, bits: &[Self]) -> Result<(), SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { let res = Self::kary_and(&mut cs, bits)?; match res { Boolean::Constant(false) => Ok(()), Boolean::Constant(true) => Err(SynthesisError::AssignmentMissing), Boolean::Is(ref res) => { cs.enforce( || "enforce nand", |lc| lc, |lc| lc, |lc| lc + res.get_variable(), ); Ok(()) }, Boolean::Not(ref res) => { cs.enforce( || "enforce nand", |lc| lc, |lc| lc, |lc| lc + CS::one() - res.get_variable(), ); Ok(()) }, } } /// Asserts that this bit_gadget representation is "in /// the field" when interpreted in big endian. pub fn enforce_in_field<ConstraintF, CS, F: PrimeField>( mut cs: CS, bits: &[Self], ) -> Result<(), SynthesisError> where ConstraintF: Field, CS: ConstraintSystem<ConstraintF>, { let mut bits_iter = bits.iter(); // b = char() - 1 let mut b = F::characteristic().to_vec(); assert_eq!(b[0] % 2, 1); b[0] -= 1; // Runs of ones in r let mut last_run = Boolean::constant(true); let mut current_run = vec![]; let mut found_one = false; let mut run_i = 0; let mut nand_i = 0; let char_num_bits = <F as PrimeField>::Params::MODULUS_BITS as usize; if bits.len() > char_num_bits { let num_extra_bits = bits.len() - char_num_bits; let mut or_result = Boolean::constant(false); for (i, should_be_zero) in bits[0..num_extra_bits].iter().enumerate() { or_result = Boolean::or( &mut cs.ns(|| format!("Check {}-th or", i)), &or_result, should_be_zero, )?; let _ = bits_iter.next().unwrap(); } or_result.enforce_equal( &mut cs.ns(|| "Check that or of extra bits is zero"), &Boolean::constant(false), )?; } for b in BitIterator::new(b) { // Skip over unset bits at the beginning found_one |= b; if !found_one { continue; } let a = bits_iter.next().unwrap(); if b { // This is part of a run of ones. 
current_run.push(a.clone()); } else { if !current_run.is_empty() { // This is the start of a run of zeros, but we need // to k-ary AND against `last_run` first. current_run.push(last_run); last_run = Self::kary_and(cs.ns(|| format!("run {}", run_i)), &current_run)?; run_i += 1; current_run.truncate(0); } // If `last_run` is true, `a` must be false, or it would // not be in the field. // // If `last_run` is false, `a` can be true or false. // // Ergo, at least one of `last_run` and `a` must be false. Self::enforce_nand(cs.ns(|| format!("nand {}", nand_i)), &[last_run, *a])?; nand_i += 1; } } assert!(bits_iter.next().is_none()); // We should always end in a "run" of zeros, because // the characteristic is an odd prime. So, this should // be empty. assert!(current_run.is_empty()); Ok(()) } } impl PartialEq for Boolean { fn eq(&self, other: &Self) -> bool { use self::Boolean::*; match (*self, *other) { (Is(a), Is(b)) | (Not(a), Not(b)) => a == b, (Is(a), Not(b)) | (Not(a), Is(b)) => a != b, (Is(a), Constant(b)) | (Constant(b), Is(a)) => a.value.unwrap() == b, (Not(a), Constant(b)) | (Constant(b), Not(a)) => a.value.unwrap() != b, (Constant(a), Constant(b)) => a == b, } } } impl Eq for Boolean {} impl From<AllocatedBit> for Boolean { fn from(b: AllocatedBit) -> Boolean { Boolean::Is(b) } } impl<ConstraintF: Field> AllocGadget<bool, ConstraintF> for Boolean { fn alloc<F, T, CS: ConstraintSystem<ConstraintF>>( cs: CS, value_gen: F, ) -> Result<Self, SynthesisError> where F: FnOnce() -> Result<T, SynthesisError>, T: Borrow<bool>, { AllocatedBit::alloc(cs, value_gen).map(Boolean::from) } fn alloc_input<F, T, CS: ConstraintSystem<ConstraintF>>( cs: CS, value_gen: F, ) -> Result<Self, SynthesisError> where F: FnOnce() -> Result<T, SynthesisError>, T: Borrow<bool>, { AllocatedBit::alloc_input(cs, value_gen).map(Boolean::from) } } impl<ConstraintF: Field> EqGadget<ConstraintF> for Boolean {} impl<ConstraintF: Field> ConditionalEqGadget<ConstraintF> for Boolean { fn 
conditional_enforce_equal<CS>( &self, mut cs: CS, other: &Self, condition: &Boolean, ) -> Result<(), SynthesisError> where CS: ConstraintSystem<ConstraintF>, { use self::Boolean::*; let one = CS::one(); let difference: LinearCombination<ConstraintF> = match (self, other) { // 1 - 1 = 0 - 0 = 0 (Constant(true), Constant(true)) | (Constant(false), Constant(false)) => return Ok(()), // false != true (Constant(_), Constant(_)) => return Err(SynthesisError::AssignmentMissing), // 1 - a (Constant(true), Is(a)) | (Is(a), Constant(true)) => { LinearCombination::zero() + one - a.get_variable() }, // a - 0 = a (Constant(false), Is(a)) | (Is(a), Constant(false)) => { LinearCombination::zero() + a.get_variable() }, // 1 - !a = 1 - (1 - a) = a (Constant(true), Not(a)) | (Not(a), Constant(true)) => { LinearCombination::zero() + a.get_variable() }, // !a - 0 = !a = 1 - a (Constant(false), Not(a)) | (Not(a), Constant(false)) => { LinearCombination::zero() + one - a.get_variable() }, // b - a, (Is(a), Is(b)) => LinearCombination::zero() + b.get_variable() - a.get_variable(), // !b - a = (1 - b) - a (Is(a), Not(b)) | (Not(b), Is(a)) => { LinearCombination::zero() + one - b.get_variable() - a.get_variable() }, // !b - !a = (1 - b) - (1 - a) = a - b, (Not(a), Not(b)) => LinearCombination::zero() + a.get_variable() - b.get_variable(), }; if let Constant(false) = condition { Ok(()) } else { cs.enforce( || "conditional_equals", |lc| difference + &lc, |lc| condition.lc(one, ConstraintF::one()) + &lc, |lc| lc, ); Ok(()) } } fn cost() -> usize { 1 } } impl<ConstraintF: Field> ToBytesGadget<ConstraintF> for Boolean { fn to_bytes<CS: ConstraintSystem<ConstraintF>>( &self, _cs: CS, ) -> Result<Vec<UInt8>, SynthesisError> { let mut bits = vec![Boolean::constant(false); 7]; bits.push(*self); bits.reverse(); let value = self.get_value().map(|val| val as u8); let byte = UInt8 { bits, value }; Ok(vec![byte]) } /// Additionally checks if the produced list of booleans is 'valid'. 
fn to_bytes_strict<CS: ConstraintSystem<ConstraintF>>( &self, cs: CS, ) -> Result<Vec<UInt8>, SynthesisError> { self.to_bytes(cs) } } #[cfg(test)] mod test { use super::{AllocatedBit, Boolean}; use crate::{prelude::*, test_constraint_system::TestConstraintSystem}; use algebra::{fields::bls12_381::Fr, BitIterator, Field, PrimeField, UniformRand}; use r1cs_core::ConstraintSystem; use rand::SeedableRng; use rand_xorshift::XorShiftRng; use std::str::FromStr; #[test] fn test_boolean_to_byte() { for val in [true, false].iter() { let mut cs = TestConstraintSystem::<Fr>::new(); let a: Boolean = AllocatedBit::alloc(&mut cs, || Ok(*val)).unwrap().into(); let bytes = a.to_bytes(&mut cs.ns(|| "ToBytes")).unwrap(); assert_eq!(bytes.len(), 1); let byte = &bytes[0]; assert_eq!(byte.value.unwrap(), *val as u8); for (i, bit_gadget) in byte.bits.iter().enumerate() { assert_eq!( bit_gadget.get_value().unwrap(), (byte.value.unwrap() >> i) & 1 == 1 ); } } } #[test] fn test_allocated_bit() { let mut cs = TestConstraintSystem::<Fr>::new(); AllocatedBit::alloc(&mut cs, || Ok(true)).unwrap(); assert!(cs.get("boolean") == Fr::one()); assert!(cs.is_satisfied()); cs.set("boolean", Fr::zero()); assert!(cs.is_satisfied()); cs.set("boolean", Fr::from_str("2").unwrap()); assert!(!cs.is_satisfied()); assert!(cs.which_is_unsatisfied() == Some("boolean constraint")); } #[test] fn test_xor() { for a_val in [false, true].iter() { for b_val in [false, true].iter() { let mut cs = TestConstraintSystem::<Fr>::new(); let a = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(*a_val)).unwrap(); let b = AllocatedBit::alloc(cs.ns(|| "b"), || Ok(*b_val)).unwrap(); let c = AllocatedBit::xor(&mut cs, &a, &b).unwrap(); assert_eq!(c.value.unwrap(), *a_val ^ *b_val); assert!(cs.is_satisfied()); } } } #[test] fn test_or() { for a_val in [false, true].iter() { for b_val in [false, true].iter() { let mut cs = TestConstraintSystem::<Fr>::new(); let a = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(*a_val)).unwrap(); let b = 
AllocatedBit::alloc(cs.ns(|| "b"), || Ok(*b_val)).unwrap(); let c = AllocatedBit::or(&mut cs, &a, &b).unwrap(); assert_eq!(c.value.unwrap(), *a_val | *b_val); assert!(cs.is_satisfied()); assert!(cs.get("a/boolean") == if *a_val { Field::one() } else { Field::zero() }); assert!(cs.get("b/boolean") == if *b_val { Field::one() } else { Field::zero() }); } } } #[test] fn test_and() { for a_val in [false, true].iter() { for b_val in [false, true].iter() { let mut cs = TestConstraintSystem::<Fr>::new(); let a = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(*a_val)).unwrap(); let b = AllocatedBit::alloc(cs.ns(|| "b"), || Ok(*b_val)).unwrap(); let c = AllocatedBit::and(&mut cs, &a, &b).unwrap(); assert_eq!(c.value.unwrap(), *a_val & *b_val); assert!(cs.is_satisfied()); assert!(cs.get("a/boolean") == if *a_val { Field::one() } else { Field::zero() }); assert!(cs.get("b/boolean") == if *b_val { Field::one() } else { Field::zero() }); assert!( cs.get("and result") == if *a_val & *b_val { Field::one() } else { Field::zero() } ); // Invert the result and check if the constraint system is still satisfied cs.set( "and result", if *a_val & *b_val { Field::zero() } else { Field::one() }, ); assert!(!cs.is_satisfied()); } } } #[test] fn test_and_not() { for a_val in [false, true].iter() { for b_val in [false, true].iter() { let mut cs = TestConstraintSystem::<Fr>::new(); let a = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(*a_val)).unwrap(); let b = AllocatedBit::alloc(cs.ns(|| "b"), || Ok(*b_val)).unwrap(); let c = AllocatedBit::and_not(&mut cs, &a, &b).unwrap(); assert_eq!(c.value.unwrap(), *a_val & !*b_val); assert!(cs.is_satisfied()); assert!(cs.get("a/boolean") == if *a_val { Field::one() } else { Field::zero() }); assert!(cs.get("b/boolean") == if *b_val { Field::one() } else { Field::zero() }); assert!( cs.get("and not result") == if *a_val & !*b_val { Field::one() } else { Field::zero() } ); // Invert the result and check if the constraint system is still satisfied cs.set( "and not 
result", if *a_val & !*b_val { Field::zero() } else { Field::one() }, ); assert!(!cs.is_satisfied()); } } } #[test] fn test_nor() { for a_val in [false, true].iter() { for b_val in [false, true].iter() { let mut cs = TestConstraintSystem::<Fr>::new(); let a = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(*a_val)).unwrap(); let b = AllocatedBit::alloc(cs.ns(|| "b"), || Ok(*b_val)).unwrap(); let c = AllocatedBit::nor(&mut cs, &a, &b).unwrap(); assert_eq!(c.value.unwrap(), !*a_val & !*b_val); assert!(cs.is_satisfied()); assert!(cs.get("a/boolean") == if *a_val { Field::one() } else { Field::zero() }); assert!(cs.get("b/boolean") == if *b_val { Field::one() } else { Field::zero() }); assert!( cs.get("nor result") == if !*a_val & !*b_val { Field::one() } else { Field::zero() } ); // Invert the result and check if the constraint system is still satisfied cs.set( "nor result", if !*a_val & !*b_val { Field::zero() } else { Field::one() }, ); assert!(!cs.is_satisfied()); } } } #[test] fn test_enforce_equal() { for a_bool in [false, true].iter().cloned() { for b_bool in [false, true].iter().cloned() { for a_neg in [false, true].iter().cloned() { for b_neg in [false, true].iter().cloned() { let mut cs = TestConstraintSystem::<Fr>::new(); let mut a: Boolean = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(a_bool)) .unwrap() .into(); let mut b: Boolean = AllocatedBit::alloc(cs.ns(|| "b"), || Ok(b_bool)) .unwrap() .into(); if a_neg { a = a.not(); } if b_neg { b = b.not(); } a.enforce_equal(&mut cs, &b).unwrap(); assert_eq!(cs.is_satisfied(), (a_bool ^ a_neg) == (b_bool ^ b_neg)); } } } } } #[test] fn test_conditional_enforce_equal() { for a_bool in [false, true].iter().cloned() { for b_bool in [false, true].iter().cloned() { for a_neg in [false, true].iter().cloned() { for b_neg in [false, true].iter().cloned() { let mut cs = TestConstraintSystem::<Fr>::new(); // First test if constraint system is satisfied // when we do want to enforce the condition. 
let mut a: Boolean = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(a_bool)) .unwrap() .into(); let mut b: Boolean = AllocatedBit::alloc(cs.ns(|| "b"), || Ok(b_bool)) .unwrap() .into(); if a_neg { a = a.not(); } if b_neg { b = b.not(); } a.conditional_enforce_equal(&mut cs, &b, &Boolean::constant(true)) .unwrap(); assert_eq!(cs.is_satisfied(), (a_bool ^ a_neg) == (b_bool ^ b_neg)); // Now test if constraint system is satisfied even // when we don't want to enforce the condition. let mut cs = TestConstraintSystem::<Fr>::new(); let mut a: Boolean = AllocatedBit::alloc(cs.ns(|| "a"), || Ok(a_bool)) .unwrap() .into(); let mut b: Boolean = AllocatedBit::alloc(cs.ns(|| "b"), || Ok(b_bool)) .unwrap() .into(); if a_neg { a = a.not(); } if b_neg { b = b.not(); } let false_cond = AllocatedBit::alloc(cs.ns(|| "cond"), || Ok(false)) .unwrap() .into(); a.conditional_enforce_equal(&mut cs, &b, &false_cond) .unwrap(); assert!(cs.is_satisfied()); } } } } } #[test] fn test_boolean_negation() { let mut cs = TestConstraintSystem::<Fr>::new(); let mut b = Boolean::from(AllocatedBit::alloc(&mut cs, || Ok(true)).unwrap()); match b { Boolean::Is(_) => {}, _ => panic!("unexpected value"), } b = b.not(); match b { Boolean::Not(_) => {}, _ => panic!("unexpected value"), } b = b.not(); match b { Boolean::Is(_) => {}, _ => panic!("unexpected value"), } b = Boolean::constant(true); match b { Boolean::Constant(true) => {}, _ => panic!("unexpected value"), } b = b.not(); match b { Boolean::Constant(false) => {}, _ => panic!("unexpected value"), } b = b.not(); match b { Boolean::Constant(true) => {}, _ => panic!("unexpected value"), } } #[derive(Copy, Clone, Debug)] enum OperandType { True, False, AllocatedTrue, AllocatedFalse, NegatedAllocatedTrue, NegatedAllocatedFalse, } #[test] fn test_boolean_xor() { let variants = [ OperandType::True, OperandType::False, OperandType::AllocatedTrue, OperandType::AllocatedFalse, OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedFalse, ]; for 
first_operand in variants.iter().cloned() { for second_operand in variants.iter().cloned() { let mut cs = TestConstraintSystem::<Fr>::new(); let a; let b; { let mut dyn_construct = |operand, name| { let cs = cs.ns(|| name); match operand { OperandType::True => Boolean::constant(true), OperandType::False => Boolean::constant(false), OperandType::AllocatedTrue => { Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()) }, OperandType::AllocatedFalse => { Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()) }, OperandType::NegatedAllocatedTrue => { Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()).not() }, OperandType::NegatedAllocatedFalse => { Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()).not() }, } }; a = dyn_construct(first_operand, "a"); b = dyn_construct(second_operand, "b"); } let c = Boolean::xor(&mut cs, &a, &b).unwrap(); assert!(cs.is_satisfied()); match (first_operand, second_operand, c) { (OperandType::True, OperandType::True, Boolean::Constant(false)) => {}, (OperandType::True, OperandType::False, Boolean::Constant(true)) => {}, (OperandType::True, OperandType::AllocatedTrue, Boolean::Not(_)) => {}, (OperandType::True, OperandType::AllocatedFalse, Boolean::Not(_)) => {}, (OperandType::True, OperandType::NegatedAllocatedTrue, Boolean::Is(_)) => {}, (OperandType::True, OperandType::NegatedAllocatedFalse, Boolean::Is(_)) => {}, (OperandType::False, OperandType::True, Boolean::Constant(true)) => {}, (OperandType::False, OperandType::False, Boolean::Constant(false)) => {}, (OperandType::False, OperandType::AllocatedTrue, Boolean::Is(_)) => {}, (OperandType::False, OperandType::AllocatedFalse, Boolean::Is(_)) => {}, (OperandType::False, OperandType::NegatedAllocatedTrue, Boolean::Not(_)) => {}, (OperandType::False, OperandType::NegatedAllocatedFalse, Boolean::Not(_)) => {}, (OperandType::AllocatedTrue, OperandType::True, Boolean::Not(_)) => {}, (OperandType::AllocatedTrue, OperandType::False, Boolean::Is(_)) => {}, ( 
OperandType::AllocatedTrue, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedTrue, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedTrue, OperandType::NegatedAllocatedTrue, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedTrue, OperandType::NegatedAllocatedFalse, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == Field::one()); assert_eq!(v.value, Some(true)); }, (OperandType::AllocatedFalse, OperandType::True, Boolean::Not(_)) => {}, (OperandType::AllocatedFalse, OperandType::False, Boolean::Is(_)) => {}, ( OperandType::AllocatedFalse, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedFalse, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedFalse, OperandType::NegatedAllocatedTrue, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedFalse, OperandType::NegatedAllocatedFalse, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, (OperandType::NegatedAllocatedTrue, OperandType::True, Boolean::Is(_)) => {}, (OperandType::NegatedAllocatedTrue, OperandType::False, Boolean::Not(_)) => {}, ( OperandType::NegatedAllocatedTrue, OperandType::AllocatedTrue, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedTrue, OperandType::AllocatedFalse, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == 
Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::one()); assert_eq!(v.value, Some(true)); }, (OperandType::NegatedAllocatedFalse, OperandType::True, Boolean::Is(_)) => {}, (OperandType::NegatedAllocatedFalse, OperandType::False, Boolean::Not(_)) => {}, ( OperandType::NegatedAllocatedFalse, OperandType::AllocatedTrue, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::NegatedAllocatedFalse, OperandType::AllocatedFalse, Boolean::Not(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedFalse, OperandType::NegatedAllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::NegatedAllocatedFalse, OperandType::NegatedAllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("xor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, _ => panic!("this should never be encountered"), } } } } #[test] fn test_boolean_or() { let variants = [ OperandType::True, OperandType::False, OperandType::AllocatedTrue, OperandType::AllocatedFalse, OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedFalse, ]; for first_operand in variants.iter().cloned() { for second_operand in variants.iter().cloned() { let mut cs = TestConstraintSystem::<Fr>::new(); let a; let b; { let mut dyn_construct = |operand, name| { let cs = cs.ns(|| name); match operand { OperandType::True => Boolean::constant(true), OperandType::False => Boolean::constant(false), OperandType::AllocatedTrue => { Boolean::from(AllocatedBit::alloc(cs, || 
Ok(true)).unwrap()) }, OperandType::AllocatedFalse => { Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()) }, OperandType::NegatedAllocatedTrue => { Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()).not() }, OperandType::NegatedAllocatedFalse => { Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()).not() }, } }; a = dyn_construct(first_operand, "a"); b = dyn_construct(second_operand, "b"); } let c = Boolean::or(&mut cs, &a, &b).unwrap(); assert!(cs.is_satisfied()); match (first_operand, second_operand, c) { (OperandType::True, OperandType::True, Boolean::Constant(true)) => {}, (OperandType::True, OperandType::False, Boolean::Constant(true)) => {}, (OperandType::True, OperandType::AllocatedTrue, Boolean::Constant(true)) => {}, (OperandType::True, OperandType::AllocatedFalse, Boolean::Constant(true)) => {}, ( OperandType::True, OperandType::NegatedAllocatedTrue, Boolean::Constant(true), ) => {}, ( OperandType::True, OperandType::NegatedAllocatedFalse, Boolean::Constant(true), ) => {}, (OperandType::False, OperandType::True, Boolean::Constant(true)) => {}, (OperandType::False, OperandType::False, Boolean::Constant(false)) => {}, (OperandType::False, OperandType::AllocatedTrue, Boolean::Is(_)) => {}, (OperandType::False, OperandType::AllocatedFalse, Boolean::Is(_)) => {}, (OperandType::False, OperandType::NegatedAllocatedTrue, Boolean::Not(_)) => {}, (OperandType::False, OperandType::NegatedAllocatedFalse, Boolean::Not(_)) => {}, (OperandType::AllocatedTrue, OperandType::True, Boolean::Constant(true)) => {}, (OperandType::AllocatedTrue, OperandType::False, Boolean::Is(_)) => {}, ( OperandType::AllocatedTrue, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedTrue, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedTrue, OperandType::NegatedAllocatedTrue, Boolean::Not(ref v), ) => { assert_eq!(v.value, 
Some(false)); }, ( OperandType::AllocatedTrue, OperandType::NegatedAllocatedFalse, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, (OperandType::AllocatedFalse, OperandType::True, Boolean::Constant(true)) => {}, (OperandType::AllocatedFalse, OperandType::False, Boolean::Is(_)) => {}, ( OperandType::AllocatedFalse, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedFalse, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedFalse, OperandType::NegatedAllocatedTrue, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedFalse, OperandType::NegatedAllocatedFalse, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedTrue, OperandType::True, Boolean::Constant(true), ) => {}, (OperandType::NegatedAllocatedTrue, OperandType::False, Boolean::Not(_)) => {}, ( OperandType::NegatedAllocatedTrue, OperandType::AllocatedTrue, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedTrue, OperandType::AllocatedFalse, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(true)); }, ( OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedTrue, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(true)); }, ( OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedFalse, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedFalse, OperandType::True, Boolean::Constant(true), ) => {}, (OperandType::NegatedAllocatedFalse, OperandType::False, Boolean::Not(_)) => {}, ( OperandType::NegatedAllocatedFalse, OperandType::AllocatedTrue, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedFalse, OperandType::AllocatedFalse, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedFalse, 
OperandType::NegatedAllocatedTrue, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedFalse, OperandType::NegatedAllocatedFalse, Boolean::Not(ref v), ) => { assert_eq!(v.value, Some(false)); }, _ => panic!( "this should never be encountered, in case: (a = {:?}, b = {:?}, c = {:?})", a, b, c ), } } } } #[test] fn test_boolean_and() { let variants = [ OperandType::True, OperandType::False, OperandType::AllocatedTrue, OperandType::AllocatedFalse, OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedFalse, ]; for first_operand in variants.iter().cloned() { for second_operand in variants.iter().cloned() { let mut cs = TestConstraintSystem::<Fr>::new(); let a; let b; { let mut dyn_construct = |operand, name| { let cs = cs.ns(|| name); match operand { OperandType::True => Boolean::constant(true), OperandType::False => Boolean::constant(false), OperandType::AllocatedTrue => { Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()) }, OperandType::AllocatedFalse => { Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()) }, OperandType::NegatedAllocatedTrue => { Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()).not() }, OperandType::NegatedAllocatedFalse => { Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()).not() }, } }; a = dyn_construct(first_operand, "a"); b = dyn_construct(second_operand, "b"); } let c = Boolean::and(&mut cs, &a, &b).unwrap(); assert!(cs.is_satisfied()); match (first_operand, second_operand, c) { (OperandType::True, OperandType::True, Boolean::Constant(true)) => {}, (OperandType::True, OperandType::False, Boolean::Constant(false)) => {}, (OperandType::True, OperandType::AllocatedTrue, Boolean::Is(_)) => {}, (OperandType::True, OperandType::AllocatedFalse, Boolean::Is(_)) => {}, (OperandType::True, OperandType::NegatedAllocatedTrue, Boolean::Not(_)) => {}, (OperandType::True, OperandType::NegatedAllocatedFalse, Boolean::Not(_)) => {}, (OperandType::False, 
OperandType::True, Boolean::Constant(false)) => {}, (OperandType::False, OperandType::False, Boolean::Constant(false)) => {}, (OperandType::False, OperandType::AllocatedTrue, Boolean::Constant(false)) => { }, (OperandType::False, OperandType::AllocatedFalse, Boolean::Constant(false)) => { }, ( OperandType::False, OperandType::NegatedAllocatedTrue, Boolean::Constant(false), ) => {}, ( OperandType::False, OperandType::NegatedAllocatedFalse, Boolean::Constant(false), ) => {}, (OperandType::AllocatedTrue, OperandType::True, Boolean::Is(_)) => {}, (OperandType::AllocatedTrue, OperandType::False, Boolean::Constant(false)) => { }, ( OperandType::AllocatedTrue, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("and result") == Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::AllocatedTrue, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("and result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedTrue, OperandType::NegatedAllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedTrue, OperandType::NegatedAllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::one()); assert_eq!(v.value, Some(true)); }, (OperandType::AllocatedFalse, OperandType::True, Boolean::Is(_)) => {}, (OperandType::AllocatedFalse, OperandType::False, Boolean::Constant(false)) => { }, ( OperandType::AllocatedFalse, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("and result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedFalse, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("and result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::AllocatedFalse, OperandType::NegatedAllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::zero()); assert_eq!(v.value, 
Some(false)); }, ( OperandType::AllocatedFalse, OperandType::NegatedAllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::zero()); assert_eq!(v.value, Some(false)); }, (OperandType::NegatedAllocatedTrue, OperandType::True, Boolean::Not(_)) => {}, ( OperandType::NegatedAllocatedTrue, OperandType::False, Boolean::Constant(false), ) => {}, ( OperandType::NegatedAllocatedTrue, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedTrue, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("nor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedTrue, OperandType::NegatedAllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("nor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, (OperandType::NegatedAllocatedFalse, OperandType::True, Boolean::Not(_)) => {}, ( OperandType::NegatedAllocatedFalse, OperandType::False, Boolean::Constant(false), ) => {}, ( OperandType::NegatedAllocatedFalse, OperandType::AllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::one()); assert_eq!(v.value, Some(true)); }, ( OperandType::NegatedAllocatedFalse, OperandType::AllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("and not result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedFalse, OperandType::NegatedAllocatedTrue, Boolean::Is(ref v), ) => { assert!(cs.get("nor result") == Field::zero()); assert_eq!(v.value, Some(false)); }, ( OperandType::NegatedAllocatedFalse, OperandType::NegatedAllocatedFalse, Boolean::Is(ref v), ) => { assert!(cs.get("nor result") == Field::one()); assert_eq!(v.value, 
Some(true)); }, _ => { panic!( "unexpected behavior at {:?} AND {:?}", first_operand, second_operand ); }, } } } } #[test] fn test_enforce_in_field() { { let mut cs = TestConstraintSystem::<Fr>::new(); let mut bits = vec![]; for (i, b) in BitIterator::new(Fr::characteristic()).skip(1).enumerate() { bits.push(Boolean::from( AllocatedBit::alloc(cs.ns(|| format!("bit_gadget {}", i)), || Ok(b)).unwrap(), )); } Boolean::enforce_in_field::<_, _, Fr>(&mut cs, &bits).unwrap(); assert!(!cs.is_satisfied()); } let mut rng = XorShiftRng::seed_from_u64(1231275789u64); for _ in 0..1000 { let r = Fr::rand(&mut rng); let mut cs = TestConstraintSystem::<Fr>::new(); let mut bits = vec![]; for (i, b) in BitIterator::new(r.into_repr()).skip(1).enumerate() { bits.push(Boolean::from( AllocatedBit::alloc(cs.ns(|| format!("bit_gadget {}", i)), || Ok(b)).unwrap(), )); } Boolean::enforce_in_field::<_, _, Fr>(&mut cs, &bits).unwrap(); assert!(cs.is_satisfied()); } // for _ in 0..1000 { // // Sample a random element not in the field // let r = loop { // let mut a = Fr::rand(&mut rng).into_repr(); // let b = Fr::rand(&mut rng).into_repr(); // a.add_nocarry(&b); // // we're shaving off the high bit_gadget later // a.as_mut()[3] &= 0x7fffffffffffffff; // if Fr::from_repr(a).is_err() { // break a; // } // }; // let mut cs = TestConstraintSystem::<Fr>::new(); // let mut bits = vec![]; // for (i, b) in BitIterator::new(r).skip(1).enumerate() { // bits.push(Boolean::from( // AllocatedBit::alloc(cs.ns(|| format!("bit_gadget {}", // i)), Some(b)) .unwrap(), // )); // } // Boolean::enforce_in_field::<_, _, Fr>(&mut cs, &bits).unwrap(); // assert!(!cs.is_satisfied()); // } } #[test] fn test_enforce_nand() { { let mut cs = TestConstraintSystem::<Fr>::new(); assert!(Boolean::enforce_nand(&mut cs, &[Boolean::constant(false)]).is_ok()); assert!(Boolean::enforce_nand(&mut cs, &[Boolean::constant(true)]).is_err()); } for i in 1..5 { // with every possible assignment for them for mut b in 0..(1 << i) { // with 
every possible negation for mut n in 0..(1 << i) { let mut cs = TestConstraintSystem::<Fr>::new(); let mut expected = true; let mut bits = vec![]; for j in 0..i { expected &= b & 1 == 1; if n & 1 == 1 { bits.push(Boolean::from( AllocatedBit::alloc(cs.ns(|| format!("bit_gadget {}", j)), || { Ok(b & 1 == 1) }) .unwrap(), )); } else { bits.push( Boolean::from( AllocatedBit::alloc( cs.ns(|| format!("bit_gadget {}", j)), || Ok(b & 1 == 0), ) .unwrap(), ) .not(), ); } b >>= 1; n >>= 1; } let expected = !expected; Boolean::enforce_nand(&mut cs, &bits).unwrap(); if expected { assert!(cs.is_satisfied()); } else { assert!(!cs.is_satisfied()); } } } } } #[test] fn test_kary_and() { // test different numbers of operands for i in 1..15 { // with every possible assignment for them for mut b in 0..(1 << i) { let mut cs = TestConstraintSystem::<Fr>::new(); let mut expected = true; let mut bits = vec![]; for j in 0..i { expected &= b & 1 == 1; bits.push(Boolean::from( AllocatedBit::alloc(cs.ns(|| format!("bit_gadget {}", j)), || { Ok(b & 1 == 1) }) .unwrap(), )); b >>= 1; } let r = Boolean::kary_and(&mut cs, &bits).unwrap(); assert!(cs.is_satisfied()); match r { Boolean::Is(ref r) => { assert_eq!(r.value.unwrap(), expected); }, _ => unreachable!(), } } } } }
36.812006
100
0.412711
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_boolean_to_byte() {\n for val in [true, false].iter() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n let a: Boolean = AllocatedBit::alloc(&mut cs, || Ok(*val)).unwrap().into();\n let bytes = a.to_bytes(&mut cs.ns(|| \"ToBytes\")).unwrap();\n assert_eq!(bytes.len(), 1);\n let byte = &bytes[0];\n assert_eq!(byte.value.unwrap(), *val as u8);\n\n for (i, bit_gadget) in byte.bits.iter().enumerate() {\n assert_eq!(\n bit_gadget.get_value().unwrap(),\n (byte.value.unwrap() >> i) & 1 == 1\n );\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_allocated_bit() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n AllocatedBit::alloc(&mut cs, || Ok(true)).unwrap();\n assert!(cs.get(\"boolean\") == Fr::one());\n assert!(cs.is_satisfied());\n cs.set(\"boolean\", Fr::zero());\n assert!(cs.is_satisfied());\n cs.set(\"boolean\", Fr::from_str(\"2\").unwrap());\n assert!(!cs.is_satisfied());\n assert!(cs.which_is_unsatisfied() == Some(\"boolean constraint\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_xor() {\n for a_val in [false, true].iter() {\n for b_val in [false, true].iter() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n let a = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(*a_val)).unwrap();\n let b = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(*b_val)).unwrap();\n let c = AllocatedBit::xor(&mut cs, &a, &b).unwrap();\n assert_eq!(c.value.unwrap(), *a_val ^ *b_val);\n\n assert!(cs.is_satisfied());\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_or() {\n for a_val in [false, true].iter() {\n for b_val in [false, true].iter() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n let a = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(*a_val)).unwrap();\n let b = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(*b_val)).unwrap();\n let c = AllocatedBit::or(&mut cs, &a, &b).unwrap();\n assert_eq!(c.value.unwrap(), *a_val | 
*b_val);\n\n assert!(cs.is_satisfied());\n assert!(cs.get(\"a/boolean\") == if *a_val { Field::one() } else { Field::zero() });\n assert!(cs.get(\"b/boolean\") == if *b_val { Field::one() } else { Field::zero() });\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_and() {\n for a_val in [false, true].iter() {\n for b_val in [false, true].iter() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n let a = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(*a_val)).unwrap();\n let b = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(*b_val)).unwrap();\n let c = AllocatedBit::and(&mut cs, &a, &b).unwrap();\n assert_eq!(c.value.unwrap(), *a_val & *b_val);\n\n assert!(cs.is_satisfied());\n assert!(cs.get(\"a/boolean\") == if *a_val { Field::one() } else { Field::zero() });\n assert!(cs.get(\"b/boolean\") == if *b_val { Field::one() } else { Field::zero() });\n assert!(\n cs.get(\"and result\")\n == if *a_val & *b_val {\n Field::one()\n } else {\n Field::zero()\n }\n );\n\n // Invert the result and check if the constraint system is still satisfied\n cs.set(\n \"and result\",\n if *a_val & *b_val {\n Field::zero()\n } else {\n Field::one()\n },\n );\n assert!(!cs.is_satisfied());\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_and_not() {\n for a_val in [false, true].iter() {\n for b_val in [false, true].iter() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n let a = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(*a_val)).unwrap();\n let b = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(*b_val)).unwrap();\n let c = AllocatedBit::and_not(&mut cs, &a, &b).unwrap();\n assert_eq!(c.value.unwrap(), *a_val & !*b_val);\n\n assert!(cs.is_satisfied());\n assert!(cs.get(\"a/boolean\") == if *a_val { Field::one() } else { Field::zero() });\n assert!(cs.get(\"b/boolean\") == if *b_val { Field::one() } else { Field::zero() });\n assert!(\n cs.get(\"and not result\")\n == if *a_val & !*b_val {\n Field::one()\n } else {\n Field::zero()\n 
}\n );\n\n // Invert the result and check if the constraint system is still satisfied\n cs.set(\n \"and not result\",\n if *a_val & !*b_val {\n Field::zero()\n } else {\n Field::one()\n },\n );\n assert!(!cs.is_satisfied());\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_nor() {\n for a_val in [false, true].iter() {\n for b_val in [false, true].iter() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n let a = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(*a_val)).unwrap();\n let b = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(*b_val)).unwrap();\n let c = AllocatedBit::nor(&mut cs, &a, &b).unwrap();\n assert_eq!(c.value.unwrap(), !*a_val & !*b_val);\n\n assert!(cs.is_satisfied());\n assert!(cs.get(\"a/boolean\") == if *a_val { Field::one() } else { Field::zero() });\n assert!(cs.get(\"b/boolean\") == if *b_val { Field::one() } else { Field::zero() });\n assert!(\n cs.get(\"nor result\")\n == if !*a_val & !*b_val {\n Field::one()\n } else {\n Field::zero()\n }\n );\n\n // Invert the result and check if the constraint system is still satisfied\n cs.set(\n \"nor result\",\n if !*a_val & !*b_val {\n Field::zero()\n } else {\n Field::one()\n },\n );\n assert!(!cs.is_satisfied());\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_enforce_equal() {\n for a_bool in [false, true].iter().cloned() {\n for b_bool in [false, true].iter().cloned() {\n for a_neg in [false, true].iter().cloned() {\n for b_neg in [false, true].iter().cloned() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let mut a: Boolean = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(a_bool))\n .unwrap()\n .into();\n let mut b: Boolean = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(b_bool))\n .unwrap()\n .into();\n\n if a_neg {\n a = a.not();\n }\n if b_neg {\n b = b.not();\n }\n\n a.enforce_equal(&mut cs, &b).unwrap();\n\n assert_eq!(cs.is_satisfied(), (a_bool ^ a_neg) == (b_bool ^ b_neg));\n }\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n 
use super::*;\n #[test]\n fn test_conditional_enforce_equal() {\n for a_bool in [false, true].iter().cloned() {\n for b_bool in [false, true].iter().cloned() {\n for a_neg in [false, true].iter().cloned() {\n for b_neg in [false, true].iter().cloned() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n // First test if constraint system is satisfied\n // when we do want to enforce the condition.\n let mut a: Boolean = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(a_bool))\n .unwrap()\n .into();\n let mut b: Boolean = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(b_bool))\n .unwrap()\n .into();\n\n if a_neg {\n a = a.not();\n }\n if b_neg {\n b = b.not();\n }\n\n a.conditional_enforce_equal(&mut cs, &b, &Boolean::constant(true))\n .unwrap();\n\n assert_eq!(cs.is_satisfied(), (a_bool ^ a_neg) == (b_bool ^ b_neg));\n\n // Now test if constraint system is satisfied even\n // when we don't want to enforce the condition.\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let mut a: Boolean = AllocatedBit::alloc(cs.ns(|| \"a\"), || Ok(a_bool))\n .unwrap()\n .into();\n let mut b: Boolean = AllocatedBit::alloc(cs.ns(|| \"b\"), || Ok(b_bool))\n .unwrap()\n .into();\n\n if a_neg {\n a = a.not();\n }\n if b_neg {\n b = b.not();\n }\n\n let false_cond = AllocatedBit::alloc(cs.ns(|| \"cond\"), || Ok(false))\n .unwrap()\n .into();\n a.conditional_enforce_equal(&mut cs, &b, &false_cond)\n .unwrap();\n\n assert!(cs.is_satisfied());\n }\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_boolean_negation() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let mut b = Boolean::from(AllocatedBit::alloc(&mut cs, || Ok(true)).unwrap());\n\n match b {\n Boolean::Is(_) => {},\n _ => panic!(\"unexpected value\"),\n }\n\n b = b.not();\n\n match b {\n Boolean::Not(_) => {},\n _ => panic!(\"unexpected value\"),\n }\n\n b = b.not();\n\n match b {\n Boolean::Is(_) => {},\n _ => panic!(\"unexpected value\"),\n }\n\n b = Boolean::constant(true);\n\n match b 
{\n Boolean::Constant(true) => {},\n _ => panic!(\"unexpected value\"),\n }\n\n b = b.not();\n\n match b {\n Boolean::Constant(false) => {},\n _ => panic!(\"unexpected value\"),\n }\n\n b = b.not();\n\n match b {\n Boolean::Constant(true) => {},\n _ => panic!(\"unexpected value\"),\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_boolean_xor() {\n let variants = [\n OperandType::True,\n OperandType::False,\n OperandType::AllocatedTrue,\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n ];\n\n for first_operand in variants.iter().cloned() {\n for second_operand in variants.iter().cloned() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let a;\n let b;\n\n {\n let mut dyn_construct = |operand, name| {\n let cs = cs.ns(|| name);\n\n match operand {\n OperandType::True => Boolean::constant(true),\n OperandType::False => Boolean::constant(false),\n OperandType::AllocatedTrue => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap())\n },\n OperandType::AllocatedFalse => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap())\n },\n OperandType::NegatedAllocatedTrue => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()).not()\n },\n OperandType::NegatedAllocatedFalse => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()).not()\n },\n }\n };\n\n a = dyn_construct(first_operand, \"a\");\n b = dyn_construct(second_operand, \"b\");\n }\n\n let c = Boolean::xor(&mut cs, &a, &b).unwrap();\n\n assert!(cs.is_satisfied());\n\n match (first_operand, second_operand, c) {\n (OperandType::True, OperandType::True, Boolean::Constant(false)) => {},\n (OperandType::True, OperandType::False, Boolean::Constant(true)) => {},\n (OperandType::True, OperandType::AllocatedTrue, Boolean::Not(_)) => {},\n (OperandType::True, OperandType::AllocatedFalse, Boolean::Not(_)) => {},\n (OperandType::True, OperandType::NegatedAllocatedTrue, Boolean::Is(_)) => {},\n 
(OperandType::True, OperandType::NegatedAllocatedFalse, Boolean::Is(_)) => {},\n\n (OperandType::False, OperandType::True, Boolean::Constant(true)) => {},\n (OperandType::False, OperandType::False, Boolean::Constant(false)) => {},\n (OperandType::False, OperandType::AllocatedTrue, Boolean::Is(_)) => {},\n (OperandType::False, OperandType::AllocatedFalse, Boolean::Is(_)) => {},\n (OperandType::False, OperandType::NegatedAllocatedTrue, Boolean::Not(_)) => {},\n (OperandType::False, OperandType::NegatedAllocatedFalse, Boolean::Not(_)) => {},\n\n (OperandType::AllocatedTrue, OperandType::True, Boolean::Not(_)) => {},\n (OperandType::AllocatedTrue, OperandType::False, Boolean::Is(_)) => {},\n (\n OperandType::AllocatedTrue,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::NegatedAllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n\n (OperandType::AllocatedFalse, OperandType::True, Boolean::Not(_)) => {},\n (OperandType::AllocatedFalse, OperandType::False, Boolean::Is(_)) => {},\n (\n OperandType::AllocatedFalse,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::AllocatedFalse,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n 
OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n\n (OperandType::NegatedAllocatedTrue, OperandType::True, Boolean::Is(_)) => {},\n (OperandType::NegatedAllocatedTrue, OperandType::False, Boolean::Not(_)) => {},\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::AllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::AllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n\n (OperandType::NegatedAllocatedFalse, OperandType::True, Boolean::Is(_)) => {},\n (OperandType::NegatedAllocatedFalse, OperandType::False, Boolean::Not(_)) => {},\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::AllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::AllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n 
Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::NegatedAllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"xor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n\n _ => panic!(\"this should never be encountered\"),\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_boolean_or() {\n let variants = [\n OperandType::True,\n OperandType::False,\n OperandType::AllocatedTrue,\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n ];\n\n for first_operand in variants.iter().cloned() {\n for second_operand in variants.iter().cloned() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let a;\n let b;\n\n {\n let mut dyn_construct = |operand, name| {\n let cs = cs.ns(|| name);\n\n match operand {\n OperandType::True => Boolean::constant(true),\n OperandType::False => Boolean::constant(false),\n OperandType::AllocatedTrue => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap())\n },\n OperandType::AllocatedFalse => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap())\n },\n OperandType::NegatedAllocatedTrue => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()).not()\n },\n OperandType::NegatedAllocatedFalse => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()).not()\n },\n }\n };\n\n a = dyn_construct(first_operand, \"a\");\n b = dyn_construct(second_operand, \"b\");\n }\n\n let c = Boolean::or(&mut cs, &a, &b).unwrap();\n\n assert!(cs.is_satisfied());\n\n match (first_operand, second_operand, c) {\n (OperandType::True, OperandType::True, Boolean::Constant(true)) => {},\n (OperandType::True, OperandType::False, Boolean::Constant(true)) => {},\n (OperandType::True, OperandType::AllocatedTrue, Boolean::Constant(true)) => {},\n (OperandType::True, OperandType::AllocatedFalse, 
Boolean::Constant(true)) => {},\n (\n OperandType::True,\n OperandType::NegatedAllocatedTrue,\n Boolean::Constant(true),\n ) => {},\n (\n OperandType::True,\n OperandType::NegatedAllocatedFalse,\n Boolean::Constant(true),\n ) => {},\n\n (OperandType::False, OperandType::True, Boolean::Constant(true)) => {},\n (OperandType::False, OperandType::False, Boolean::Constant(false)) => {},\n (OperandType::False, OperandType::AllocatedTrue, Boolean::Is(_)) => {},\n (OperandType::False, OperandType::AllocatedFalse, Boolean::Is(_)) => {},\n (OperandType::False, OperandType::NegatedAllocatedTrue, Boolean::Not(_)) => {},\n (OperandType::False, OperandType::NegatedAllocatedFalse, Boolean::Not(_)) => {},\n\n (OperandType::AllocatedTrue, OperandType::True, Boolean::Constant(true)) => {},\n (OperandType::AllocatedTrue, OperandType::False, Boolean::Is(_)) => {},\n (\n OperandType::AllocatedTrue,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::NegatedAllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n\n (OperandType::AllocatedFalse, OperandType::True, Boolean::Constant(true)) => {},\n (OperandType::AllocatedFalse, OperandType::False, Boolean::Is(_)) => {},\n (\n OperandType::AllocatedFalse,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::AllocatedFalse,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(true));\n },\n 
(\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::True,\n Boolean::Constant(true),\n ) => {},\n (OperandType::NegatedAllocatedTrue, OperandType::False, Boolean::Not(_)) => {},\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::AllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::AllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::True,\n Boolean::Constant(true),\n ) => {},\n (OperandType::NegatedAllocatedFalse, OperandType::False, Boolean::Not(_)) => {},\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::AllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::AllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::NegatedAllocatedFalse,\n Boolean::Not(ref v),\n ) => {\n assert_eq!(v.value, Some(false));\n },\n\n _ => panic!(\n \"this should never be encountered, in case: (a = {:?}, b = {:?}, c = {:?})\",\n a, b, c\n ),\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_boolean_and() {\n let variants = [\n OperandType::True,\n OperandType::False,\n 
OperandType::AllocatedTrue,\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n ];\n\n for first_operand in variants.iter().cloned() {\n for second_operand in variants.iter().cloned() {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let a;\n let b;\n\n {\n let mut dyn_construct = |operand, name| {\n let cs = cs.ns(|| name);\n\n match operand {\n OperandType::True => Boolean::constant(true),\n OperandType::False => Boolean::constant(false),\n OperandType::AllocatedTrue => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap())\n },\n OperandType::AllocatedFalse => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap())\n },\n OperandType::NegatedAllocatedTrue => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(true)).unwrap()).not()\n },\n OperandType::NegatedAllocatedFalse => {\n Boolean::from(AllocatedBit::alloc(cs, || Ok(false)).unwrap()).not()\n },\n }\n };\n\n a = dyn_construct(first_operand, \"a\");\n b = dyn_construct(second_operand, \"b\");\n }\n\n let c = Boolean::and(&mut cs, &a, &b).unwrap();\n\n assert!(cs.is_satisfied());\n\n match (first_operand, second_operand, c) {\n (OperandType::True, OperandType::True, Boolean::Constant(true)) => {},\n (OperandType::True, OperandType::False, Boolean::Constant(false)) => {},\n (OperandType::True, OperandType::AllocatedTrue, Boolean::Is(_)) => {},\n (OperandType::True, OperandType::AllocatedFalse, Boolean::Is(_)) => {},\n (OperandType::True, OperandType::NegatedAllocatedTrue, Boolean::Not(_)) => {},\n (OperandType::True, OperandType::NegatedAllocatedFalse, Boolean::Not(_)) => {},\n\n (OperandType::False, OperandType::True, Boolean::Constant(false)) => {},\n (OperandType::False, OperandType::False, Boolean::Constant(false)) => {},\n (OperandType::False, OperandType::AllocatedTrue, Boolean::Constant(false)) => {\n },\n (OperandType::False, OperandType::AllocatedFalse, Boolean::Constant(false)) => {\n },\n (\n OperandType::False,\n 
OperandType::NegatedAllocatedTrue,\n Boolean::Constant(false),\n ) => {},\n (\n OperandType::False,\n OperandType::NegatedAllocatedFalse,\n Boolean::Constant(false),\n ) => {},\n\n (OperandType::AllocatedTrue, OperandType::True, Boolean::Is(_)) => {},\n (OperandType::AllocatedTrue, OperandType::False, Boolean::Constant(false)) => {\n },\n (\n OperandType::AllocatedTrue,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::NegatedAllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n\n (OperandType::AllocatedFalse, OperandType::True, Boolean::Is(_)) => {},\n (OperandType::AllocatedFalse, OperandType::False, Boolean::Constant(false)) => {\n },\n (\n OperandType::AllocatedFalse,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedFalse,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::AllocatedFalse,\n OperandType::NegatedAllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == 
Field::zero());\n assert_eq!(v.value, Some(false));\n },\n\n (OperandType::NegatedAllocatedTrue, OperandType::True, Boolean::Not(_)) => {},\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::False,\n Boolean::Constant(false),\n ) => {},\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"nor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedTrue,\n OperandType::NegatedAllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"nor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n\n (OperandType::NegatedAllocatedFalse, OperandType::True, Boolean::Not(_)) => {},\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::False,\n Boolean::Constant(false),\n ) => {},\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::AllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == Field::one());\n assert_eq!(v.value, Some(true));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::AllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"and not result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::NegatedAllocatedTrue,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"nor result\") == Field::zero());\n assert_eq!(v.value, Some(false));\n },\n (\n OperandType::NegatedAllocatedFalse,\n OperandType::NegatedAllocatedFalse,\n Boolean::Is(ref v),\n ) => {\n assert!(cs.get(\"nor result\") == Field::one());\n 
assert_eq!(v.value, Some(true));\n },\n\n _ => {\n panic!(\n \"unexpected behavior at {:?} AND {:?}\",\n first_operand, second_operand\n );\n },\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_enforce_in_field() {\n {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let mut bits = vec![];\n for (i, b) in BitIterator::new(Fr::characteristic()).skip(1).enumerate() {\n bits.push(Boolean::from(\n AllocatedBit::alloc(cs.ns(|| format!(\"bit_gadget {}\", i)), || Ok(b)).unwrap(),\n ));\n }\n\n Boolean::enforce_in_field::<_, _, Fr>(&mut cs, &bits).unwrap();\n\n assert!(!cs.is_satisfied());\n }\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n for _ in 0..1000 {\n let r = Fr::rand(&mut rng);\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let mut bits = vec![];\n for (i, b) in BitIterator::new(r.into_repr()).skip(1).enumerate() {\n bits.push(Boolean::from(\n AllocatedBit::alloc(cs.ns(|| format!(\"bit_gadget {}\", i)), || Ok(b)).unwrap(),\n ));\n }\n\n Boolean::enforce_in_field::<_, _, Fr>(&mut cs, &bits).unwrap();\n\n assert!(cs.is_satisfied());\n }\n\n // for _ in 0..1000 {\n // // Sample a random element not in the field\n // let r = loop {\n // let mut a = Fr::rand(&mut rng).into_repr();\n // let b = Fr::rand(&mut rng).into_repr();\n\n // a.add_nocarry(&b);\n // // we're shaving off the high bit_gadget later\n // a.as_mut()[3] &= 0x7fffffffffffffff;\n // if Fr::from_repr(a).is_err() {\n // break a;\n // }\n // };\n\n // let mut cs = TestConstraintSystem::<Fr>::new();\n\n // let mut bits = vec![];\n // for (i, b) in BitIterator::new(r).skip(1).enumerate() {\n // bits.push(Boolean::from(\n // AllocatedBit::alloc(cs.ns(|| format!(\"bit_gadget {}\",\n // i)), Some(b)) .unwrap(),\n // ));\n // }\n\n // Boolean::enforce_in_field::<_, _, Fr>(&mut cs, &bits).unwrap();\n\n // assert!(!cs.is_satisfied());\n // }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_enforce_nand() {\n {\n let mut cs = 
TestConstraintSystem::<Fr>::new();\n\n assert!(Boolean::enforce_nand(&mut cs, &[Boolean::constant(false)]).is_ok());\n assert!(Boolean::enforce_nand(&mut cs, &[Boolean::constant(true)]).is_err());\n }\n\n for i in 1..5 {\n // with every possible assignment for them\n for mut b in 0..(1 << i) {\n // with every possible negation\n for mut n in 0..(1 << i) {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let mut expected = true;\n\n let mut bits = vec![];\n for j in 0..i {\n expected &= b & 1 == 1;\n\n if n & 1 == 1 {\n bits.push(Boolean::from(\n AllocatedBit::alloc(cs.ns(|| format!(\"bit_gadget {}\", j)), || {\n Ok(b & 1 == 1)\n })\n .unwrap(),\n ));\n } else {\n bits.push(\n Boolean::from(\n AllocatedBit::alloc(\n cs.ns(|| format!(\"bit_gadget {}\", j)),\n || Ok(b & 1 == 0),\n )\n .unwrap(),\n )\n .not(),\n );\n }\n\n b >>= 1;\n n >>= 1;\n }\n\n let expected = !expected;\n\n Boolean::enforce_nand(&mut cs, &bits).unwrap();\n\n if expected {\n assert!(cs.is_satisfied());\n } else {\n assert!(!cs.is_satisfied());\n }\n }\n }\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_kary_and() {\n // test different numbers of operands\n for i in 1..15 {\n // with every possible assignment for them\n for mut b in 0..(1 << i) {\n let mut cs = TestConstraintSystem::<Fr>::new();\n\n let mut expected = true;\n\n let mut bits = vec![];\n for j in 0..i {\n expected &= b & 1 == 1;\n\n bits.push(Boolean::from(\n AllocatedBit::alloc(cs.ns(|| format!(\"bit_gadget {}\", j)), || {\n Ok(b & 1 == 1)\n })\n .unwrap(),\n ));\n b >>= 1;\n }\n\n let r = Boolean::kary_and(&mut cs, &bits).unwrap();\n\n assert!(cs.is_satisfied());\n\n match r {\n Boolean::Is(ref r) => {\n assert_eq!(r.value.unwrap(), expected);\n },\n _ => unreachable!(),\n }\n }\n }\n }\n}" ]
f70bcbcc41ebc80cd725e2cbd01bc20fef7736db
2,917
rs
Rust
rust/src/day13.rs
bwestlin/advent-of-code-2020
9377c8953fe10db5f14c80630effc07cc79cfb15
[ "MIT" ]
null
null
null
rust/src/day13.rs
bwestlin/advent-of-code-2020
9377c8953fe10db5f14c80630effc07cc79cfb15
[ "MIT" ]
null
null
null
rust/src/day13.rs
bwestlin/advent-of-code-2020
9377c8953fe10db5f14c80630effc07cc79cfb15
[ "MIT" ]
null
null
null
extern crate utils; use std::env; use std::io::{self, BufReader}; use std::io::prelude::*; use std::fs::File; use utils::*; #[derive(Debug)] struct Input { earliest_ts: u64, bus_ids: Vec<Option<u64>> } fn part1(input: &Input) -> u64 { let (least_wait, bus_id) = input.bus_ids.iter() .flatten() .fold((std::u64::MAX, 0), |(least_wait_time, least_wait_bus_id), &bus_id| { let time_left = bus_id - (input.earliest_ts % bus_id); if time_left < least_wait_time { (time_left, bus_id) } else { (least_wait_time, least_wait_bus_id) } }); least_wait * bus_id } fn part2(input: &Input) -> u64 { let mut start = 0; let mut step = input.bus_ids[0].unwrap(); for i in 1..input.bus_ids.len() { if let Some(bus_id) = input.bus_ids[i] { let bus_id = bus_id; let mut found = None; for t in (start..).step_by(step as usize) { if (t + i as u64) % bus_id == 0 { if let Some(found) = found { step = t - found; start = found + step; break; } else { if i == input.bus_ids.len() - 1 { return t; } found = Some(t) } } } } } 0 } fn main() { measure(|| { let input = input().expect("Input failed"); println!("Part1: {}", part1(&input)); println!("Part2: {}", part2(&input)); }); } fn read_input<R: Read>(reader: BufReader<R>) -> io::Result<Input> { let mut lines = reader.lines(); Ok(Input { earliest_ts: lines.next().unwrap()?.parse::<u64>().unwrap(), bus_ids: lines.next().unwrap()?.split(',').map(|i| i.parse::<u64>().ok()).collect::<Vec<_>>() }) } fn input() -> io::Result<Input> { let f = File::open(env::args().skip(1).next().expect("No input file given"))?; read_input(BufReader::new(f)) } #[cfg(test)] mod tests { use super::*; const INPUT: &'static str = "939 7,13,x,x,59,x,31,19"; fn as_input(s: &str) -> Input { read_input(BufReader::new(s.split('\n').map(|s| s.trim()).collect::<Vec<_>>().join("\n").as_bytes())).unwrap() } #[test] fn test_part1() { assert_eq!(part1(&as_input(INPUT)), 295); } #[test] fn test_part2() { assert_eq!(part2(&as_input(INPUT)), 1068781); assert_eq!(part2(&as_input("0\n17,x,13,19")), 
3417); assert_eq!(part2(&as_input("0\n67,7,59,61")), 754018); assert_eq!(part2(&as_input("0\n67,x,7,59,61")), 779210); assert_eq!(part2(&as_input("0\n67,7,x,59,61")), 1261476); assert_eq!(part2(&as_input("0\n1789,37,47,1889")), 1202161486); } }
26.279279
118
0.494001
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_part1() {\n assert_eq!(part1(&as_input(INPUT)), 295);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_part2() {\n assert_eq!(part2(&as_input(INPUT)), 1068781);\n assert_eq!(part2(&as_input(\"0\\n17,x,13,19\")), 3417);\n assert_eq!(part2(&as_input(\"0\\n67,7,59,61\")), 754018);\n assert_eq!(part2(&as_input(\"0\\n67,x,7,59,61\")), 779210);\n assert_eq!(part2(&as_input(\"0\\n67,7,x,59,61\")), 1261476);\n assert_eq!(part2(&as_input(\"0\\n1789,37,47,1889\")), 1202161486);\n }\n}" ]
f70bd9a6a5db943516d37dd020c5d4a0ba47e6ad
619
rs
Rust
src/y2019/star09/mod.rs
tm-drtina/advent-of-code
d16b44ee258c8fc4e992d162891fdd87f02aa02e
[ "MIT" ]
null
null
null
src/y2019/star09/mod.rs
tm-drtina/advent-of-code
d16b44ee258c8fc4e992d162891fdd87f02aa02e
[ "MIT" ]
null
null
null
src/y2019/star09/mod.rs
tm-drtina/advent-of-code
d16b44ee258c8fc4e992d162891fdd87f02aa02e
[ "MIT" ]
null
null
null
pub mod part1; pub mod part2; #[cfg(test)] mod tests { #[test] fn part1() { let expected = 3780860499_i64; let actual = super::part1::run(include_str!("input.txt")); assert_eq!(expected, actual); } #[test] fn part1_sanity() { let expected = 99_i64; let actual = super::part1::run("109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99"); assert_eq!(expected, actual); } #[test] fn part2() { let expected = 33343; let actual = super::part2::run(include_str!("input.txt")); assert_eq!(expected, actual); } }
24.76
100
0.563813
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn part1() {\n let expected = 3780860499_i64;\n let actual = super::part1::run(include_str!(\"input.txt\"));\n assert_eq!(expected, actual);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn part1_sanity() {\n let expected = 99_i64;\n let actual = super::part1::run(\"109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99\");\n assert_eq!(expected, actual);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn part2() {\n let expected = 33343;\n let actual = super::part2::run(include_str!(\"input.txt\"));\n assert_eq!(expected, actual);\n }\n}" ]
f70c220afec4c54e46be629a40cd6163a655f314
23,584
rs
Rust
src/cargo.rs
AtkinsChang/tarpaulin
a8cf5cfebbc5d9aed03bdfcef4a2b76c088d3aea
[ "Apache-2.0", "MIT" ]
null
null
null
src/cargo.rs
AtkinsChang/tarpaulin
a8cf5cfebbc5d9aed03bdfcef4a2b76c088d3aea
[ "Apache-2.0", "MIT" ]
null
null
null
src/cargo.rs
AtkinsChang/tarpaulin
a8cf5cfebbc5d9aed03bdfcef4a2b76c088d3aea
[ "Apache-2.0", "MIT" ]
null
null
null
use crate::config::*; use crate::errors::RunError; use crate::path_utils::get_source_walker; use cargo_metadata::{diagnostic::DiagnosticLevel, CargoOpt, Message, Metadata, MetadataCommand}; use lazy_static::lazy_static; use regex::Regex; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet}; use std::env; use std::fs::{read_dir, read_to_string, remove_dir_all, File}; use std::io; use std::io::{BufRead, BufReader}; use std::path::{Component, Path, PathBuf}; use std::process::{Command, Stdio}; use toml::Value; use tracing::{error, info, trace, warn}; use walkdir::{DirEntry, WalkDir}; #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] enum Channel { Stable, Beta, Nightly, } #[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] struct CargoVersionInfo { major: usize, minor: usize, channel: Channel, year: usize, month: usize, day: usize, } impl CargoVersionInfo { fn supports_llvm_cov(&self) -> bool { self.minor >= 50 && self.channel == Channel::Nightly } } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] pub struct TestBinary { path: PathBuf, ty: Option<RunType>, cargo_dir: Option<PathBuf>, pkg_name: Option<String>, pkg_version: Option<String>, pkg_authors: Option<Vec<String>>, should_panic: bool, } #[derive(Clone, Debug)] struct DocTestBinaryMeta { prefix: String, line: usize, } impl TestBinary { pub fn new(path: PathBuf, ty: Option<RunType>) -> Self { Self { path, ty, pkg_name: None, pkg_version: None, pkg_authors: None, cargo_dir: None, should_panic: false, } } pub fn path(&self) -> &Path { &self.path } pub fn run_type(&self) -> Option<RunType> { self.ty } pub fn manifest_dir(&self) -> &Option<PathBuf> { &self.cargo_dir } pub fn pkg_name(&self) -> &Option<String> { &self.pkg_name } pub fn pkg_version(&self) -> &Option<String> { &self.pkg_version } pub fn pkg_authors(&self) -> &Option<Vec<String>> { &self.pkg_authors } /// Should be `false` for normal tests and for doctests either 
`true` or /// `false` depending on the test attribute pub fn should_panic(&self) -> bool { self.should_panic } } impl DocTestBinaryMeta { fn new<P: AsRef<Path>>(test: P) -> Option<Self> { if let Some(Component::Normal(folder)) = test.as_ref().components().nth_back(1) { let temp = folder.to_string_lossy(); let file_end = temp.rfind("rs").map(|i| i + 2)?; let end = temp.rfind('_')?; if end > file_end + 1 { let line = temp[(file_end + 1)..end].parse::<usize>().ok()?; Some(Self { prefix: temp[..file_end].to_string(), line, }) } else { None } } else { None } } } lazy_static! { static ref CARGO_VERSION_INFO: Option<CargoVersionInfo> = { let version_info = Regex::new( r"cargo (\d)\.(\d+)\.\d+([\-betanightly]*) \([[:alnum:]]+ (\d{4})-(\d{2})-(\d{2})\)", ) .unwrap(); Command::new("cargo") .arg("--version") .output() .map(|x| { let s = String::from_utf8_lossy(&x.stdout); if let Some(cap) = version_info.captures(&s) { let major = cap[1].parse().unwrap(); let minor = cap[2].parse().unwrap(); // We expect a string like `cargo 1.50.0-nightly (a0f433460 2020-02-01) // the version number either has `-nightly` `-beta` or empty for stable let channel = match &cap[3] { "-nightly" => Channel::Nightly, "-beta" => Channel::Beta, _ => Channel::Stable, }; let year = cap[4].parse().unwrap(); let month = cap[5].parse().unwrap(); let day = cap[6].parse().unwrap(); Some(CargoVersionInfo { major, minor, channel, year, month, day, }) } else { None } }) .unwrap_or(None) }; } pub fn get_tests(config: &Config) -> Result<Vec<TestBinary>, RunError> { let mut result = vec![]; if config.force_clean { let cleanup_dir = if config.release { config.target_dir().join("debug") } else { config.target_dir().join("release") }; info!("Cleaning project"); if cleanup_dir.exists() { if let Err(e) = remove_dir_all(cleanup_dir) { error!("Cargo clean failed: {}", e); } } } let manifest = match config.manifest.as_path().to_str() { Some(s) => s, None => "Cargo.toml", }; let metadata = MetadataCommand::new() 
.manifest_path(manifest) .features(CargoOpt::AllFeatures) .exec() .map_err(|e| RunError::Cargo(e.to_string()))?; for ty in &config.run_types { run_cargo(&metadata, manifest, config, Some(*ty), &mut result)?; } if config.has_named_tests() { run_cargo(&metadata, manifest, config, None, &mut result)? } else if config.run_types.is_empty() { let ty = if config.command == Mode::Test { Some(RunType::Tests) } else { None }; run_cargo(&metadata, manifest, config, ty, &mut result)?; } Ok(result) } fn run_cargo( metadata: &Metadata, manifest: &str, config: &Config, ty: Option<RunType>, result: &mut Vec<TestBinary>, ) -> Result<(), RunError> { let mut cmd = create_command(manifest, config, ty); if ty != Some(RunType::Doctests) { cmd.stdout(Stdio::piped()); } else { clean_doctest_folder(&config.doctest_dir()); cmd.stdout(Stdio::null()); } trace!("Running command {:?}", cmd); let mut child = cmd.spawn().map_err(|e| RunError::Cargo(e.to_string()))?; if ty != Some(RunType::Doctests) { let mut package_ids = vec![]; let reader = std::io::BufReader::new(child.stdout.take().unwrap()); let mut error = None; for msg in Message::parse_stream(reader) { match msg { Ok(Message::CompilerArtifact(art)) => { if let Some(path) = art.executable { if !art.profile.test && config.command == Mode::Test { continue; } result.push(TestBinary::new(PathBuf::from(path), ty)); package_ids.push(art.package_id.clone()); } } Ok(Message::CompilerMessage(m)) => match m.message.level { DiagnosticLevel::Error | DiagnosticLevel::Ice => { let msg = format!("{}: {}", m.target.name, m.message.message); error = Some(RunError::TestCompile(msg)); break; } _ => {} }, Err(e) => { error!("Error parsing cargo messages {}", e); } _ => {} } } let status = child.wait().unwrap(); if let Some(error) = error { return Err(error); } if !status.success() { return Err(RunError::Cargo("cargo run failed".to_string())); }; for (res, package) in result.iter_mut().zip(package_ids.iter()) { let package = &metadata[package]; res.cargo_dir = 
package .manifest_path .parent() .map(|x| PathBuf::from(x.to_path_buf())); res.pkg_name = Some(package.name.clone()); res.pkg_version = Some(package.version.to_string()); res.pkg_authors = Some(package.authors.clone()); } child.wait().map_err(|e| RunError::Cargo(e.to_string()))?; } else { // need to wait for compiling to finish before getting doctests // also need to wait with output to ensure the stdout buffer doesn't fill up let out = child .wait_with_output() .map_err(|e| RunError::Cargo(e.to_string()))?; if !out.status.success() { error!("Building doctests failed"); return Err(RunError::Cargo("Building doctest failed".to_string())); } let walker = WalkDir::new(&config.doctest_dir()).into_iter(); let dir_entries = walker .filter_map(|e| e.ok()) .filter(|e| matches!(e.metadata(), Ok(ref m) if m.is_file() && m.len() != 0)) .collect::<Vec<_>>(); let should_panics = get_panic_candidates(&dir_entries, config); for dt in &dir_entries { let mut tb = TestBinary::new(dt.path().to_path_buf(), ty); let mut current_dir = dt.path(); loop { if current_dir.is_dir() && current_dir.join("Cargo.toml").exists() { tb.cargo_dir = Some(current_dir.to_path_buf()); break; } match current_dir.parent() { Some(s) => { current_dir = s; } None => break, } } // Now to do my magic! 
if let Some(meta) = DocTestBinaryMeta::new(dt.path()) { if let Some(lines) = should_panics.get(&meta.prefix) { tb.should_panic |= lines.contains(&meta.line); } } result.push(tb); } } Ok(()) } fn convert_to_prefix(p: &Path) -> Option<String> { // Need to go from directory after last one with Cargo.toml let convert_name = |p: &Path| { if let Some(s) = p.file_name() { s.to_str().map(|x| x.replace('.', "_")).unwrap_or_default() } else { String::new() } }; let mut buffer = vec![convert_name(p)]; let mut parent = p.parent(); while let Some(path_temp) = parent { if !path_temp.join("Cargo.toml").exists() { buffer.insert(0, convert_name(path_temp)); } else { break; } parent = path_temp.parent(); } if buffer.is_empty() { None } else { Some(buffer.join("_")) } } fn is_prefix_match(prefix: &str, entry: &Path) -> bool { convert_to_prefix(entry) .map(|s| s.contains(prefix)) .unwrap_or(false) } /// This returns a map of the string prefixes for the file in the doc test and a list of lines /// which contain the string `should_panic` it makes no guarantees that all these lines are a /// doctest attribute showing panic behaviour (but some of them will be) /// /// Currently all doctest files take the pattern of `{name}_{line}_{number}` where name is the /// path to the file with directory separators and dots replaced with underscores. 
Therefore /// each name could potentially map to many files as `src_some_folder_foo_rs_0_1` could go to /// `src/some/folder_foo.rs` or `src/some/folder/foo.rs` here we're going to work on a heuristic /// that any matching file is good because we can't do any better fn get_panic_candidates(tests: &[DirEntry], config: &Config) -> HashMap<String, Vec<usize>> { let mut result = HashMap::new(); let mut checked_files = HashSet::new(); let root = config.root(); for test in tests { if let Some(test_binary) = DocTestBinaryMeta::new(test.path()) { for dir_entry in get_source_walker(config) { let path = dir_entry.path(); if path.is_file() { if let Some(p) = path_relative_from(path, &root) { if is_prefix_match(&test_binary.prefix, &p) && !checked_files.contains(path) { checked_files.insert(path.to_path_buf()); let lines = find_panics_in_file(path).unwrap_or_default(); if !result.contains_key(&test_binary.prefix) { result.insert(test_binary.prefix.clone(), lines); } else if let Some(current_lines) = result.get_mut(&test_binary.prefix) { current_lines.extend_from_slice(&lines); } } } } } } else { warn!( "Invalid characters in name of doctest {}", test.path().display() ); } } result } fn find_panics_in_file(file: &Path) -> io::Result<Vec<usize>> { let f = File::open(file)?; let reader = BufReader::new(f); let lines = reader .lines() .enumerate() .filter(|(_, l)| { l.as_ref() .map(|x| x.contains("should_panic")) .unwrap_or(false) }) .map(|(i, _)| i + 1) // Move from line index to line number .collect(); Ok(lines) } fn create_command(manifest_path: &str, config: &Config, ty: Option<RunType>) -> Command { let mut test_cmd = Command::new("cargo"); if ty == Some(RunType::Doctests) { if let Some(toolchain) = env::var("RUSTUP_TOOLCHAIN") .ok() .filter(|t| t.starts_with("nightly")) { test_cmd.args(&[format!("+{}", toolchain).as_str(), "test"]); } else { test_cmd.args(&["+nightly", "test"]); } } else { if let Ok(toolchain) = env::var("RUSTUP_TOOLCHAIN") { test_cmd.arg(format!("+{}", 
toolchain)); } if config.command == Mode::Test { test_cmd.args(&["test", "--no-run"]); } else { test_cmd.arg("build"); } } test_cmd.args(&["--message-format", "json", "--manifest-path", manifest_path]); if let Some(ty) = ty { match ty { RunType::Tests => test_cmd.arg("--tests"), RunType::Doctests => test_cmd.arg("--doc"), RunType::Benchmarks => test_cmd.arg("--benches"), RunType::Examples => test_cmd.arg("--examples"), RunType::AllTargets => test_cmd.arg("--all-targets"), RunType::Lib => test_cmd.arg("--lib"), RunType::Bins => test_cmd.arg("--bins"), }; } else { for test in &config.test_names { test_cmd.arg("--test"); test_cmd.arg(test); } for test in &config.bin_names { test_cmd.arg("--bin"); test_cmd.arg(test); } for test in &config.example_names { test_cmd.arg("--example"); test_cmd.arg(test); } for test in &config.bench_names { test_cmd.arg("--bench"); test_cmd.arg(test); } } init_args(&mut test_cmd, config); setup_environment(&mut test_cmd, config); test_cmd } fn init_args(test_cmd: &mut Command, config: &Config) { if config.debug { test_cmd.arg("-vvv"); } if config.locked { test_cmd.arg("--locked"); } if config.frozen { test_cmd.arg("--frozen"); } if config.no_fail_fast { test_cmd.arg("--no-fail-fast"); } if let Some(profile) = config.profile.as_ref() { test_cmd.arg("--profile"); test_cmd.arg(profile); } if let Some(jobs) = config.jobs { test_cmd.arg("--jobs"); test_cmd.arg(jobs.to_string()); } if let Some(features) = config.features.as_ref() { test_cmd.arg("--features"); test_cmd.arg(features); } if config.all_targets { test_cmd.arg("--all-targets"); } if config.all_features { test_cmd.arg("--all-features"); } if config.no_default_features { test_cmd.arg("--no-default-features"); } if config.all { test_cmd.arg("--workspace"); } if config.release { test_cmd.arg("--release"); } config.packages.iter().for_each(|package| { test_cmd.arg("--package"); test_cmd.arg(package); }); config.exclude.iter().for_each(|package| { test_cmd.arg("--exclude"); 
test_cmd.arg(package); }); test_cmd.arg("--color"); test_cmd.arg(config.color.to_string().to_ascii_lowercase()); if let Some(target) = config.target.as_ref() { test_cmd.args(&["--target", target]); } let args = vec![ "--target-dir".to_string(), format!("{}", config.target_dir().display()), ]; test_cmd.args(args); if config.offline { test_cmd.arg("--offline"); } for feat in &config.unstable_features { test_cmd.arg(format!("-Z{}", feat)); } if config.command == Mode::Test && !config.varargs.is_empty() { let mut args = vec!["--".to_string()]; args.extend_from_slice(&config.varargs); test_cmd.args(args); } } /// Old doc tests that no longer exist or where the line have changed can persist so delete them to /// avoid confusing the results fn clean_doctest_folder<P: AsRef<Path>>(doctest_dir: P) { if let Ok(rd) = read_dir(doctest_dir.as_ref()) { rd.flat_map(|e| e.ok()) .filter(|e| { e.path() .components() .next_back() .map(|e| e.as_os_str().to_string_lossy().contains("rs")) .unwrap_or(false) }) .for_each(|e| { if let Err(err) = remove_dir_all(e.path()) { warn!("Failed to delete {}: {}", e.path().display(), err); } }); } } fn handle_llvm_flags(value: &mut String, config: &Config) { if (config.engine == TraceEngine::Auto || config.engine == TraceEngine::Llvm) && supports_llvm_coverage() { value.push_str("-Z instrument-coverage "); } else if config.engine == TraceEngine::Llvm { error!("unable to utilise llvm coverage, due to compiler support. 
Falling back to Ptrace"); } } pub fn rustdoc_flags(config: &Config) -> String { const RUSTDOC: &str = "RUSTDOCFLAGS"; let common_opts = " -C link-dead-code -C debuginfo=2 --cfg=tarpaulin "; let mut value = format!( "{} --persist-doctests {} -Z unstable-options ", common_opts, config.doctest_dir().display() ); if let Ok(vtemp) = env::var(RUSTDOC) { if !vtemp.contains("--persist-doctests") { value.push_str(vtemp.as_ref()); } } handle_llvm_flags(&mut value, config); value } fn look_for_rustflags_in_table(value: &Value) -> String { let table = value.as_table().unwrap(); if let Some(rustflags) = table.get("rustflags") { let vec_of_flags: Vec<String> = rustflags .as_array() .unwrap() .into_iter() .filter_map(|x| x.as_str()) .map(|x| x.to_string()) .collect(); vec_of_flags.join(" ") } else { String::new() } } fn look_for_rustflags_in_file(path: &Path) -> Option<String> { if let Ok(contents) = read_to_string(path) { let value = contents.parse::<Value>().ok()?; let rustflags_in_file: Vec<String> = value .as_table()? 
.into_iter() .map(|(s, v)| { if s.as_str() == "build" { look_for_rustflags_in_table(v) } else { String::new() } }) .collect(); Some(rustflags_in_file.join(" ")) } else { None } } fn look_for_rustflags_in(path: &Path) -> Option<String> { let mut config_path = path.join("config"); let rustflags = look_for_rustflags_in_file(&config_path); if rustflags.is_some() { return rustflags; } config_path.pop(); config_path.push("config.toml"); let rustflags = look_for_rustflags_in_file(&config_path); if rustflags.is_some() { return rustflags; } None } fn build_config_path(base: impl AsRef<Path>) -> PathBuf { let mut config_path = PathBuf::from(base.as_ref()); config_path.push(base); config_path.push(".cargo"); config_path } fn gather_config_rust_flags(config: &Config) -> String { if let Some(rustflags) = look_for_rustflags_in(&build_config_path(&config.root())) { return rustflags; } if let Ok(cargo_home_config) = env::var("CARGO_HOME") { if let Some(rustflags) = look_for_rustflags_in(&PathBuf::from(cargo_home_config)) { return rustflags; } } String::new() } pub fn rust_flags(config: &Config) -> String { const RUSTFLAGS: &str = "RUSTFLAGS"; let mut value = config.rustflags.clone().unwrap_or_default(); value.push_str(" -C link-dead-code -C debuginfo=2 "); if !config.avoid_cfg_tarpaulin { value.push_str("--cfg=tarpaulin "); } if config.release { value.push_str("-C debug-assertions=off "); } handle_llvm_flags(&mut value, config); lazy_static! 
{ static ref DEBUG_INFO: Regex = Regex::new(r#"\-C\s*debuginfo=\d"#).unwrap(); } if let Ok(vtemp) = env::var(RUSTFLAGS) { value.push_str(&DEBUG_INFO.replace_all(&vtemp, " ")); } else { let vtemp = gather_config_rust_flags(config); value.push_str(&DEBUG_INFO.replace_all(&vtemp, " ")); } value } fn setup_environment(cmd: &mut Command, config: &Config) { cmd.env("TARPAULIN", "1"); let rustflags = "RUSTFLAGS"; let value = rust_flags(config); cmd.env(rustflags, value); // doesn't matter if we don't use it let rustdoc = "RUSTDOCFLAGS"; let value = rustdoc_flags(config); trace!("Setting RUSTDOCFLAGS='{}'", value); cmd.env(rustdoc, value); } fn supports_llvm_coverage() -> bool { if let Some(version) = CARGO_VERSION_INFO.as_ref() { version.supports_llvm_cov() } else { false } } #[cfg(test)] mod tests { use super::*; #[test] fn llvm_cov_compatible_version() { let version = CargoVersionInfo { major: 1, minor: 50, channel: Channel::Nightly, year: 2020, month: 12, day: 22, }; assert!(version.supports_llvm_cov()); } #[test] fn llvm_cov_incompatible_version() { let mut version = CargoVersionInfo { major: 1, minor: 48, channel: Channel::Stable, year: 2020, month: 10, day: 14, }; assert!(!version.supports_llvm_cov()); version.channel = Channel::Beta; assert!(!version.supports_llvm_cov()); version.minor = 50; assert!(!version.supports_llvm_cov()); version.minor = 58; version.channel = Channel::Stable; assert!(!version.supports_llvm_cov()); } }
32
100
0.530868
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn llvm_cov_compatible_version() {\n let version = CargoVersionInfo {\n major: 1,\n minor: 50,\n channel: Channel::Nightly,\n year: 2020,\n month: 12,\n day: 22,\n };\n assert!(version.supports_llvm_cov());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn llvm_cov_incompatible_version() {\n let mut version = CargoVersionInfo {\n major: 1,\n minor: 48,\n channel: Channel::Stable,\n year: 2020,\n month: 10,\n day: 14,\n };\n assert!(!version.supports_llvm_cov());\n version.channel = Channel::Beta;\n assert!(!version.supports_llvm_cov());\n version.minor = 50;\n assert!(!version.supports_llvm_cov());\n version.minor = 58;\n version.channel = Channel::Stable;\n assert!(!version.supports_llvm_cov());\n }\n}" ]
f70c2aa6145e620821f0446e582e12053a7011a9
3,770
rs
Rust
src/notification/tests.rs
vishy1618/gcm
e88306683a6b5783cc14d83a60d1cc2847bfb46b
[ "MIT" ]
7
2015-12-28T11:58:49.000Z
2020-03-25T10:44:35.000Z
src/notification/tests.rs
vishy1618/gcm
e88306683a6b5783cc14d83a60d1cc2847bfb46b
[ "MIT" ]
6
2015-10-29T13:24:11.000Z
2017-06-03T13:27:53.000Z
src/notification/tests.rs
vishy1618/gcm
e88306683a6b5783cc14d83a60d1cc2847bfb46b
[ "MIT" ]
5
2015-12-28T07:12:03.000Z
2019-06-05T17:44:40.000Z
use serde_json; use {NotificationBuilder}; #[test] fn should_create_new_notification_message() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.title, "title"); } #[test] fn should_set_notification_body() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.body, None); let nm = NotificationBuilder::new("title") .body("body") .finalize(); let json_result = serde_json::to_string(&nm); assert_eq!(nm.body, Some("body")); assert!(json_result.is_ok()); assert_eq!(json_result.unwrap(), r#"{"title":"title","body":"body","icon":"myicon"}"#); } #[test] fn should_set_default_icon() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.icon, "myicon"); } #[test] fn should_set_notification_icon() { let nm = NotificationBuilder::new("title") .icon("newicon") .finalize(); assert_eq!(nm.icon, "newicon"); } #[test] fn should_set_notification_sound() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.sound, None); let nm = NotificationBuilder::new("title") .sound("sound.wav") .finalize(); assert_eq!(nm.sound, Some("sound.wav")); } #[test] fn should_set_notification_badge() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.badge, None); let nm = NotificationBuilder::new("title") .badge("1") .finalize(); assert_eq!(nm.badge, Some("1")); } #[test] fn should_set_notification_tag() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.tag, None); let nm = NotificationBuilder::new("title") .tag("tag") .finalize(); assert_eq!(nm.tag, Some("tag")); } #[test] fn should_set_notification_color() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.color, None); let nm = NotificationBuilder::new("title") .color("color") .finalize(); assert_eq!(nm.color, Some("color")); } #[test] fn should_set_notification_click_action() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.click_action, None); let nm = NotificationBuilder::new("title") 
.click_action("action") .finalize(); assert_eq!(nm.click_action, Some("action")); } #[test] fn should_set_notification_body_loc_key() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.body_loc_key, None); let nm = NotificationBuilder::new("title") .body_loc_key("key") .finalize(); assert_eq!(nm.body_loc_key, Some("key")); } #[test] fn should_set_notification_body_loc_args() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.body_loc_args, None); let nm = NotificationBuilder::new("title") .body_loc_args(vec!["args"]) .finalize(); let json_result = serde_json::to_string(&nm); assert_eq!(nm.body_loc_args, Some(vec!["args".to_string()])); assert_eq!(json_result.unwrap(), r#"{"title":"title","icon":"myicon","body_loc_args":["args"]}"#); } #[test] fn should_set_notification_title_loc_key() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.title_loc_key, None); let nm = NotificationBuilder::new("title") .title_loc_key("key") .finalize(); assert_eq!(nm.title_loc_key, Some("key")); } #[test] fn should_set_notification_title_loc_args() { let nm = NotificationBuilder::new("title").finalize(); assert_eq!(nm.title_loc_args, None); let nm = NotificationBuilder::new("title") .title_loc_args(vec!["args"]) .finalize(); let json_result = serde_json::to_string(&nm); assert_eq!(nm.title_loc_args, Some(vec!["args".to_string()])); assert_eq!(json_result.unwrap(), r#"{"title":"title","icon":"myicon","title_loc_args":["args"]}"#); }
22.848485
101
0.664191
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_create_new_notification_message() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.title, \"title\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_body() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.body, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .body(\"body\")\n .finalize();\n\n let json_result = serde_json::to_string(&nm);\n\n assert_eq!(nm.body, Some(\"body\"));\n assert!(json_result.is_ok());\n assert_eq!(json_result.unwrap(), r#\"{\"title\":\"title\",\"body\":\"body\",\"icon\":\"myicon\"}\"#);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_default_icon() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.icon, \"myicon\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_icon() {\n let nm = NotificationBuilder::new(\"title\")\n .icon(\"newicon\")\n .finalize();\n\n assert_eq!(nm.icon, \"newicon\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_sound() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.sound, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .sound(\"sound.wav\")\n .finalize();\n\n assert_eq!(nm.sound, Some(\"sound.wav\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_badge() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.badge, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .badge(\"1\")\n .finalize();\n\n assert_eq!(nm.badge, Some(\"1\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_tag() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.tag, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .tag(\"tag\")\n 
.finalize();\n\n assert_eq!(nm.tag, Some(\"tag\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_color() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.color, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .color(\"color\")\n .finalize();\n\n assert_eq!(nm.color, Some(\"color\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_click_action() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.click_action, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .click_action(\"action\")\n .finalize();\n\n assert_eq!(nm.click_action, Some(\"action\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_body_loc_key() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.body_loc_key, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .body_loc_key(\"key\")\n .finalize();\n\n assert_eq!(nm.body_loc_key, Some(\"key\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_body_loc_args() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.body_loc_args, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .body_loc_args(vec![\"args\"])\n .finalize();\n\n let json_result = serde_json::to_string(&nm);\n\n assert_eq!(nm.body_loc_args, Some(vec![\"args\".to_string()]));\n assert_eq!(json_result.unwrap(), r#\"{\"title\":\"title\",\"icon\":\"myicon\",\"body_loc_args\":[\"args\"]}\"#);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn should_set_notification_title_loc_key() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.title_loc_key, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .title_loc_key(\"key\")\n .finalize();\n\n assert_eq!(nm.title_loc_key, Some(\"key\"));\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n 
#[test]\n fn should_set_notification_title_loc_args() {\n let nm = NotificationBuilder::new(\"title\").finalize();\n\n assert_eq!(nm.title_loc_args, None);\n\n let nm = NotificationBuilder::new(\"title\")\n .title_loc_args(vec![\"args\"])\n .finalize();\n\n let json_result = serde_json::to_string(&nm);\n\n assert_eq!(nm.title_loc_args, Some(vec![\"args\".to_string()]));\n assert_eq!(json_result.unwrap(), r#\"{\"title\":\"title\",\"icon\":\"myicon\",\"title_loc_args\":[\"args\"]}\"#);\n}\n}" ]
f70ca87e304a91961d5b1c62f1b24204d6a9d3e7
616
rs
Rust
src/test/ui/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
randomicon00/rust
4e02a9281dd8c74cf5c04df044659b5e03b17571
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/test/ui/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
randomicon00/rust
4e02a9281dd8c74cf5c04df044659b5e03b17571
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
src/test/ui/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
randomicon00/rust
4e02a9281dd8c74cf5c04df044659b5e03b17571
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
// compile-flags: --test // run-pass // `generic_assert` is completely unimplemented and doesn't generate any logic, thus the // reason why this test currently passes #![feature(core_intrinsics, generic_assert, generic_assert_internals)] use std::fmt::{Debug, Formatter}; #[derive(Clone, Copy, PartialEq)] struct CopyDebug(i32); impl Debug for CopyDebug { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { f.write_str("With great power comes great electricity bills") } } #[test] fn test() { let _copy_debug = CopyDebug(1); assert!(_copy_debug == CopyDebug(3)); } fn main() { }
22.814815
88
0.696429
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test() {\n let _copy_debug = CopyDebug(1);\n assert!(_copy_debug == CopyDebug(3));\n}\n}" ]
f70ce9ae0aa5eb80a4e82f5cf03c159b897730bb
3,516
rs
Rust
tests/test_module.rs
Hanaasagi/pyo3
3e7a823dee2c4574534deb3cba28b2f1e2149047
[ "Apache-2.0" ]
null
null
null
tests/test_module.rs
Hanaasagi/pyo3
3e7a823dee2c4574534deb3cba28b2f1e2149047
[ "Apache-2.0" ]
null
null
null
tests/test_module.rs
Hanaasagi/pyo3
3e7a823dee2c4574534deb3cba28b2f1e2149047
[ "Apache-2.0" ]
null
null
null
#![feature(specialization)] #[macro_use] extern crate pyo3; use pyo3::prelude::*; use pyo3::types::PyDict; #[macro_use] mod common; #[pyclass] struct EmptyClass {} fn sum_as_string(a: i64, b: i64) -> String { format!("{}", a + b).to_string() } #[pyfunction] /// Doubles the given value fn double(x: usize) -> usize { x * 2 } /// This module is implemented in Rust. #[pymodinit] fn module_with_functions(py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m, "sum_as_string")] fn sum_as_string_py(_py: Python, a: i64, b: i64) -> PyResult<String> { let out = sum_as_string(a, b); return Ok(out); } #[pyfn(m, "no_parameters")] fn no_parameters() -> PyResult<usize> { return Ok(42); } m.add_class::<EmptyClass>().unwrap(); m.add("foo", "bar").unwrap(); m.add_function(wrap_function!(double)).unwrap(); m.add("also_double", wrap_function!(double)(py)).unwrap(); Ok(()) } #[test] #[cfg(Py_3)] fn test_module_with_functions() { let gil = Python::acquire_gil(); let py = gil.python(); let d = PyDict::new(py); d.set_item("module_with_functions", unsafe { PyObject::from_owned_ptr(py, PyInit_module_with_functions()) }) .unwrap(); let run = |code| py.run(code, None, Some(d)).unwrap(); run("assert module_with_functions.__doc__ == 'This module is implemented in Rust.'"); run("assert module_with_functions.sum_as_string(1, 2) == '3'"); run("assert module_with_functions.no_parameters() == 42"); run("assert module_with_functions.foo == 'bar'"); run("assert module_with_functions.EmptyClass != None"); run("assert module_with_functions.double(3) == 6"); run("assert module_with_functions.double.__doc__ == 'Doubles the given value'"); run("assert module_with_functions.also_double(3) == 6"); run("assert module_with_functions.also_double.__doc__ == 'Doubles the given value'"); } #[pymodinit(other_name)] fn some_name(_: Python, _: &PyModule) -> PyResult<()> { Ok(()) } #[test] #[cfg(Py_3)] fn test_module_renaming() { let gil = Python::acquire_gil(); let py = gil.python(); let d = PyDict::new(py); 
d.set_item("different_name", unsafe { PyObject::from_owned_ptr(py, PyInit_other_name()) }) .unwrap(); py.run( "assert different_name.__name__ == 'other_name'", None, Some(d), ) .unwrap(); } #[test] #[cfg(Py_3)] fn test_module_from_code() { let gil = Python::acquire_gil(); let py = gil.python(); let adder_mod = PyModule::from_code( py, "def add(a,b):\n\treturn a+b", "adder_mod.py", "adder_mod", ) .expect("Module code should be loaded"); let add_func = adder_mod .get("add") .expect("Add fucntion should be in the module") .to_object(py); let ret_value: i32 = add_func .call1(py, (1, 2)) .expect("A value should be returned") .extract(py) .expect("The value should be able to be converted to an i32"); assert_eq!(ret_value, 3); } #[pyfunction] fn r#move() -> usize { 42 } #[pymodinit] fn raw_ident_module(_py: Python, module: &PyModule) -> PyResult<()> { module.add_function(wrap_function!(r#move)) } #[test] #[cfg(Py_3)] fn test_raw_idents() { let gil = Python::acquire_gil(); let py = gil.python(); let module = unsafe { PyObject::from_owned_ptr(py, PyInit_raw_ident_module()) }; py_assert!(py, module, "module.move() == 42"); }
23.918367
89
0.618316
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_module_with_functions() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n\n let d = PyDict::new(py);\n d.set_item(\"module_with_functions\", unsafe {\n PyObject::from_owned_ptr(py, PyInit_module_with_functions())\n })\n .unwrap();\n\n let run = |code| py.run(code, None, Some(d)).unwrap();\n\n run(\"assert module_with_functions.__doc__ == 'This module is implemented in Rust.'\");\n run(\"assert module_with_functions.sum_as_string(1, 2) == '3'\");\n run(\"assert module_with_functions.no_parameters() == 42\");\n run(\"assert module_with_functions.foo == 'bar'\");\n run(\"assert module_with_functions.EmptyClass != None\");\n run(\"assert module_with_functions.double(3) == 6\");\n run(\"assert module_with_functions.double.__doc__ == 'Doubles the given value'\");\n run(\"assert module_with_functions.also_double(3) == 6\");\n run(\"assert module_with_functions.also_double.__doc__ == 'Doubles the given value'\");\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_module_renaming() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n\n let d = PyDict::new(py);\n d.set_item(\"different_name\", unsafe {\n PyObject::from_owned_ptr(py, PyInit_other_name())\n })\n .unwrap();\n\n py.run(\n \"assert different_name.__name__ == 'other_name'\",\n None,\n Some(d),\n )\n .unwrap();\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_module_from_code() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n\n let adder_mod = PyModule::from_code(\n py,\n \"def add(a,b):\\n\\treturn a+b\",\n \"adder_mod.py\",\n \"adder_mod\",\n )\n .expect(\"Module code should be loaded\");\n\n let add_func = adder_mod\n .get(\"add\")\n .expect(\"Add fucntion should be in the module\")\n .to_object(py);\n\n let ret_value: i32 = add_func\n .call1(py, (1, 2))\n .expect(\"A value should be returned\")\n .extract(py)\n .expect(\"The value should be able to be converted to an 
i32\");\n\n assert_eq!(ret_value, 3);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_raw_idents() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n\n let module = unsafe { PyObject::from_owned_ptr(py, PyInit_raw_ident_module()) };\n\n py_assert!(py, module, \"module.move() == 42\");\n}\n}" ]
f70d2ae02eda0602866e59b7326ceaa29a45628b
4,384
rs
Rust
src/image.rs
ruimo/remove_docker_images
f170cb878c6c250e3c80b2f43250ffc7fe443ad1
[ "Apache-2.0" ]
null
null
null
src/image.rs
ruimo/remove_docker_images
f170cb878c6c250e3c80b2f43250ffc7fe443ad1
[ "Apache-2.0" ]
null
null
null
src/image.rs
ruimo/remove_docker_images
f170cb878c6c250e3c80b2f43250ffc7fe443ad1
[ "Apache-2.0" ]
null
null
null
use std::hash::{Hash, Hasher}; use std::collections::HashMap; use std::collections::HashSet; use std::fmt; use super::version; #[cfg(test)] use super::image; pub struct ImageEntry { pub id: String, pub ver: version::Version, } impl PartialEq for ImageEntry { fn eq(&self, other: &Self) -> bool { self.ver == other.ver } } impl Eq for ImageEntry {} impl Hash for ImageEntry { fn hash<H:Hasher>(&self, state: &mut H) { self.ver.hash(state); } } impl fmt::Debug for ImageEntry { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ImageEntry [id: {}, ver: {}]", self.id, self.ver) } } pub struct Images { // key: repository pub entries: HashMap<String, HashSet<ImageEntry>> } impl Images { pub fn delete<F>(&self, canonical_keep_count: usize, snapshot_keep_count: usize, mut del: F) -> () where F : FnMut(&str, &version::Version) -> () // repo, version { for (repo, entry) in &self.entries { let mut sum_canonical: HashMap<&Option<String>, Vec<&version::Version>> = HashMap::new(); let mut sum_snapshot: HashMap<&Option<String>, Vec<&version::Version>> = HashMap::new(); for e in entry { let sum = if e.ver.is_snapshot { &mut sum_snapshot } else { &mut sum_canonical }; let keep_count = if e.ver.is_snapshot { snapshot_keep_count } else {canonical_keep_count }; let tbl = sum.entry(&e.ver.branch).or_insert_with(|| Vec::new()); match tbl.binary_search(&&e.ver) { Ok(_idx) => { }, Err(idx) => { tbl.insert(idx, &e.ver); } } if keep_count < tbl.len() { let v = tbl.remove(0); del(repo, v); } } } } } #[test] fn delete_test() { let parser = version::parser(); let mut map: HashMap<String, HashSet<image::ImageEntry>> = HashMap::new(); let mut entries0 = HashSet::new(); entries0.insert(ImageEntry { id: "id00".to_string(), ver: parser.parse("1.0").unwrap() }); entries0.insert(ImageEntry { id: "id01".to_string(), ver: parser.parse("1.1").unwrap() }); entries0.insert(ImageEntry { id: "id02".to_string(), ver: parser.parse("1.10").unwrap() }); entries0.insert(ImageEntry { id: 
"id03".to_string(), ver: parser.parse("1.2").unwrap() }); entries0.insert(ImageEntry { id: "id04".to_string(), ver: parser.parse("1.2-SNAPSHOT").unwrap() }); entries0.insert(ImageEntry { id: "id05".to_string(), ver: parser.parse("1.1-SNAPSHOT").unwrap() }); entries0.insert(ImageEntry { id: "id06".to_string(), ver: parser.parse("1.2.0-BR123").unwrap() }); entries0.insert(ImageEntry { id: "id07".to_string(), ver: parser.parse("1.2.1-BR123").unwrap() }); entries0.insert(ImageEntry { id: "id08".to_string(), ver: parser.parse("1.2.10-BR123").unwrap() }); entries0.insert(ImageEntry { id: "id09".to_string(), ver: parser.parse("1.2.2-BR123").unwrap() }); entries0.insert(ImageEntry { id: "id10".to_string(), ver: parser.parse("1.2.2-BR123-SNAPSHOT").unwrap() }); entries0.insert(ImageEntry { id: "id11".to_string(), ver: parser.parse("1.2.1-BR123-SNAPSHOT").unwrap() }); map.insert("repo0".to_string(), entries0); let mut entries1 = HashSet::new(); entries1.insert(ImageEntry { id: "id12".to_string(), ver: parser.parse("2.0").unwrap() }); entries1.insert(ImageEntry { id: "id13".to_string(), ver: parser.parse("2.1").unwrap() }); entries1.insert(ImageEntry { id: "id14".to_string(), ver: parser.parse("2.10").unwrap() }); entries1.insert(ImageEntry { id: "id15".to_string(), ver: parser.parse("2.2").unwrap() }); map.insert("repo1".to_string(), entries1); let images = Images { entries: map }; let mut deleted = HashSet::new(); images.delete(3, 1, |repo, ver| { deleted.insert(format!("{}:{}", repo, ver.to_string())); }); assert_eq!(deleted.len(), 5); assert_eq!(deleted.contains("repo0:1.0"), true); assert_eq!(deleted.contains("repo0:1.1-SNAPSHOT"), true); assert_eq!(deleted.contains("repo0:1.2.0-BR123"), true); assert_eq!(deleted.contains("repo0:1.2.1-BR123-SNAPSHOT"), true); assert_eq!(deleted.contains("repo1:2.0"), true); }
37.793103
111
0.592838
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn delete_test() {\n let parser = version::parser();\n let mut map: HashMap<String, HashSet<image::ImageEntry>> = HashMap::new();\n let mut entries0 = HashSet::new();\n entries0.insert(ImageEntry { id: \"id00\".to_string(), ver: parser.parse(\"1.0\").unwrap() });\n entries0.insert(ImageEntry { id: \"id01\".to_string(), ver: parser.parse(\"1.1\").unwrap() });\n entries0.insert(ImageEntry { id: \"id02\".to_string(), ver: parser.parse(\"1.10\").unwrap() });\n entries0.insert(ImageEntry { id: \"id03\".to_string(), ver: parser.parse(\"1.2\").unwrap() });\n\n entries0.insert(ImageEntry { id: \"id04\".to_string(), ver: parser.parse(\"1.2-SNAPSHOT\").unwrap() });\n entries0.insert(ImageEntry { id: \"id05\".to_string(), ver: parser.parse(\"1.1-SNAPSHOT\").unwrap() });\n\n entries0.insert(ImageEntry { id: \"id06\".to_string(), ver: parser.parse(\"1.2.0-BR123\").unwrap() });\n entries0.insert(ImageEntry { id: \"id07\".to_string(), ver: parser.parse(\"1.2.1-BR123\").unwrap() });\n entries0.insert(ImageEntry { id: \"id08\".to_string(), ver: parser.parse(\"1.2.10-BR123\").unwrap() });\n entries0.insert(ImageEntry { id: \"id09\".to_string(), ver: parser.parse(\"1.2.2-BR123\").unwrap() });\n\n entries0.insert(ImageEntry { id: \"id10\".to_string(), ver: parser.parse(\"1.2.2-BR123-SNAPSHOT\").unwrap() });\n entries0.insert(ImageEntry { id: \"id11\".to_string(), ver: parser.parse(\"1.2.1-BR123-SNAPSHOT\").unwrap() });\n\n map.insert(\"repo0\".to_string(), entries0);\n\n let mut entries1 = HashSet::new();\n entries1.insert(ImageEntry { id: \"id12\".to_string(), ver: parser.parse(\"2.0\").unwrap() });\n entries1.insert(ImageEntry { id: \"id13\".to_string(), ver: parser.parse(\"2.1\").unwrap() });\n entries1.insert(ImageEntry { id: \"id14\".to_string(), ver: parser.parse(\"2.10\").unwrap() });\n entries1.insert(ImageEntry { id: \"id15\".to_string(), ver: parser.parse(\"2.2\").unwrap() });\n\n map.insert(\"repo1\".to_string(), 
entries1);\n\n let images = Images { entries: map };\n\n let mut deleted = HashSet::new();\n images.delete(3, 1, |repo, ver| {\n deleted.insert(format!(\"{}:{}\", repo, ver.to_string()));\n });\n\n assert_eq!(deleted.len(), 5);\n assert_eq!(deleted.contains(\"repo0:1.0\"), true);\n assert_eq!(deleted.contains(\"repo0:1.1-SNAPSHOT\"), true);\n assert_eq!(deleted.contains(\"repo0:1.2.0-BR123\"), true);\n assert_eq!(deleted.contains(\"repo0:1.2.1-BR123-SNAPSHOT\"), true);\n assert_eq!(deleted.contains(\"repo1:2.0\"), true);\n}\n}" ]
f70dea4da58e2605a6aa3c9e559bb12a5b02e8af
17,196
rs
Rust
src/test.rs
Mapet13/Chip-8-VM-Emulator
a3ad03b338b9159e3b9738ac274489a720f3e5ca
[ "MIT" ]
null
null
null
src/test.rs
Mapet13/Chip-8-VM-Emulator
a3ad03b338b9159e3b9738ac274489a720f3e5ca
[ "MIT" ]
null
null
null
src/test.rs
Mapet13/Chip-8-VM-Emulator
a3ad03b338b9159e3b9738ac274489a720f3e5ca
[ "MIT" ]
null
null
null
#![allow(dead_code)] #![allow(non_snake_case)] #![allow(unused_imports)] use super::*; use crate::instructions::decode_opcode; fn get_vm() -> Chip8VM { Chip8VM { waiting_for_key_press: false, key_index_store: 0x00, display_data: [false; DISPLAY_SIZE[0] * DISPLAY_SIZE[1]], memory: [0 as u8; MEMORY_SIZE], v: [0 as u8; 16], i: 0, delay_timer: 0, sound_timer: 0, program_counter: 0x200, stack_pointer: 0, stack: [0 as u16; 16], pressed_key: None, } } #[test] fn test_00E0() { // 0x00E0 - Clear the screen let opcode = 0x00E0; let mut vm = get_vm(); vm.display_data = [true; DISPLAY_SIZE[0] * DISPLAY_SIZE[1]]; vm.execute_instruction(decode_opcode(opcode), opcode); for i in 0..vm.display_data.len() { assert_eq!(vm.display_data[i], false); } } #[test] fn test_00EE() { // 0x00EE - Return from a subroutine let opcode = 0x00EE; let mut vm = get_vm(); vm.stack[0x0] = 0x200; vm.stack_pointer = 1; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x200); assert_eq!(vm.stack_pointer, 0); } #[test] fn test_1NNN() { // 0x1NNN - Jump to address NNN let opcode = 0x1234; let mut vm = get_vm(); vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter + 2, 0x0234); // add 2 to jump result because in normal execution after jump CPU will increase pc by 2 } #[test] fn test_2NNN() { // 0x2NNN - Execute subroutine starting at address NNN let opcode = 0x2345; let mut vm = get_vm(); let old_pc_value = vm.program_counter; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter + 2, 0x0345); // add 2 to jump result because in normal execution after jump CPU will increase pc by 2 assert_eq!(vm.stack_pointer, 1); assert_eq!(vm.stack[0x0], old_pc_value); } #[test] fn test_3XNN() { // 0x3XNN - Skip the following instruction if the value of register VX equals NN let opcode = 0x3456; let mut vm = get_vm(); //not equal vm.program_counter = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); 
assert_eq!(vm.program_counter, 0x0); //equal vm.program_counter = 0x0; vm.v[0x4] = 0x56; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x2); } #[test] fn test_4XNN() { // 0x4XNN - Skip the following instruction if the value of register VX is not equal to NN let opcode = 0x4567; let mut vm = get_vm(); //not equal vm.program_counter = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x2); //equal vm.program_counter = 0x0; vm.v[0x5] = 0x67; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x0); } #[test] fn test_5XY0() { // 0x5XY0 - Skip the following instruction if the value of register VX is equal to the value of register VY let opcode = 0x5670; let mut vm = get_vm(); //not equal vm.v[0x6] = 0x0; vm.v[0x7] = 0x1; vm.program_counter = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x0); //equal vm.v[0x6] = 0x1; vm.v[0x7] = 0x1; vm.program_counter = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x2); } #[test] fn test_6XNN() { // 0x6XNN - Store number NN in register VX let opcode = 0x6789; let mut vm = get_vm(); vm.v[0x7] = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x7], 0x89); } #[test] fn test_7XNN() { // 0x7XNN - Add the value NN to register VX let opcode = 0x789A; let mut vm = get_vm(); vm.v[0x8] = 0x11; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x8], 0xAB); } #[test] fn test_8XY0() { // 0x8XY0 - Store the value of register VY in register VX let opcode = 0x89A0; let mut vm = get_vm(); vm.v[0x9] = 0x99; vm.v[0xA] = 0xAA; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], vm.v[0xA]); assert_eq!(vm.v[0x9], 0xAA); } #[test] fn test_8XY1() { // 0x8XY1 - Set VX to VX OR VY let opcode = 0x89A1; let mut vm = get_vm(); vm.v[0x9] = 0x99; vm.v[0xA] = 0xAA; 
vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0x99 | 0xAA); } #[test] fn test_8XY2() { // 0x8XY2 - Set VX to VX AND VY let opcode = 0x89A2; let mut vm = get_vm(); vm.v[0x9] = 0x99; vm.v[0xA] = 0xAA; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0x99 & 0xAA); } #[test] fn test_8XY3() { // 0x8XY3 - Set VX to VX XOR VY let opcode = 0x89A3; let mut vm = get_vm(); vm.v[0x9] = 0x99; vm.v[0xA] = 0xAA; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0xAA ^ 0x99); } #[test] fn test_8XY4() { // 0x8XY4 - Add the value of register VY to register VX // Set VF to 01 if a carry occurs // Set VF to 00 if a carry does not occur let opcode = 0x89A4; let mut vm = get_vm(); // with borrow vm.v[0x9] = 0x99; vm.v[0xA] = 0xAA; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0x43); assert_eq!(vm.v[0xF], 0x1); // without borrow vm.v[0x9] = 0x11; vm.v[0xA] = 0x22; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0x33); assert_eq!(vm.v[0xF], 0x0); } #[test] fn test_8XY5() { // 0x8XY5 - Subtract the value of register VY from register VX // Set VF to 00 if a borrow occurs // Set VF to 01 if a borrow does not occur let opcode = 0x89A5; let mut vm = get_vm(); // without borrow vm.v[0x9] = 0xFF; vm.v[0xA] = 0x01; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0xFE); assert_eq!(vm.v[0xF], 0x1); // with borrow vm.v[0x9] = 0x01; vm.v[0xA] = 0x02; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0xFF); assert_eq!(vm.v[0xF], 0x0); } #[test] fn test_8XY6() { // 0x8XY6 - Store the value of register VY shifted right one bit in register VX // Set register VF to the least significant bit prior to the shift // VY is unchange let opcode = 0x89A6; let mut vm = get_vm(); // the least-significant bit of Vx is 1 vm.v[0xA] = 0xFF; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 
0x7F); assert_eq!(vm.v[0xF], 0x1); // the least-significant bit of Vx is 0 vm.v[0xA] = 0xFE; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0x7F); assert_eq!(vm.v[0xF], 0x0); } #[test] fn test_8XY7() { // 0x8XY7 - Set register VX to the value of VY minus VX // Set VF to 00 if a borrow occurs // Set VF to 01 if a borrow does not occur let opcode = 0x89A7; let mut vm = get_vm(); // without borrow vm.v[0x9] = 0x02; vm.v[0xA] = 0x08; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0x06); assert_eq!(vm.v[0xF], 0x1); // with borrow vm.v[0x9] = 0x04; vm.v[0xA] = 0x02; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x9], 0xFE); assert_eq!(vm.v[0xF], 0x0); } #[test] fn test_8XYE() { // 0x8XYE - Store the value of register VY shifted left one bit in register VX // Set register VF to the most significant bit prior to the shift // VY is unchanged let opcode = 0x89AE; let mut vm = get_vm(); // the most-significant bit of Vx is 0 vm.v[0xA] = 0x11; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0xF], 0x00); assert_eq!(vm.v[0xA], 0x11); assert_eq!(vm.v[0x9], 0x22); // the most-significant bit of Vx is 1 vm.v[0xA] = 0x81; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0xA], 0x81); assert_eq!(vm.v[0x9], 0x02); assert_eq!(vm.v[0xF], 0x01); } #[test] fn test_9XY0() { // 0x9XY0 - Skip the following instruction if the value of register VX is not equal to the value of register VY let opcode = 0x9AB0; let mut vm = get_vm(); //equal vm.v[0xA] = 0x1; vm.v[0xB] = 0x1; vm.program_counter = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x0); //not equal vm.v[0xA] = 0x0; vm.v[0xB] = 0x1; vm.program_counter = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x2); } #[test] fn test_ANNN() { // 0xANNN - Store memory address NNN in register I let opcode = 0xABCD; let mut vm = 
get_vm(); vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.i, 0xBCD); } #[test] fn test_BNNN() { // 0xBNNN - Jump to address NNN + V0 let opcode = 0xBCDE; let mut vm = get_vm(); vm.v[0x0] = 0x04; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter + 2, 0xCDE + 0x04); // add 2 to jump result because in normal execution after jump CPU will increase pc by 2 } #[test] fn test_CXNN() { // 0xCXNN - Set VX to a random number with a mask of NN let opcode = 0xCD00; let mut vm = get_vm(); vm.v[0xD] = 0xFF; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0xD], 0x00); } fn load_test_sprite(memory: &mut [u8; MEMORY_SIZE]) { memory[0xA] = 0xF0; memory[0xB] = 0x90; memory[0xC] = 0xF0; memory[0xD] = 0x90; memory[0xE] = 0xF0; } fn assert_sprite_drawing(display_data: &[bool; DISPLAY_SIZE[0] * DISPLAY_SIZE[1]]) { let assert_pixel = |x, y, expected: bool| { assert_eq!( display_data[y * DISPLAY_SIZE[0] + x], expected, "pixel [{}, {}] should be {}", x, y, expected ); }; assert_pixel(0, 0, true); assert_pixel(1, 0, true); assert_pixel(2, 0, true); assert_pixel(3, 0, true); assert_pixel(0, 1, true); assert_pixel(1, 1, false); assert_pixel(2, 1, false); assert_pixel(3, 1, true); assert_pixel(0, 2, true); assert_pixel(1, 2, true); assert_pixel(2, 2, true); assert_pixel(3, 2, true); assert_pixel(0, 3, true); assert_pixel(1, 3, false); assert_pixel(2, 3, false); assert_pixel(3, 3, true); assert_pixel(0, 4, true); assert_pixel(1, 4, true); assert_pixel(2, 4, true); assert_pixel(3, 4, true); } fn assert_sprite_ereasing(display_data: &[bool; DISPLAY_SIZE[0] * DISPLAY_SIZE[1]]) { let assert_pixel = |x, y, expected: bool| { assert_eq!( display_data[y * DISPLAY_SIZE[0] + x], expected, "pixel [{}, {}] should be {}", x, y, expected ); }; assert_pixel(0, 0, false); assert_pixel(1, 0, false); assert_pixel(2, 0, false); assert_pixel(3, 0, false); assert_pixel(0, 1, false); assert_pixel(1, 1, false); assert_pixel(2, 1, false); 
assert_pixel(3, 1, false); assert_pixel(0, 2, false); assert_pixel(1, 2, false); assert_pixel(2, 2, false); assert_pixel(3, 2, false); assert_pixel(0, 3, false); assert_pixel(1, 3, false); assert_pixel(2, 3, false); assert_pixel(3, 3, false); assert_pixel(0, 4, false); assert_pixel(1, 4, false); assert_pixel(2, 4, false); assert_pixel(3, 4, false); } #[test] fn test_DXYN() { // 0xDXYN - Draw a sprite at position VX, VY with N bytes of sprite data starting at the address stored in I // Set VF to 01 if any set pixels are changed to unset, and 00 otherwise let opcode = 0xD005; let mut vm = get_vm(); load_test_sprite(&mut vm.memory); vm.i = 0xA; vm.execute_instruction(decode_opcode(opcode), opcode); assert_sprite_drawing(&vm.display_data); assert_eq!(vm.v[0xF], 0x00); vm.execute_instruction(decode_opcode(opcode), opcode); assert_sprite_ereasing(&vm.display_data); assert_eq!(vm.v[0xF], 0x01); } #[test] fn test_EX9E() { // 0xEX9E - skip the following instruction if the key corresponding to the hex value currently stored in register VX is pressed let opcode = 0xEE9E; let mut vm = get_vm(); vm.v[0xE] = 0x01; //is pressed vm.program_counter = 0x00; vm.pressed_key = Some(0x01); vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x02); //is not pressed vm.pressed_key = Some(0x02); vm.program_counter = 0x00; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x00); } #[test] fn test_EXA1() { // 0xEXA1 - skip the following instruction if the key corresponding to the hex value currently stored in register VX is not pressed let opcode = 0xEEA1; let mut vm = get_vm(); vm.v[0xE] = 0x01; //is pressed vm.program_counter = 0x00; vm.pressed_key = Some(0x01); vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x00); //is not pressed vm.pressed_key = Some(0x02); vm.program_counter = 0x00; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.program_counter, 0x02); } 
#[test] fn test_FX07() { // 0xFX07 - Store the current value of the delay timer in register VX let opcode = 0xF007; let mut vm = get_vm(); vm.delay_timer = 0x32; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x0], vm.delay_timer); } #[test] fn test_FX0A() { // 0xFX0A - skip the following instruction if the key corresponding to the hex value currently stored in register VX is not pressed let opcode = 0xF00A; let mut vm = get_vm(); vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.waiting_for_key_press, true); } #[test] fn test_FX15() { // 0xFX15 - Set the delay timer to the value of register VX let opcode = 0xF015; let mut vm = get_vm(); vm.delay_timer = 0x22; vm.v[0x0] = 0x33; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.delay_timer, vm.v[0x0]); assert_eq!(vm.delay_timer, 0x33); } #[test] fn test_FX18() { // 0xEXA1 - Set the sound timer to the value of register VX let opcode = 0xF018; let mut vm = get_vm(); vm.sound_timer = 0x22; vm.v[0x0] = 0x33; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.sound_timer, vm.v[0x0]); assert_eq!(vm.sound_timer, 0x33); } #[test] fn test_FX1E() { // 0xEX1E - sAdd the value stored in register VX to register I let opcode = 0xF01E; let mut vm = get_vm(); vm.i = 0x22; vm.v[0x0] = 0x33; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.i, 0x55); } #[test] fn test_FX29() { // 0xFX29 - Set I to the memory address of the sprite data corresponding to the hexadecimal digit stored in register VX let opcode = 0xF029; let mut vm = get_vm(); vm.v[0x0] = 0x10; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.i, 0x50); } #[test] fn test_FX33() { // 0xFX33 - Store the binary-coded decimal equivalent of the value stored in register VX at addresses I, I + 1, and I + 2 let opcode = 0xF033; let mut vm = get_vm(); vm.v[0x0] = 123; vm.i = 0x0; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.memory[vm.i as 
usize], 1); assert_eq!(vm.memory[vm.i as usize + 1], 2); assert_eq!(vm.memory[vm.i as usize + 2], 3); } #[test] fn test_FX55() { // 0xFX55 - Store the values of registers V0 to VX inclusive in memory starting at address I // I is set to I + X + 1 after operation let opcode = 0xF455; let mut vm = get_vm(); vm.i = 0x0; vm.v[0x0] = 0x0; vm.v[0x1] = 0x1; vm.v[0x2] = 0x2; vm.v[0x3] = 0x3; vm.v[0x4] = 0x4; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.memory[0x0], 0x00); assert_eq!(vm.memory[0x1], 0x01); assert_eq!(vm.memory[0x2], 0x02); assert_eq!(vm.memory[0x3], 0x03); assert_eq!(vm.memory[0x4], 0x04); assert_eq!(vm.i, 0x05); } #[test] fn test_FX65() { // 0xFX65 - Fill registers V0 to VX inclusive with the values stored in memory starting at address I let opcode = 0xF465; let mut vm = get_vm(); vm.i = 0x0; vm.memory[0x0] = 0x0; vm.memory[0x1] = 0x1; vm.memory[0x2] = 0x2; vm.memory[0x3] = 0x3; vm.memory[0x4] = 0x4; vm.execute_instruction(decode_opcode(opcode), opcode); assert_eq!(vm.v[0x0], 0x00); assert_eq!(vm.v[0x1], 0x01); assert_eq!(vm.v[0x2], 0x02); assert_eq!(vm.v[0x3], 0x03); assert_eq!(vm.v[0x4], 0x04); assert_eq!(vm.i, 0x05); }
25.400295
142
0.625901
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_00E0() {\n // 0x00E0 - Clear the screen\n\n let opcode = 0x00E0;\n let mut vm = get_vm();\n\n vm.display_data = [true; DISPLAY_SIZE[0] * DISPLAY_SIZE[1]];\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n for i in 0..vm.display_data.len() {\n assert_eq!(vm.display_data[i], false);\n }\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_00EE() {\n // 0x00EE - Return from a subroutine\n\n let opcode = 0x00EE;\n let mut vm = get_vm();\n\n vm.stack[0x0] = 0x200;\n vm.stack_pointer = 1;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.program_counter, 0x200);\n assert_eq!(vm.stack_pointer, 0);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_1NNN() {\n // 0x1NNN - Jump to address NNN\n\n let opcode = 0x1234;\n let mut vm = get_vm();\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.program_counter + 2, 0x0234); // add 2 to jump result because in normal execution after jump CPU will increase pc by 2\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_2NNN() {\n // 0x2NNN - Execute subroutine starting at address NNN\n\n let opcode = 0x2345;\n let mut vm = get_vm();\n\n let old_pc_value = vm.program_counter;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.program_counter + 2, 0x0345); // add 2 to jump result because in normal execution after jump CPU will increase pc by 2\n assert_eq!(vm.stack_pointer, 1);\n assert_eq!(vm.stack[0x0], old_pc_value);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_3XNN() {\n // 0x3XNN - Skip the following instruction if the value of register VX equals NN\n\n let opcode = 0x3456;\n let mut vm = get_vm();\n\n //not equal\n vm.program_counter = 0x0;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x0);\n\n //equal\n vm.program_counter = 0x0;\n vm.v[0x4] = 0x56;\n 
vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x2);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_4XNN() {\n // 0x4XNN - Skip the following instruction if the value of register VX is not equal to NN\n\n let opcode = 0x4567;\n let mut vm = get_vm();\n\n //not equal\n vm.program_counter = 0x0;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x2);\n\n //equal\n vm.program_counter = 0x0;\n vm.v[0x5] = 0x67;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x0);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_5XY0() {\n // 0x5XY0 - Skip the following instruction if the value of register VX is equal to the value of register VY\n\n let opcode = 0x5670;\n let mut vm = get_vm();\n\n //not equal\n vm.v[0x6] = 0x0;\n vm.v[0x7] = 0x1;\n vm.program_counter = 0x0;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x0);\n //equal\n vm.v[0x6] = 0x1;\n vm.v[0x7] = 0x1;\n vm.program_counter = 0x0;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x2);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_6XNN() {\n // 0x6XNN - Store number NN in register VX\n\n let opcode = 0x6789;\n let mut vm = get_vm();\n\n vm.v[0x7] = 0x0;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.v[0x7], 0x89);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_7XNN() {\n // 0x7XNN - Add the value NN to register VX\n\n let opcode = 0x789A;\n let mut vm = get_vm();\n\n vm.v[0x8] = 0x11;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.v[0x8], 0xAB);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY0() {\n // 0x8XY0 - Store the value of register VY in register VX\n\n let opcode = 0x89A0;\n let mut vm = get_vm();\n\n vm.v[0x9] = 0x99;\n vm.v[0xA] 
= 0xAA;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.v[0x9], vm.v[0xA]);\n assert_eq!(vm.v[0x9], 0xAA);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY1() {\n // 0x8XY1 - Set VX to VX OR VY\n\n let opcode = 0x89A1;\n let mut vm = get_vm();\n\n vm.v[0x9] = 0x99;\n vm.v[0xA] = 0xAA;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.v[0x9], 0x99 | 0xAA);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY2() {\n // 0x8XY2 - Set VX to VX AND VY\n\n let opcode = 0x89A2;\n let mut vm = get_vm();\n\n vm.v[0x9] = 0x99;\n vm.v[0xA] = 0xAA;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.v[0x9], 0x99 & 0xAA);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY3() {\n // 0x8XY3 - Set VX to VX XOR VY\n\n let opcode = 0x89A3;\n let mut vm = get_vm();\n\n vm.v[0x9] = 0x99;\n vm.v[0xA] = 0xAA;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.v[0x9], 0xAA ^ 0x99);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY4() {\n // 0x8XY4 - Add the value of register VY to register VX\n // Set VF to 01 if a carry occurs\n // Set VF to 00 if a carry does not occur\n\n let opcode = 0x89A4;\n let mut vm = get_vm();\n\n // with borrow\n vm.v[0x9] = 0x99;\n vm.v[0xA] = 0xAA;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0x43);\n assert_eq!(vm.v[0xF], 0x1);\n\n // without borrow\n vm.v[0x9] = 0x11;\n vm.v[0xA] = 0x22;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0x33);\n assert_eq!(vm.v[0xF], 0x0);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY5() {\n // 0x8XY5 - Subtract the value of register VY from register VX\n // Set VF to 00 if a borrow occurs\n // Set VF to 01 if a borrow does not occur\n\n let opcode = 0x89A5;\n let mut vm = get_vm();\n\n // without borrow\n vm.v[0x9] = 0xFF;\n vm.v[0xA] 
= 0x01;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0xFE);\n assert_eq!(vm.v[0xF], 0x1);\n\n // with borrow\n vm.v[0x9] = 0x01;\n vm.v[0xA] = 0x02;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0xFF);\n assert_eq!(vm.v[0xF], 0x0);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY6() {\n // 0x8XY6 - Store the value of register VY shifted right one bit in register VX\n // Set register VF to the least significant bit prior to the shift\n // VY is unchange\n\n let opcode = 0x89A6;\n let mut vm = get_vm();\n\n // the least-significant bit of Vx is 1\n vm.v[0xA] = 0xFF;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0x7F);\n assert_eq!(vm.v[0xF], 0x1);\n\n // the least-significant bit of Vx is 0\n vm.v[0xA] = 0xFE;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0x7F);\n assert_eq!(vm.v[0xF], 0x0);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XY7() {\n // 0x8XY7 - Set register VX to the value of VY minus VX\n // Set VF to 00 if a borrow occurs\n // Set VF to 01 if a borrow does not occur\n\n let opcode = 0x89A7;\n let mut vm = get_vm();\n\n // without borrow\n vm.v[0x9] = 0x02;\n vm.v[0xA] = 0x08;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0x06);\n assert_eq!(vm.v[0xF], 0x1);\n\n // with borrow\n vm.v[0x9] = 0x04;\n vm.v[0xA] = 0x02;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x9], 0xFE);\n assert_eq!(vm.v[0xF], 0x0);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_8XYE() {\n // 0x8XYE - Store the value of register VY shifted left one bit in register VX\n // Set register VF to the most significant bit prior to the shift\n // VY is unchanged\n\n let opcode = 0x89AE;\n let mut vm = get_vm();\n\n // the most-significant bit of Vx is 0\n vm.v[0xA] = 0x11;\n 
vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0xF], 0x00);\n assert_eq!(vm.v[0xA], 0x11);\n assert_eq!(vm.v[0x9], 0x22);\n\n // the most-significant bit of Vx is 1\n vm.v[0xA] = 0x81;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0xA], 0x81);\n assert_eq!(vm.v[0x9], 0x02);\n assert_eq!(vm.v[0xF], 0x01);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_9XY0() {\n // 0x9XY0 - Skip the following instruction if the value of register VX is not equal to the value of register VY\n\n let opcode = 0x9AB0;\n let mut vm = get_vm();\n\n //equal\n vm.v[0xA] = 0x1;\n vm.v[0xB] = 0x1;\n vm.program_counter = 0x0;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x0);\n //not equal\n vm.v[0xA] = 0x0;\n vm.v[0xB] = 0x1;\n vm.program_counter = 0x0;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x2);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_ANNN() {\n // 0xANNN - Store memory address NNN in register I\n\n let opcode = 0xABCD;\n let mut vm = get_vm();\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.i, 0xBCD);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_BNNN() {\n // 0xBNNN - Jump to address NNN + V0\n\n let opcode = 0xBCDE;\n let mut vm = get_vm();\n vm.v[0x0] = 0x04;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter + 2, 0xCDE + 0x04); // add 2 to jump result because in normal execution after jump CPU will increase pc by 2\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_CXNN() {\n // 0xCXNN - Set VX to a random number with a mask of NN\n\n let opcode = 0xCD00;\n let mut vm = get_vm();\n vm.v[0xD] = 0xFF;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0xD], 0x00);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_DXYN() {\n // 
0xDXYN - Draw a sprite at position VX, VY with N bytes of sprite data starting at the address stored in I\n // Set VF to 01 if any set pixels are changed to unset, and 00 otherwise\n\n let opcode = 0xD005;\n let mut vm = get_vm();\n\n load_test_sprite(&mut vm.memory);\n vm.i = 0xA;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_sprite_drawing(&vm.display_data);\n assert_eq!(vm.v[0xF], 0x00);\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_sprite_ereasing(&vm.display_data);\n assert_eq!(vm.v[0xF], 0x01);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_EX9E() {\n // 0xEX9E - skip the following instruction if the key corresponding to the hex value currently stored in register VX is pressed\n\n let opcode = 0xEE9E;\n let mut vm = get_vm();\n\n vm.v[0xE] = 0x01;\n //is pressed\n vm.program_counter = 0x00;\n vm.pressed_key = Some(0x01);\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x02);\n\n //is not pressed\n vm.pressed_key = Some(0x02);\n vm.program_counter = 0x00;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x00);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_EXA1() {\n // 0xEXA1 - skip the following instruction if the key corresponding to the hex value currently stored in register VX is not pressed\n\n let opcode = 0xEEA1;\n let mut vm = get_vm();\n\n vm.v[0xE] = 0x01;\n //is pressed\n vm.program_counter = 0x00;\n vm.pressed_key = Some(0x01);\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x00);\n\n //is not pressed\n vm.pressed_key = Some(0x02);\n vm.program_counter = 0x00;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.program_counter, 0x02);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX07() {\n // 0xFX07 - Store the current value of the delay timer in register VX\n\n let opcode = 0xF007;\n 
let mut vm = get_vm();\n\n vm.delay_timer = 0x32;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x0], vm.delay_timer);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX0A() {\n // 0xFX0A - skip the following instruction if the key corresponding to the hex value currently stored in register VX is not pressed\n\n let opcode = 0xF00A;\n let mut vm = get_vm();\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.waiting_for_key_press, true);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX15() {\n // 0xFX15 - Set the delay timer to the value of register VX\n\n let opcode = 0xF015;\n let mut vm = get_vm();\n\n vm.delay_timer = 0x22;\n vm.v[0x0] = 0x33;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.delay_timer, vm.v[0x0]);\n assert_eq!(vm.delay_timer, 0x33);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX18() {\n // 0xEXA1 - Set the sound timer to the value of register VX\n\n let opcode = 0xF018;\n let mut vm = get_vm();\n\n vm.sound_timer = 0x22;\n vm.v[0x0] = 0x33;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.sound_timer, vm.v[0x0]);\n assert_eq!(vm.sound_timer, 0x33);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX1E() {\n // 0xEX1E - sAdd the value stored in register VX to register I\n\n let opcode = 0xF01E;\n let mut vm = get_vm();\n vm.i = 0x22;\n vm.v[0x0] = 0x33;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.i, 0x55);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX29() {\n // 0xFX29 - Set I to the memory address of the sprite data corresponding to the hexadecimal digit stored in register VX\n\n let opcode = 0xF029;\n let mut vm = get_vm();\n\n vm.v[0x0] = 0x10;\n vm.execute_instruction(decode_opcode(opcode), opcode);\n\n assert_eq!(vm.i, 0x50);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n 
fn test_FX33() {\n // 0xFX33 - Store the binary-coded decimal equivalent of the value stored in register VX at addresses I, I + 1, and I + 2\n\n let opcode = 0xF033;\n let mut vm = get_vm();\n\n vm.v[0x0] = 123;\n vm.i = 0x0;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.memory[vm.i as usize], 1);\n assert_eq!(vm.memory[vm.i as usize + 1], 2);\n assert_eq!(vm.memory[vm.i as usize + 2], 3);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX55() {\n // 0xFX55 - Store the values of registers V0 to VX inclusive in memory starting at address I\n // I is set to I + X + 1 after operation\n\n let opcode = 0xF455;\n let mut vm = get_vm();\n\n vm.i = 0x0;\n vm.v[0x0] = 0x0;\n vm.v[0x1] = 0x1;\n vm.v[0x2] = 0x2;\n vm.v[0x3] = 0x3;\n vm.v[0x4] = 0x4;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.memory[0x0], 0x00);\n assert_eq!(vm.memory[0x1], 0x01);\n assert_eq!(vm.memory[0x2], 0x02);\n assert_eq!(vm.memory[0x3], 0x03);\n assert_eq!(vm.memory[0x4], 0x04);\n assert_eq!(vm.i, 0x05);\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_FX65() {\n // 0xFX65 - Fill registers V0 to VX inclusive with the values stored in memory starting at address I\n\n let opcode = 0xF465;\n let mut vm = get_vm();\n\n vm.i = 0x0;\n vm.memory[0x0] = 0x0;\n vm.memory[0x1] = 0x1;\n vm.memory[0x2] = 0x2;\n vm.memory[0x3] = 0x3;\n vm.memory[0x4] = 0x4;\n\n vm.execute_instruction(decode_opcode(opcode), opcode);\n assert_eq!(vm.v[0x0], 0x00);\n assert_eq!(vm.v[0x1], 0x01);\n assert_eq!(vm.v[0x2], 0x02);\n assert_eq!(vm.v[0x3], 0x03);\n assert_eq!(vm.v[0x4], 0x04);\n assert_eq!(vm.i, 0x05);\n}\n}" ]
f70df399b0a72a78e3e37b91ac3e1749fedfca9e
29,263
rs
Rust
src/log_specification.rs
petreeftime/flexi_logger
6ccc89f9aa12b15971ab82bd0e7456fc3d7d127d
[ "Apache-2.0", "MIT" ]
null
null
null
src/log_specification.rs
petreeftime/flexi_logger
6ccc89f9aa12b15971ab82bd0e7456fc3d7d127d
[ "Apache-2.0", "MIT" ]
null
null
null
src/log_specification.rs
petreeftime/flexi_logger
6ccc89f9aa12b15971ab82bd0e7456fc3d7d127d
[ "Apache-2.0", "MIT" ]
null
null
null
use crate::flexi_error::FlexiLoggerError; use crate::LevelFilter; #[cfg(feature = "textfilter")] use regex::Regex; use std::collections::HashMap; use std::env; /// /// Immutable struct that defines which loglines are to be written, /// based on the module, the log level, and the text. /// /// The loglevel specification via string (relevant for methods /// [parse()](struct.LogSpecification.html#method.parse) and /// [env()](struct.LogSpecification.html#method.env)) /// works essentially like with `env_logger`, /// but we are a bit more tolerant with spaces. Its functionality can be /// described with some Backus-Naur-form: /// /// ```text /// <log_level_spec> ::= single_log_level_spec[{,single_log_level_spec}][/<text_filter>] /// <single_log_level_spec> ::= <path_to_module>|<log_level>|<path_to_module>=<log_level> /// <text_filter> ::= <regex> /// ``` /// /// * Examples: /// /// * `"info"`: all logs with info, warn, or error level are written /// * `"crate1"`: all logs of this crate are written, but nothing else /// * `"warn, crate2::mod_a=debug, mod_x::mod_y=trace"`: all crates log warnings and errors, /// `mod_a` additionally debug messages, and `mod_x::mod_y` is fully traced /// /// * If you just specify the module, without `log_level`, all levels will be traced for this /// module. /// * If you just specify a log level, this will be applied as default to all modules without /// explicit log level assigment. /// (You see that for modules named error, warn, info, debug or trace, /// it is necessary to specify their loglevel explicitly). /// * The module names are compared as Strings, with the side effect that a specified module filter /// affects all modules whose name starts with this String.<br> /// Example: ```"foo"``` affects e.g. /// /// * `foo` /// * `foo::bar` /// * `foobaz` (!) /// * `foobaz::bar` (!) /// /// The optional text filter is applied for all modules. 
/// /// Note that external module names are to be specified like in ```"extern crate ..."```, i.e., /// for crates with a dash in their name this means: the dash is to be replaced with /// the underscore (e.g. ```karl_heinz```, not ```karl-heinz```). #[derive(Clone, Debug, Default)] pub struct LogSpecification { module_filters: Vec<ModuleFilter>, #[cfg(feature = "textfilter")] textfilter: Option<Regex>, } /// Defines which loglevel filter to use for the specified module. /// /// A `ModuleFilter`, whose `module_name` is not set, describes the default loglevel filter. #[derive(Clone, Debug, Eq, PartialEq)] pub struct ModuleFilter { /// The module name. pub module_name: Option<String>, /// The level filter. pub level_filter: LevelFilter, } impl LogSpecification { pub(crate) fn update_from(&mut self, other: Self) { self.module_filters = other.module_filters; #[cfg(feature = "textfilter")] { self.textfilter = other.textfilter; } } pub(crate) fn max_level(&self) -> log::LevelFilter { self.module_filters .iter() .map(|d| d.level_filter) .max() .unwrap_or(log::LevelFilter::Off) } /// Returns true if messages on the specified level from the writing module should be written pub fn enabled(&self, level: log::Level, writing_module: &str) -> bool { // Search for the longest match, the vector is assumed to be pre-sorted. for module_filter in &self.module_filters { match module_filter.module_name { Some(ref module_name) => { if writing_module.starts_with(module_name) { return level <= module_filter.level_filter; } } None => return level <= module_filter.level_filter, } } false } /// Returns a `LogSpecification` where all traces are switched off. #[must_use] pub fn off() -> Self { #[allow(clippy::default_trait_access)] Default::default() } /// Returns a log specification from a String. /// /// # Errors /// /// `FlexiLoggerError::Parse` if the input is malformed. 
pub fn parse(spec: &str) -> Result<Self, FlexiLoggerError> { let mut parse_errs = Vec::<String>::new(); let mut dirs = Vec::<ModuleFilter>::new(); let mut parts = spec.split('/'); let mods = parts.next(); #[cfg(feature = "textfilter")] let filter = parts.next(); if parts.next().is_some() { push_err( format!("invalid log spec '{}' (too many '/'s), ignoring it", spec), &mut parse_errs, ); return parse_err(parse_errs, Self::off()); } if let Some(m) = mods { for s in m.split(',') { let s = s.trim(); if s.is_empty() { continue; } let mut parts = s.split('='); let (log_level, name) = match ( parts.next().map(str::trim), parts.next().map(str::trim), parts.next(), ) { (Some(part_0), None, None) => { if contains_dash_or_whitespace(part_0, &mut parse_errs) { continue; } // if the single argument is a log-level string or number, // treat that as a global fallback setting match parse_level_filter(part_0.trim()) { Ok(num) => (num, None), Err(_) => (LevelFilter::max(), Some(part_0)), } } (Some(part_0), Some(""), None) => { if contains_dash_or_whitespace(part_0, &mut parse_errs) { continue; } (LevelFilter::max(), Some(part_0)) } (Some(part_0), Some(part_1), None) => { if contains_dash_or_whitespace(part_0, &mut parse_errs) { continue; } match parse_level_filter(part_1.trim()) { Ok(num) => (num, Some(part_0.trim())), Err(e) => { push_err(e.to_string(), &mut parse_errs); continue; } } } _ => { push_err( format!("invalid part in log spec '{}', ignoring it", s), &mut parse_errs, ); continue; } }; dirs.push(ModuleFilter { module_name: name.map(ToString::to_string), level_filter: log_level, }); } } #[cfg(feature = "textfilter")] let textfilter = filter.and_then(|filter| match Regex::new(filter) { Ok(re) => Some(re), Err(e) => { push_err(format!("invalid regex filter - {}", e), &mut parse_errs); None } }); let logspec = Self { module_filters: dirs.level_sort(), #[cfg(feature = "textfilter")] textfilter, }; if parse_errs.is_empty() { Ok(logspec) } else { 
Err(FlexiLoggerError::Parse(parse_errs, logspec)) } } /// Returns a log specification based on the value of the environment variable `RUST_LOG`, /// or an empty one. /// /// # Errors /// /// `FlexiLoggerError::Parse` if the input is malformed. pub fn env() -> Result<Self, FlexiLoggerError> { match env::var("RUST_LOG") { Ok(spec) => Self::parse(&spec), Err(..) => Ok(Self::off()), } } /// Returns a log specification based on the value of the environment variable `RUST_LOG`, /// or on the given String. /// /// # Errors /// /// `FlexiLoggerError::Parse` if the input is malformed. pub fn env_or_parse<S: AsRef<str>>(given_spec: S) -> Result<Self, FlexiLoggerError> { match env::var("RUST_LOG") { Ok(spec) => Self::parse(&spec), Err(..) => Self::parse(given_spec.as_ref()), } } /// Reads a log specification from an appropriate toml document. /// /// This method is only avaible with feature `specfile`. /// /// # Errors /// /// `FlexiLoggerError::Parse` if the input is malformed. #[cfg(feature = "specfile")] pub fn from_toml(s: &str) -> Result<Self, FlexiLoggerError> { #[derive(Clone, Debug, serde_derive::Deserialize)] struct LogSpecFileFormat { pub global_level: Option<String>, pub global_pattern: Option<String>, pub modules: std::collections::BTreeMap<String, String>, } let logspec_ff: LogSpecFileFormat = toml::from_str(s)?; let mut parse_errs = Vec::<String>::new(); let mut module_filters = Vec::<ModuleFilter>::new(); if let Some(s) = logspec_ff.global_level { module_filters.push(ModuleFilter { module_name: None, level_filter: parse_level_filter(s)?, }); } for (k, v) in logspec_ff.modules { module_filters.push(ModuleFilter { module_name: Some(k), level_filter: parse_level_filter(v)?, }); } #[cfg(feature = "textfilter")] let textfilter = match logspec_ff.global_pattern { None => None, Some(s) => match Regex::new(&s) { Ok(re) => Some(re), Err(e) => { push_err(format!("invalid regex filter - {}", e), &mut parse_errs); None } }, }; let logspec = Self { module_filters: 
module_filters.level_sort(), #[cfg(feature = "textfilter")] textfilter, }; if parse_errs.is_empty() { Ok(logspec) } else { Err(FlexiLoggerError::Parse(parse_errs, logspec)) } } /// Serializes itself in toml format. /// /// This method is only avaible with feature `specfile`. /// /// # Errors /// /// `FlexiLoggerError::Io` if writing fails. #[cfg(feature = "specfile")] pub fn to_toml(&self, w: &mut dyn std::io::Write) -> Result<(), FlexiLoggerError> { w.write_all(b"### Optional: Default log level\n")?; let last = self.module_filters.last(); if last.is_some() && last.as_ref().unwrap().module_name.is_none() { w.write_all( format!( "global_level = '{}'\n", last.as_ref() .unwrap() .level_filter .to_string() .to_lowercase() ) .as_bytes(), )?; } else { w.write_all(b"#global_level = 'info'\n")?; } w.write_all( b"\n### Optional: specify a regular expression to suppress all messages that don't match\n", )?; w.write_all(b"#global_pattern = 'foo'\n")?; w.write_all( b"\n### Specific log levels per module are optionally defined in this section\n", )?; w.write_all(b"[modules]\n")?; if self.module_filters.is_empty() || self.module_filters[0].module_name.is_none() { w.write_all(b"#'mod1' = 'warn'\n")?; w.write_all(b"#'mod2' = 'debug'\n")?; w.write_all(b"#'mod2::mod3' = 'trace'\n")?; } for mf in &self.module_filters { if mf.module_name.is_some() { w.write_all( format!( "'{}' = '{}'\n", mf.module_name.as_ref().unwrap(), mf.level_filter.to_string().to_lowercase() ) .as_bytes(), )?; } } Ok(()) } /// Creates a `LogSpecBuilder`, setting the default log level. #[must_use] pub fn default(level_filter: LevelFilter) -> LogSpecBuilder { LogSpecBuilder::from_module_filters(&[ModuleFilter { module_name: None, level_filter, }]) } /// Provides a reference to the module filters. pub fn module_filters(&self) -> &Vec<ModuleFilter> { &self.module_filters } /// Provides a reference to the text filter. 
#[cfg(feature = "textfilter")] pub fn text_filter(&self) -> &Option<Regex> { &(self.textfilter) } } fn push_err(s: String, parse_errs: &mut Vec<String>) { println!("flexi_logger warning: {}", s); parse_errs.push(s); } fn parse_err( errors: Vec<String>, logspec: LogSpecification, ) -> Result<LogSpecification, FlexiLoggerError> { Err(FlexiLoggerError::Parse(errors, logspec)) } // #[cfg(feature = "specfile")] fn parse_level_filter<S: AsRef<str>>(s: S) -> Result<LevelFilter, FlexiLoggerError> { match s.as_ref().to_lowercase().as_ref() { "off" => Ok(LevelFilter::Off), "error" => Ok(LevelFilter::Error), "warn" => Ok(LevelFilter::Warn), "info" => Ok(LevelFilter::Info), "debug" => Ok(LevelFilter::Debug), "trace" => Ok(LevelFilter::Trace), _ => Err(FlexiLoggerError::LevelFilter(format!( "unknown level filter: {}", s.as_ref() ))), } } fn contains_dash_or_whitespace(s: &str, parse_errs: &mut Vec<String>) -> bool { let result = s.find('-').is_some() || s.find(' ').is_some() || s.find('\t').is_some(); if result { push_err( format!( "ignoring invalid part in log spec '{}' (contains a dash or whitespace)", s ), parse_errs, ); } result } #[allow(clippy::needless_doctest_main)] /// Builder for `LogSpecification`. /// /// # Example /// /// Use the reconfigurability feature and build the log spec programmatically. /// /// ```rust /// use flexi_logger::{Logger, LogSpecBuilder}; /// use log::LevelFilter; /// /// fn main() { /// // Build the initial log specification /// let mut builder = LogSpecBuilder::new(); // default is LevelFilter::Off /// builder.default(LevelFilter::Info); /// builder.module("karl", LevelFilter::Debug); /// /// // Initialize Logger, keep builder alive /// let mut logger_reconf_handle = Logger::with(builder.build()) /// // your logger configuration goes here, as usual /// .start() /// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); /// /// // ... 
/// /// // Modify builder and update the logger /// builder.default(LevelFilter::Error); /// builder.remove("karl"); /// builder.module("emma", LevelFilter::Trace); /// /// logger_reconf_handle.set_new_spec(builder.build()); /// /// // ... /// } /// ``` #[derive(Clone, Debug, Default)] pub struct LogSpecBuilder { module_filters: HashMap<Option<String>, LevelFilter>, } impl LogSpecBuilder { /// Creates a `LogSpecBuilder` with all logging turned off. #[must_use] pub fn new() -> Self { let mut modfilmap = HashMap::new(); modfilmap.insert(None, LevelFilter::Off); Self { module_filters: modfilmap, } } /// Creates a `LogSpecBuilder` from given module filters. #[must_use] pub fn from_module_filters(module_filters: &[ModuleFilter]) -> Self { let mut modfilmap = HashMap::new(); for mf in module_filters { modfilmap.insert(mf.module_name.clone(), mf.level_filter); } Self { module_filters: modfilmap, } } /// Adds a default log level filter, or updates the default log level filter. pub fn default(&mut self, lf: LevelFilter) -> &mut Self { self.module_filters.insert(None, lf); self } /// Adds a log level filter, or updates the log level filter, for a module. pub fn module<M: AsRef<str>>(&mut self, module_name: M, lf: LevelFilter) -> &mut Self { self.module_filters .insert(Some(module_name.as_ref().to_owned()), lf); self } /// Adds a log level filter, or updates the log level filter, for a module. pub fn remove<M: AsRef<str>>(&mut self, module_name: M) -> &mut Self { self.module_filters .remove(&Some(module_name.as_ref().to_owned())); self } /// Creates a log specification without text filter. #[must_use] pub fn finalize(self) -> LogSpecification { LogSpecification { module_filters: self.module_filters.into_vec_module_filter(), #[cfg(feature = "textfilter")] textfilter: None, } } /// Creates a log specification with text filter. 
#[cfg(feature = "textfilter")] pub fn finalize_with_textfilter(self, tf: Regex) -> LogSpecification { LogSpecification { module_filters: self.module_filters.into_vec_module_filter(), textfilter: Some(tf), } } /// Creates a log specification without being consumed. #[must_use] pub fn build(&self) -> LogSpecification { LogSpecification { module_filters: self.module_filters.clone().into_vec_module_filter(), #[cfg(feature = "textfilter")] textfilter: None, } } /// Creates a log specification without being consumed, optionally with a text filter. #[cfg(feature = "textfilter")] pub fn build_with_textfilter(&self, tf: Option<Regex>) -> LogSpecification { LogSpecification { module_filters: self.module_filters.clone().into_vec_module_filter(), textfilter: tf, } } } trait IntoVecModuleFilter { fn into_vec_module_filter(self) -> Vec<ModuleFilter>; } impl IntoVecModuleFilter for HashMap<Option<String>, LevelFilter> { fn into_vec_module_filter(self) -> Vec<ModuleFilter> { let mf: Vec<ModuleFilter> = self .into_iter() .map(|(k, v)| ModuleFilter { module_name: k, level_filter: v, }) .collect(); mf.level_sort() } } trait LevelSort { fn level_sort(self) -> Vec<ModuleFilter>; } impl LevelSort for Vec<ModuleFilter> { /// Sort the module filters by length of their name, /// this allows a little more efficient lookup at runtime. 
fn level_sort(mut self) -> Vec<ModuleFilter> { self.sort_by(|a, b| { let a_len = a.module_name.as_ref().map_or(0, String::len); let b_len = b.module_name.as_ref().map_or(0, String::len); b_len.cmp(&a_len) }); self } } #[cfg(test)] mod tests { use crate::LogSpecification; use log::{Level, LevelFilter}; #[test] fn parse_logging_spec_valid() { let spec = LogSpecification::parse("crate1::mod1=error,crate1::mod2,crate2=debug").unwrap(); assert_eq!(spec.module_filters().len(), 3); assert_eq!( spec.module_filters()[0].module_name, Some("crate1::mod1".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); assert_eq!( spec.module_filters()[1].module_name, Some("crate1::mod2".to_string()) ); assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); assert_eq!( spec.module_filters()[2].module_name, Some("crate2".to_string()) ); assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); #[cfg(feature = "textfilter")] assert!(spec.text_filter().is_none()); } #[test] fn parse_logging_spec_invalid_crate() { // test parse_logging_spec with multiple = in specification assert!(LogSpecification::parse("crate1::mod1=warn=info,crate2=debug").is_err()); } #[test] fn parse_logging_spec_wrong_log_level() { assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=warn").is_err()); } #[test] fn parse_logging_spec_empty_log_level() { assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=").is_err()); } #[test] fn parse_logging_spec_global() { let spec = LogSpecification::parse("warn,crate2=debug").unwrap(); assert_eq!(spec.module_filters().len(), 2); assert_eq!(spec.module_filters()[1].module_name, None); assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Warn); assert_eq!( spec.module_filters()[0].module_name, Some("crate2".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Debug); #[cfg(feature = "textfilter")] assert!(spec.text_filter().is_none()); } #[test] #[cfg(feature = 
"textfilter")] fn parse_logging_spec_valid_filter() { let spec = LogSpecification::parse(" crate1::mod1 = error , crate1::mod2,crate2=debug/abc") .unwrap(); assert_eq!(spec.module_filters().len(), 3); assert_eq!( spec.module_filters()[0].module_name, Some("crate1::mod1".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); assert_eq!( spec.module_filters()[1].module_name, Some("crate1::mod2".to_string()) ); assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); assert_eq!( spec.module_filters()[2].module_name, Some("crate2".to_string()) ); assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); assert!( spec.text_filter().is_some() && spec.text_filter().as_ref().unwrap().to_string() == "abc" ); } #[test] fn parse_logging_spec_invalid_crate_filter() { assert!(LogSpecification::parse("crate1::mod1=error=warn,crate2=debug/a.c").is_err()); } #[test] fn parse_logging_spec_invalid_crate_with_dash() { assert!(LogSpecification::parse("karl-heinz::mod1=warn,crate2=debug/a.c").is_err()); } #[test] #[cfg(feature = "textfilter")] fn parse_logging_spec_empty_with_filter() { let spec = LogSpecification::parse("crate1/a*c").unwrap(); assert_eq!(spec.module_filters().len(), 1); assert_eq!( spec.module_filters()[0].module_name, Some("crate1".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::max()); assert!( spec.text_filter().is_some() && spec.text_filter().as_ref().unwrap().to_string() == "a*c" ); } #[test] fn reuse_logspec_builder() { let mut builder = crate::LogSpecBuilder::new(); builder.default(LevelFilter::Info); builder.module("carlo", LevelFilter::Debug); builder.module("toni", LevelFilter::Warn); let spec1 = builder.build(); assert_eq!( spec1.module_filters()[0].module_name, Some("carlo".to_string()) ); assert_eq!(spec1.module_filters()[0].level_filter, LevelFilter::Debug); assert_eq!( spec1.module_filters()[1].module_name, Some("toni".to_string()) ); 
assert_eq!(spec1.module_filters()[1].level_filter, LevelFilter::Warn); assert_eq!(spec1.module_filters().len(), 3); assert_eq!(spec1.module_filters()[2].module_name, None); assert_eq!(spec1.module_filters()[2].level_filter, LevelFilter::Info); builder.default(LevelFilter::Error); builder.remove("carlo"); builder.module("greta", LevelFilter::Trace); let spec2 = builder.build(); assert_eq!(spec2.module_filters().len(), 3); assert_eq!(spec2.module_filters()[2].module_name, None); assert_eq!(spec2.module_filters()[2].level_filter, LevelFilter::Error); assert_eq!( spec2.module_filters()[0].module_name, Some("greta".to_string()) ); assert_eq!(spec2.module_filters()[0].level_filter, LevelFilter::Trace); assert_eq!( spec2.module_filters()[1].module_name, Some("toni".to_string()) ); assert_eq!(spec2.module_filters()[1].level_filter, LevelFilter::Warn); } /////////////////////////////////////////////////////// /////////////////////////////////////////////////////// #[test] fn match_full_path() { let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); assert!(spec.enabled(Level::Warn, "crate1::mod1")); assert!(!spec.enabled(Level::Info, "crate1::mod1")); assert!(spec.enabled(Level::Info, "crate2")); assert!(!spec.enabled(Level::Debug, "crate2")); } #[test] fn no_match() { let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); assert!(!spec.enabled(Level::Warn, "crate3")); } #[test] fn match_beginning() { let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); assert!(spec.enabled(Level::Info, "crate2::mod1")); } #[test] fn match_beginning_longest_match() { let spec = LogSpecification::parse( "abcd = info, abcd::mod1 = error, klmn::mod = debug, klmn = info", ) .unwrap(); assert!(spec.enabled(Level::Error, "abcd::mod1::foo")); assert!(!spec.enabled(Level::Warn, "abcd::mod1::foo")); assert!(spec.enabled(Level::Warn, "abcd::mod2::foo")); assert!(!spec.enabled(Level::Debug, "abcd::mod2::foo")); 
assert!(!spec.enabled(Level::Debug, "klmn")); assert!(!spec.enabled(Level::Debug, "klmn::foo::bar")); assert!(spec.enabled(Level::Info, "klmn::foo::bar")); } #[test] fn match_default1() { let spec = LogSpecification::parse("info,abcd::mod1=warn").unwrap(); assert!(spec.enabled(Level::Warn, "abcd::mod1")); assert!(spec.enabled(Level::Info, "crate2::mod2")); } #[test] fn match_default2() { let spec = LogSpecification::parse("modxyz=error, info, abcd::mod1=warn").unwrap(); assert!(spec.enabled(Level::Warn, "abcd::mod1")); assert!(spec.enabled(Level::Info, "crate2::mod2")); } #[test] fn rocket() { let spec = LogSpecification::parse("info, rocket=off, serenity=off").unwrap(); assert!(spec.enabled(Level::Info, "itsme")); assert!(spec.enabled(Level::Warn, "abcd::mod1")); assert!(!spec.enabled(Level::Debug, "abcd::mod1")); assert!(!spec.enabled(Level::Error, "rocket::rocket")); assert!(!spec.enabled(Level::Warn, "rocket::rocket")); assert!(!spec.enabled(Level::Info, "rocket::rocket")); } #[test] fn zero_level() { let spec = LogSpecification::parse("info,crate1::mod1=off").unwrap(); assert!(!spec.enabled(Level::Error, "crate1::mod1")); assert!(spec.enabled(Level::Info, "crate2::mod2")); } } #[cfg(test)] #[cfg(feature = "specfile")] mod test_with_specfile { #[cfg(feature = "specfile")] use crate::LogSpecification; #[test] fn specfile() { compare_specs( "[modules]\n\ ", "", ); compare_specs( "global_level = 'info'\n\ \n\ [modules]\n\ ", "info", ); compare_specs( "global_level = 'info'\n\ \n\ [modules]\n\ 'mod1::mod2' = 'debug'\n\ 'mod3' = 'trace'\n\ ", "info, mod1::mod2 = debug, mod3 = trace", ); compare_specs( "global_level = 'info'\n\ global_pattern = 'Foo'\n\ \n\ [modules]\n\ 'mod1::mod2' = 'debug'\n\ 'mod3' = 'trace'\n\ ", "info, mod1::mod2 = debug, mod3 = trace /Foo", ); } #[cfg(feature = "specfile")] fn compare_specs(s1: &str, s2: &str) { let ls1 = LogSpecification::from_toml(s1).unwrap(); let ls2 = LogSpecification::parse(s2).unwrap(); assert_eq!(ls1.module_filters, 
ls2.module_filters); assert_eq!(ls1.textfilter.is_none(), ls2.textfilter.is_none()); if ls1.textfilter.is_some() && ls2.textfilter.is_some() { assert_eq!( ls1.textfilter.unwrap().to_string(), ls2.textfilter.unwrap().to_string() ); } } }
33.713134
104
0.548235
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_valid() {\n let spec = LogSpecification::parse(\"crate1::mod1=error,crate1::mod2,crate2=debug\").unwrap();\n assert_eq!(spec.module_filters().len(), 3);\n assert_eq!(\n spec.module_filters()[0].module_name,\n Some(\"crate1::mod1\".to_string())\n );\n assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error);\n\n assert_eq!(\n spec.module_filters()[1].module_name,\n Some(\"crate1::mod2\".to_string())\n );\n assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max());\n\n assert_eq!(\n spec.module_filters()[2].module_name,\n Some(\"crate2\".to_string())\n );\n assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug);\n\n #[cfg(feature = \"textfilter\")]\n assert!(spec.text_filter().is_none());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_invalid_crate() {\n // test parse_logging_spec with multiple = in specification\n assert!(LogSpecification::parse(\"crate1::mod1=warn=info,crate2=debug\").is_err());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_wrong_log_level() {\n assert!(LogSpecification::parse(\"crate1::mod1=wrong, crate2=warn\").is_err());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_empty_log_level() {\n assert!(LogSpecification::parse(\"crate1::mod1=wrong, crate2=\").is_err());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_global() {\n let spec = LogSpecification::parse(\"warn,crate2=debug\").unwrap();\n assert_eq!(spec.module_filters().len(), 2);\n\n assert_eq!(spec.module_filters()[1].module_name, None);\n assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Warn);\n\n assert_eq!(\n spec.module_filters()[0].module_name,\n Some(\"crate2\".to_string())\n );\n assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Debug);\n\n #[cfg(feature = \"textfilter\")]\n 
assert!(spec.text_filter().is_none());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_valid_filter() {\n let spec = LogSpecification::parse(\" crate1::mod1 = error , crate1::mod2,crate2=debug/abc\")\n .unwrap();\n assert_eq!(spec.module_filters().len(), 3);\n\n assert_eq!(\n spec.module_filters()[0].module_name,\n Some(\"crate1::mod1\".to_string())\n );\n assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error);\n\n assert_eq!(\n spec.module_filters()[1].module_name,\n Some(\"crate1::mod2\".to_string())\n );\n assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max());\n\n assert_eq!(\n spec.module_filters()[2].module_name,\n Some(\"crate2\".to_string())\n );\n assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug);\n assert!(\n spec.text_filter().is_some()\n && spec.text_filter().as_ref().unwrap().to_string() == \"abc\"\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_invalid_crate_filter() {\n assert!(LogSpecification::parse(\"crate1::mod1=error=warn,crate2=debug/a.c\").is_err());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_invalid_crate_with_dash() {\n assert!(LogSpecification::parse(\"karl-heinz::mod1=warn,crate2=debug/a.c\").is_err());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn parse_logging_spec_empty_with_filter() {\n let spec = LogSpecification::parse(\"crate1/a*c\").unwrap();\n assert_eq!(spec.module_filters().len(), 1);\n assert_eq!(\n spec.module_filters()[0].module_name,\n Some(\"crate1\".to_string())\n );\n assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::max());\n assert!(\n spec.text_filter().is_some()\n && spec.text_filter().as_ref().unwrap().to_string() == \"a*c\"\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn reuse_logspec_builder() {\n let mut builder = crate::LogSpecBuilder::new();\n\n builder.default(LevelFilter::Info);\n 
builder.module(\"carlo\", LevelFilter::Debug);\n builder.module(\"toni\", LevelFilter::Warn);\n let spec1 = builder.build();\n\n assert_eq!(\n spec1.module_filters()[0].module_name,\n Some(\"carlo\".to_string())\n );\n assert_eq!(spec1.module_filters()[0].level_filter, LevelFilter::Debug);\n\n assert_eq!(\n spec1.module_filters()[1].module_name,\n Some(\"toni\".to_string())\n );\n assert_eq!(spec1.module_filters()[1].level_filter, LevelFilter::Warn);\n\n assert_eq!(spec1.module_filters().len(), 3);\n assert_eq!(spec1.module_filters()[2].module_name, None);\n assert_eq!(spec1.module_filters()[2].level_filter, LevelFilter::Info);\n\n builder.default(LevelFilter::Error);\n builder.remove(\"carlo\");\n builder.module(\"greta\", LevelFilter::Trace);\n let spec2 = builder.build();\n\n assert_eq!(spec2.module_filters().len(), 3);\n assert_eq!(spec2.module_filters()[2].module_name, None);\n assert_eq!(spec2.module_filters()[2].level_filter, LevelFilter::Error);\n\n assert_eq!(\n spec2.module_filters()[0].module_name,\n Some(\"greta\".to_string())\n );\n assert_eq!(spec2.module_filters()[0].level_filter, LevelFilter::Trace);\n\n assert_eq!(\n spec2.module_filters()[1].module_name,\n Some(\"toni\".to_string())\n );\n assert_eq!(spec2.module_filters()[1].level_filter, LevelFilter::Warn);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_full_path() {\n let spec = LogSpecification::parse(\"crate2=info,crate1::mod1=warn\").unwrap();\n assert!(spec.enabled(Level::Warn, \"crate1::mod1\"));\n assert!(!spec.enabled(Level::Info, \"crate1::mod1\"));\n assert!(spec.enabled(Level::Info, \"crate2\"));\n assert!(!spec.enabled(Level::Debug, \"crate2\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn no_match() {\n let spec = LogSpecification::parse(\"crate2=info,crate1::mod1=warn\").unwrap();\n assert!(!spec.enabled(Level::Warn, \"crate3\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_beginning() {\n let spec = 
LogSpecification::parse(\"crate2=info,crate1::mod1=warn\").unwrap();\n assert!(spec.enabled(Level::Info, \"crate2::mod1\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_beginning_longest_match() {\n let spec = LogSpecification::parse(\n \"abcd = info, abcd::mod1 = error, klmn::mod = debug, klmn = info\",\n )\n .unwrap();\n assert!(spec.enabled(Level::Error, \"abcd::mod1::foo\"));\n assert!(!spec.enabled(Level::Warn, \"abcd::mod1::foo\"));\n assert!(spec.enabled(Level::Warn, \"abcd::mod2::foo\"));\n assert!(!spec.enabled(Level::Debug, \"abcd::mod2::foo\"));\n\n assert!(!spec.enabled(Level::Debug, \"klmn\"));\n assert!(!spec.enabled(Level::Debug, \"klmn::foo::bar\"));\n assert!(spec.enabled(Level::Info, \"klmn::foo::bar\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_default1() {\n let spec = LogSpecification::parse(\"info,abcd::mod1=warn\").unwrap();\n assert!(spec.enabled(Level::Warn, \"abcd::mod1\"));\n assert!(spec.enabled(Level::Info, \"crate2::mod2\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn match_default2() {\n let spec = LogSpecification::parse(\"modxyz=error, info, abcd::mod1=warn\").unwrap();\n assert!(spec.enabled(Level::Warn, \"abcd::mod1\"));\n assert!(spec.enabled(Level::Info, \"crate2::mod2\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn rocket() {\n let spec = LogSpecification::parse(\"info, rocket=off, serenity=off\").unwrap();\n assert!(spec.enabled(Level::Info, \"itsme\"));\n assert!(spec.enabled(Level::Warn, \"abcd::mod1\"));\n assert!(!spec.enabled(Level::Debug, \"abcd::mod1\"));\n assert!(!spec.enabled(Level::Error, \"rocket::rocket\"));\n assert!(!spec.enabled(Level::Warn, \"rocket::rocket\"));\n assert!(!spec.enabled(Level::Info, \"rocket::rocket\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn zero_level() {\n let spec = LogSpecification::parse(\"info,crate1::mod1=off\").unwrap();\n 
assert!(!spec.enabled(Level::Error, \"crate1::mod1\"));\n assert!(spec.enabled(Level::Info, \"crate2::mod2\"));\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn specfile() {\n compare_specs(\n \"[modules]\\n\\\n \",\n \"\",\n );\n\n compare_specs(\n \"global_level = 'info'\\n\\\n \\n\\\n [modules]\\n\\\n \",\n \"info\",\n );\n\n compare_specs(\n \"global_level = 'info'\\n\\\n \\n\\\n [modules]\\n\\\n 'mod1::mod2' = 'debug'\\n\\\n 'mod3' = 'trace'\\n\\\n \",\n \"info, mod1::mod2 = debug, mod3 = trace\",\n );\n\n compare_specs(\n \"global_level = 'info'\\n\\\n global_pattern = 'Foo'\\n\\\n \\n\\\n [modules]\\n\\\n 'mod1::mod2' = 'debug'\\n\\\n 'mod3' = 'trace'\\n\\\n \",\n \"info, mod1::mod2 = debug, mod3 = trace /Foo\",\n );\n }\n}" ]
f70dfe34c32ffec9981c08d030fea8ac667d5b78
1,583
rs
Rust
src/test/instruction_tests/instr_ucomiss.rs
epakskape/rust-x86asm
adb4128f7b12642336a919e32bd56509c9a835d4
[ "MIT" ]
49
2017-10-31T10:26:54.000Z
2021-07-06T09:04:12.000Z
src/test/instruction_tests/instr_ucomiss.rs
epakskape/rust-x86asm
adb4128f7b12642336a919e32bd56509c9a835d4
[ "MIT" ]
6
2018-02-28T05:57:28.000Z
2020-01-05T01:54:41.000Z
src/test/instruction_tests/instr_ucomiss.rs
epakskape/rust-x86asm
adb4128f7b12642336a919e32bd56509c9a835d4
[ "MIT" ]
7
2018-09-09T13:08:16.000Z
2020-06-14T00:06:07.000Z
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn ucomiss_1() { run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 207], OperandSize::Dword) } #[test] fn ucomiss_2() { run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledIndexedDisplaced(EBX, EBX, Eight, 1717669082, Some(OperandSize::Dword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 188, 219, 218, 140, 97, 102], OperandSize::Dword) } #[test] fn ucomiss_3() { run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 215], OperandSize::Qword) } #[test] fn ucomiss_4() { run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM2)), operand2: Some(Indirect(RSI, Some(OperandSize::Dword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 22], OperandSize::Qword) }
54.586207
382
0.706254
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ucomiss_1() {\n run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 207], OperandSize::Dword)\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ucomiss_2() {\n run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledIndexedDisplaced(EBX, EBX, Eight, 1717669082, Some(OperandSize::Dword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 188, 219, 218, 140, 97, 102], OperandSize::Dword)\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ucomiss_3() {\n run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 215], OperandSize::Qword)\n}\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn ucomiss_4() {\n run_test(&Instruction { mnemonic: Mnemonic::UCOMISS, operand1: Some(Direct(XMM2)), operand2: Some(Indirect(RSI, Some(OperandSize::Dword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 46, 22], OperandSize::Qword)\n}\n}" ]
f70e4881bdb5a408a540a0fae2a3e59e0be26bc3
7,080
rs
Rust
molt/src/util.rs
rfdonnelly/molt
872fc769c1290cd5f0bc775262b431448c9e1d06
[ "BSD-3-Clause" ]
null
null
null
molt/src/util.rs
rfdonnelly/molt
872fc769c1290cd5f0bc775262b431448c9e1d06
[ "BSD-3-Clause" ]
null
null
null
molt/src/util.rs
rfdonnelly/molt
872fc769c1290cd5f0bc775262b431448c9e1d06
[ "BSD-3-Clause" ]
null
null
null
//! Internal Utilities //! //! This module contains function for use by molt only. use crate::tokenizer::Tokenizer; pub fn is_varname_char(ch: char) -> bool { ch.is_alphanumeric() || ch == '_' } /// Reads the integer string from the head of the input. If the function returns `Some`, /// the value is the integer string that was read, and the `ptr` points to the following /// character. Otherwise the `ptr` will be unchanged. /// /// The string may consist of: /// /// * A unary plus or minus /// * One or more decimal digits. /// /// ## Notes /// /// * The resulting string has the form of an integer, but might be out of the valid range. pub fn read_int(ptr: &mut Tokenizer) -> Option<String> { let mut p = ptr.clone(); let mut result = String::new(); let mut missing_digits = true; // FIRST, skip a unary operator. if p.is('+') || p.is('-') { result.push(p.next().unwrap()); } // NEXT, skip a "0x". let mut radix = 10; if p.is('0') { result.push(p.next().unwrap()); if p.is('x') { result.push(p.next().unwrap()); radix = 16; } else { missing_digits = false; } } // NEXT, read the digits while p.has(|ch| ch.is_digit(radix)) { missing_digits = false; result.push(p.next().unwrap()); } if result.is_empty() || missing_digits { None } else { ptr.skip_over(result.len()); Some(result) } } /// Reads the floating point string from the head of the input. If the function returns `Some`, /// the value is the string that was read, and the `ptr` points to the following character. /// Otherwise the `ptr` will be unchanged. /// /// The string will consist of: /// /// * Possibly, a unary plus/minus /// * "Inf" (case insensitive), -OR- /// * A number: /// * Some number of decimal digits, optionally containing a ".". /// * An optional exponent beginning with "e" or "E" /// * The exponent may contain a + or -, followed by some number of digits. /// /// ## Notes /// /// * The resulting string has the form of a floating point number but might be out of the /// valid range. 
pub fn read_float(ptr: &mut Tokenizer) -> Option<String> { let mut p = ptr.clone(); let mut result = String::new(); let mut missing_mantissa = true; let mut missing_exponent = false; // FIRST, skip a unary operator. if p.is('+') || p.is('-') { result.push(p.next().unwrap()); } // NEXT, looking for Inf if p.is('I') || p.is('i') { result.push(p.next().unwrap()); if p.is('N') || p.is('n') { result.push(p.next().unwrap()); } else { return None; } if p.is('F') || p.is('f') { result.push(p.next().unwrap()); // Update the pointer. ptr.skip_over(result.len()); return Some(result); } else { return None; } } // NEXT, get any integer digits while p.has(|ch| ch.is_digit(10)) { missing_mantissa = false; result.push(p.next().unwrap()); } // NEXT, get any fractional part. if p.is('.') { result.push(p.next().unwrap()); while p.has(|ch| ch.is_digit(10)) { missing_mantissa = false; result.push(p.next().unwrap()); } } // NEXT, get any exponent. if p.is('e') || p.is('E') { missing_exponent = true; result.push(p.next().unwrap()); if p.is('+') || p.is('-') { result.push(p.next().unwrap()); } while p.has(|ch| ch.is_digit(10)) { missing_exponent = false; result.push(p.next().unwrap()); } } if result.is_empty() || missing_mantissa || missing_exponent { None } else { // Update the pointer. 
ptr.skip_over(result.len()); Some(result) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_util_read_int() { let mut p = Tokenizer::new("abc"); assert_eq!(None, read_int(&mut p)); assert_eq!(Some('a'), p.peek()); let mut p = Tokenizer::new("-abc"); assert_eq!(None, read_int(&mut p)); assert_eq!(Some('-'), p.peek()); let mut p = Tokenizer::new("+abc"); assert_eq!(None, read_int(&mut p)); assert_eq!(Some('+'), p.peek()); let mut p = Tokenizer::new("123"); assert_eq!(Some("123".into()), read_int(&mut p)); assert_eq!(None, p.peek()); let mut p = Tokenizer::new("123abc"); assert_eq!(Some("123".into()), read_int(&mut p)); assert_eq!(Some('a'), p.peek()); let mut p = Tokenizer::new("+123abc"); assert_eq!(Some("+123".into()), read_int(&mut p)); assert_eq!(Some('a'), p.peek()); let mut p = Tokenizer::new("-123abc"); assert_eq!(Some("-123".into()), read_int(&mut p)); assert_eq!(Some('a'), p.peek()); } #[test] #[allow(clippy::cognitive_complexity)] fn test_util_read_float() { let mut p = Tokenizer::new("abc"); assert_eq!(None, read_float(&mut p)); assert_eq!(Some('a'), p.peek()); let mut p = Tokenizer::new("-abc"); assert_eq!(None, read_float(&mut p)); assert_eq!(Some('-'), p.peek()); let mut p = Tokenizer::new("+abc"); assert_eq!(None, read_float(&mut p)); assert_eq!(Some('+'), p.peek()); let mut p = Tokenizer::new("123"); assert_eq!(Some("123".into()), read_float(&mut p)); assert_eq!(None, p.peek()); let mut p = Tokenizer::new("123abc"); assert_eq!(Some("123".into()), read_float(&mut p)); assert_eq!(Some('a'), p.peek()); let mut p = Tokenizer::new("123."); assert_eq!(Some("123.".into()), read_float(&mut p)); assert_eq!(None, p.peek()); let mut p = Tokenizer::new(".123"); assert_eq!(Some(".123".into()), read_float(&mut p)); assert_eq!(None, p.peek()); let mut p = Tokenizer::new("123.123"); assert_eq!(Some("123.123".into()), read_float(&mut p)); assert_eq!(None, p.peek()); let mut p = Tokenizer::new("1e5"); assert_eq!(Some("1e5".into()), read_float(&mut p)); 
assert_eq!(None, p.peek()); let mut p = Tokenizer::new("1e+5"); assert_eq!(Some("1e+5".into()), read_float(&mut p)); assert_eq!(None, p.peek()); let mut p = Tokenizer::new("1e-5"); assert_eq!(Some("1e-5".into()), read_float(&mut p)); assert_eq!(None, p.peek()); let mut p = Tokenizer::new("1.1e1a"); assert_eq!(Some("1.1e1".into()), read_float(&mut p)); assert_eq!(Some('a'), p.peek()); let mut p = Tokenizer::new("+123abc"); assert_eq!(Some("+123".into()), read_float(&mut p)); assert_eq!(Some('a'), p.peek()); let mut p = Tokenizer::new("-123abc"); assert_eq!(Some("-123".into()), read_float(&mut p)); assert_eq!(Some('a'), p.peek()); } }
28.897959
96
0.535169
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_util_read_int() {\n let mut p = Tokenizer::new(\"abc\");\n assert_eq!(None, read_int(&mut p));\n assert_eq!(Some('a'), p.peek());\n\n let mut p = Tokenizer::new(\"-abc\");\n assert_eq!(None, read_int(&mut p));\n assert_eq!(Some('-'), p.peek());\n\n let mut p = Tokenizer::new(\"+abc\");\n assert_eq!(None, read_int(&mut p));\n assert_eq!(Some('+'), p.peek());\n\n let mut p = Tokenizer::new(\"123\");\n assert_eq!(Some(\"123\".into()), read_int(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\"123abc\");\n assert_eq!(Some(\"123\".into()), read_int(&mut p));\n assert_eq!(Some('a'), p.peek());\n\n let mut p = Tokenizer::new(\"+123abc\");\n assert_eq!(Some(\"+123\".into()), read_int(&mut p));\n assert_eq!(Some('a'), p.peek());\n\n let mut p = Tokenizer::new(\"-123abc\");\n assert_eq!(Some(\"-123\".into()), read_int(&mut p));\n assert_eq!(Some('a'), p.peek());\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_util_read_float() {\n let mut p = Tokenizer::new(\"abc\");\n assert_eq!(None, read_float(&mut p));\n assert_eq!(Some('a'), p.peek());\n\n let mut p = Tokenizer::new(\"-abc\");\n assert_eq!(None, read_float(&mut p));\n assert_eq!(Some('-'), p.peek());\n\n let mut p = Tokenizer::new(\"+abc\");\n assert_eq!(None, read_float(&mut p));\n assert_eq!(Some('+'), p.peek());\n\n let mut p = Tokenizer::new(\"123\");\n assert_eq!(Some(\"123\".into()), read_float(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\"123abc\");\n assert_eq!(Some(\"123\".into()), read_float(&mut p));\n assert_eq!(Some('a'), p.peek());\n\n let mut p = Tokenizer::new(\"123.\");\n assert_eq!(Some(\"123.\".into()), read_float(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\".123\");\n assert_eq!(Some(\".123\".into()), read_float(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\"123.123\");\n assert_eq!(Some(\"123.123\".into()), 
read_float(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\"1e5\");\n assert_eq!(Some(\"1e5\".into()), read_float(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\"1e+5\");\n assert_eq!(Some(\"1e+5\".into()), read_float(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\"1e-5\");\n assert_eq!(Some(\"1e-5\".into()), read_float(&mut p));\n assert_eq!(None, p.peek());\n\n let mut p = Tokenizer::new(\"1.1e1a\");\n assert_eq!(Some(\"1.1e1\".into()), read_float(&mut p));\n assert_eq!(Some('a'), p.peek());\n\n let mut p = Tokenizer::new(\"+123abc\");\n assert_eq!(Some(\"+123\".into()), read_float(&mut p));\n assert_eq!(Some('a'), p.peek());\n\n let mut p = Tokenizer::new(\"-123abc\");\n assert_eq!(Some(\"-123\".into()), read_float(&mut p));\n assert_eq!(Some('a'), p.peek());\n }\n}" ]
f70e5f429013b177c34272f9f70d21669efdc248
29,196
rs
Rust
src/postings/segment_postings.rs
Frando/tantivy
35236c8634297cbb7dd3302d519d7c1a776439f5
[ "MIT" ]
1
2019-07-19T02:15:02.000Z
2019-07-19T02:15:02.000Z
src/postings/segment_postings.rs
Frando/tantivy
35236c8634297cbb7dd3302d519d7c1a776439f5
[ "MIT" ]
null
null
null
src/postings/segment_postings.rs
Frando/tantivy
35236c8634297cbb7dd3302d519d7c1a776439f5
[ "MIT" ]
null
null
null
use crate::common::BitSet; use crate::common::HasLen; use crate::common::{BinarySerializable, VInt}; use crate::docset::{DocSet, SkipResult}; use crate::positions::PositionReader; use crate::postings::compression::{compressed_block_size, AlignedBuffer}; use crate::postings::compression::{BlockDecoder, VIntDecoder, COMPRESSION_BLOCK_SIZE}; use crate::postings::serializer::PostingsSerializer; use crate::postings::BlockSearcher; use crate::postings::FreqReadingOption; use crate::postings::Postings; use crate::postings::SkipReader; use crate::postings::USE_SKIP_INFO_LIMIT; use crate::schema::IndexRecordOption; use crate::DocId; use owned_read::OwnedRead; use std::cmp::Ordering; use tantivy_fst::Streamer; struct PositionComputer { // store the amount of position int // before reading positions. // // if none, position are already loaded in // the positions vec. position_to_skip: usize, position_reader: PositionReader, } impl PositionComputer { pub fn new(position_reader: PositionReader) -> PositionComputer { PositionComputer { position_to_skip: 0, position_reader, } } pub fn add_skip(&mut self, num_skip: usize) { self.position_to_skip += num_skip; } // Positions can only be read once. pub fn positions_with_offset(&mut self, offset: u32, output: &mut [u32]) { self.position_reader.skip(self.position_to_skip); self.position_to_skip = 0; self.position_reader.read(output); let mut cum = offset; for output_mut in output.iter_mut() { cum += *output_mut; *output_mut = cum; } } } /// `SegmentPostings` represents the inverted list or postings associated to /// a term in a `Segment`. /// /// As we iterate through the `SegmentPostings`, the frequencies are optionally decoded. /// Positions on the other hand, are optionally entirely decoded upfront. 
pub struct SegmentPostings { block_cursor: BlockSegmentPostings, cur: usize, position_computer: Option<PositionComputer>, block_searcher: BlockSearcher, } impl SegmentPostings { /// Returns an empty segment postings object pub fn empty() -> Self { let empty_block_cursor = BlockSegmentPostings::empty(); SegmentPostings { block_cursor: empty_block_cursor, cur: COMPRESSION_BLOCK_SIZE, position_computer: None, block_searcher: BlockSearcher::default(), } } /// Creates a segment postings object with the given documents /// and no frequency encoded. /// /// This method is mostly useful for unit tests. /// /// It serializes the doc ids using tantivy's codec /// and returns a `SegmentPostings` object that embeds a /// buffer with the serialized data. pub fn create_from_docs(docs: &[u32]) -> SegmentPostings { let mut buffer = Vec::new(); { let mut postings_serializer = PostingsSerializer::new(&mut buffer, false, false); for &doc in docs { postings_serializer.write_doc(doc, 1u32); } postings_serializer .close_term(docs.len() as u32) .expect("In memory Serialization should never fail."); } let block_segment_postings = BlockSegmentPostings::from_data( docs.len() as u32, OwnedRead::new(buffer), IndexRecordOption::Basic, IndexRecordOption::Basic, ); SegmentPostings::from_block_postings(block_segment_postings, None) } } impl SegmentPostings { /// Reads a Segment postings from an &[u8] /// /// * `len` - number of document in the posting lists. /// * `data` - data array. The complete data is not necessarily used. 
/// * `freq_handler` - the freq handler is in charge of decoding /// frequencies and/or positions pub(crate) fn from_block_postings( segment_block_postings: BlockSegmentPostings, positions_stream_opt: Option<PositionReader>, ) -> SegmentPostings { SegmentPostings { block_cursor: segment_block_postings, cur: COMPRESSION_BLOCK_SIZE, // cursor within the block position_computer: positions_stream_opt.map(PositionComputer::new), block_searcher: BlockSearcher::default(), } } } impl DocSet for SegmentPostings { // goes to the next element. // next needs to be called a first time to point to the correct element. #[inline] fn advance(&mut self) -> bool { if self.position_computer.is_some() && self.cur < COMPRESSION_BLOCK_SIZE { let term_freq = self.term_freq() as usize; if let Some(position_computer) = self.position_computer.as_mut() { position_computer.add_skip(term_freq); } } self.cur += 1; if self.cur >= self.block_cursor.block_len() { self.cur = 0; if !self.block_cursor.advance() { self.cur = COMPRESSION_BLOCK_SIZE; return false; } } true } fn skip_next(&mut self, target: DocId) -> SkipResult { if !self.advance() { return SkipResult::End; } match self.doc().cmp(&target) { Ordering::Equal => { return SkipResult::Reached; } Ordering::Greater => { return SkipResult::OverStep; } _ => { // ... } } // In the following, thanks to the call to advance above, // we know that the position is not loaded and we need // to skip every doc_freq we cross. // skip blocks until one that might contain the target // check if we need to go to the next block let mut sum_freqs_skipped: u32 = 0; if !self .block_cursor .docs() .last() .map(|doc| *doc >= target) .unwrap_or(false) // there should always be at least a document in the block // since advance returned. { // we are not in the right block. // // First compute all of the freqs skipped from the current block. 
if self.position_computer.is_some() { sum_freqs_skipped = self.block_cursor.freqs()[self.cur..].iter().sum(); match self.block_cursor.skip_to(target) { BlockSegmentPostingsSkipResult::Success(block_skip_freqs) => { sum_freqs_skipped += block_skip_freqs; } BlockSegmentPostingsSkipResult::Terminated => { return SkipResult::End; } } } else if self.block_cursor.skip_to(target) == BlockSegmentPostingsSkipResult::Terminated { // no positions needed. no need to sum freqs. return SkipResult::End; } self.cur = 0; } let cur = self.cur; // we're in the right block now, start with an exponential search let (output, len) = self.block_cursor.docs_aligned(); let new_cur = self .block_searcher .search_in_block(&output, len, cur, target); if let Some(position_computer) = self.position_computer.as_mut() { sum_freqs_skipped += self.block_cursor.freqs()[cur..new_cur].iter().sum::<u32>(); position_computer.add_skip(sum_freqs_skipped as usize); } self.cur = new_cur; // `doc` is now the first element >= `target` let doc = output.0[new_cur]; debug_assert!(doc >= target); if doc == target { SkipResult::Reached } else { SkipResult::OverStep } } /// Return the current document's `DocId`. /// /// # Panics /// /// Will panics if called without having called advance before. #[inline] fn doc(&self) -> DocId { let docs = self.block_cursor.docs(); debug_assert!( self.cur < docs.len(), "Have you forgotten to call `.advance()` at least once before calling `.doc()` ." ); docs[self.cur] } fn size_hint(&self) -> u32 { self.len() as u32 } fn append_to_bitset(&mut self, bitset: &mut BitSet) { // finish the current block if self.advance() { for &doc in &self.block_cursor.docs()[self.cur..] { bitset.insert(doc); } // ... iterate through the remaining blocks. 
while self.block_cursor.advance() { for &doc in self.block_cursor.docs() { bitset.insert(doc); } } } } } impl HasLen for SegmentPostings { fn len(&self) -> usize { self.block_cursor.doc_freq() } } impl Postings for SegmentPostings { /// Returns the frequency associated to the current document. /// If the schema is set up so that no frequency have been encoded, /// this method should always return 1. /// /// # Panics /// /// Will panics if called without having called advance before. fn term_freq(&self) -> u32 { debug_assert!( // Here we do not use the len of `freqs()` // because it is actually ok to request for the freq of doc // even if no frequency were encoded for the field. // // In that case we hit the block just as if the frequency had been // decoded. The block is simply prefilled by the value 1. self.cur < COMPRESSION_BLOCK_SIZE, "Have you forgotten to call `.advance()` at least once before calling \ `.term_freq()`." ); self.block_cursor.freq(self.cur) } fn positions_with_offset(&mut self, offset: u32, output: &mut Vec<u32>) { let term_freq = self.term_freq() as usize; if let Some(position_comp) = self.position_computer.as_mut() { output.resize(term_freq, 0u32); position_comp.positions_with_offset(offset, &mut output[..]); } else { output.clear(); } } } /// `BlockSegmentPostings` is a cursor iterating over blocks /// of documents. /// /// # Warning /// /// While it is useful for some very specific high-performance /// use cases, you should prefer using `SegmentPostings` for most usage. 
pub struct BlockSegmentPostings { doc_decoder: BlockDecoder, freq_decoder: BlockDecoder, freq_reading_option: FreqReadingOption, doc_freq: usize, doc_offset: DocId, num_vint_docs: usize, remaining_data: OwnedRead, skip_reader: SkipReader, } fn split_into_skips_and_postings( doc_freq: u32, mut data: OwnedRead, ) -> (Option<OwnedRead>, OwnedRead) { if doc_freq >= USE_SKIP_INFO_LIMIT { let skip_len = VInt::deserialize(&mut data).expect("Data corrupted").0 as usize; let mut postings_data = data.clone(); postings_data.advance(skip_len); data.clip(skip_len); (Some(data), postings_data) } else { (None, data) } } #[derive(Debug, Eq, PartialEq)] pub enum BlockSegmentPostingsSkipResult { Terminated, Success(u32), //< number of term freqs to skip } impl BlockSegmentPostings { pub(crate) fn from_data( doc_freq: u32, data: OwnedRead, record_option: IndexRecordOption, requested_option: IndexRecordOption, ) -> BlockSegmentPostings { let freq_reading_option = match (record_option, requested_option) { (IndexRecordOption::Basic, _) => FreqReadingOption::NoFreq, (_, IndexRecordOption::Basic) => FreqReadingOption::SkipFreq, (_, _) => FreqReadingOption::ReadFreq, }; let (skip_data_opt, postings_data) = split_into_skips_and_postings(doc_freq, data); let skip_reader = match skip_data_opt { Some(skip_data) => SkipReader::new(skip_data, record_option), None => SkipReader::new(OwnedRead::new(&[][..]), record_option), }; let doc_freq = doc_freq as usize; let num_vint_docs = doc_freq % COMPRESSION_BLOCK_SIZE; BlockSegmentPostings { num_vint_docs, doc_decoder: BlockDecoder::new(), freq_decoder: BlockDecoder::with_val(1), freq_reading_option, doc_offset: 0, doc_freq, remaining_data: postings_data, skip_reader, } } // Resets the block segment postings on another position // in the postings file. // // This is useful for enumerating through a list of terms, // and consuming the associated posting lists while avoiding // reallocating a `BlockSegmentPostings`. 
// // # Warning // // This does not reset the positions list. pub(crate) fn reset(&mut self, doc_freq: u32, postings_data: OwnedRead) { let (skip_data_opt, postings_data) = split_into_skips_and_postings(doc_freq, postings_data); let num_vint_docs = (doc_freq as usize) & (COMPRESSION_BLOCK_SIZE - 1); self.num_vint_docs = num_vint_docs; self.remaining_data = postings_data; if let Some(skip_data) = skip_data_opt { self.skip_reader.reset(skip_data); } else { self.skip_reader.reset(OwnedRead::new(&[][..])) } self.doc_offset = 0; self.doc_freq = doc_freq as usize; } /// Returns the document frequency associated to this block postings. /// /// This `doc_freq` is simply the sum of the length of all of the blocks /// length, and it does not take in account deleted documents. pub fn doc_freq(&self) -> usize { self.doc_freq } /// Returns the array of docs in the current block. /// /// Before the first call to `.advance()`, the block /// returned by `.docs()` is empty. #[inline] pub fn docs(&self) -> &[DocId] { self.doc_decoder.output_array() } pub(crate) fn docs_aligned(&self) -> (&AlignedBuffer, usize) { self.doc_decoder.output_aligned() } /// Return the document at index `idx` of the block. #[inline] pub fn doc(&self, idx: usize) -> u32 { self.doc_decoder.output(idx) } /// Return the array of `term freq` in the block. #[inline] pub fn freqs(&self) -> &[u32] { self.freq_decoder.output_array() } /// Return the frequency at index `idx` of the block. #[inline] pub fn freq(&self, idx: usize) -> u32 { self.freq_decoder.output(idx) } /// Returns the length of the current block. /// /// All blocks have a length of `NUM_DOCS_PER_BLOCK`, /// except the last block that may have a length /// of any number between 1 and `NUM_DOCS_PER_BLOCK - 1` #[inline] fn block_len(&self) -> usize { self.doc_decoder.output_len } /// position on a block that may contains `doc_id`. /// Always advance the current block. 
/// /// Returns true if a block that has an element greater or equal to the target is found. /// Returning true does not guarantee that the smallest element of the block is smaller /// than the target. It only guarantees that the last element is greater or equal. /// /// Returns false iff all of the document remaining are smaller than /// `doc_id`. In that case, all of these document are consumed. /// pub fn skip_to(&mut self, target_doc: DocId) -> BlockSegmentPostingsSkipResult { let mut skip_freqs = 0u32; while self.skip_reader.advance() { if self.skip_reader.doc() >= target_doc { // the last document of the current block is larger // than the target. // // We found our block! let num_bits = self.skip_reader.doc_num_bits(); let num_consumed_bytes = self.doc_decoder.uncompress_block_sorted( self.remaining_data.as_ref(), self.doc_offset, num_bits, ); self.remaining_data.advance(num_consumed_bytes); let tf_num_bits = self.skip_reader.tf_num_bits(); match self.freq_reading_option { FreqReadingOption::NoFreq => {} FreqReadingOption::SkipFreq => { let num_bytes_to_skip = compressed_block_size(tf_num_bits); self.remaining_data.advance(num_bytes_to_skip); } FreqReadingOption::ReadFreq => { let num_consumed_bytes = self .freq_decoder .uncompress_block_unsorted(self.remaining_data.as_ref(), tf_num_bits); self.remaining_data.advance(num_consumed_bytes); } } self.doc_offset = self.skip_reader.doc(); return BlockSegmentPostingsSkipResult::Success(skip_freqs); } else { skip_freqs += self.skip_reader.tf_sum(); let advance_len = self.skip_reader.total_block_len(); self.doc_offset = self.skip_reader.doc(); self.remaining_data.advance(advance_len); } } // we are now on the last, incomplete, variable encoded block. 
if self.num_vint_docs > 0 { let num_compressed_bytes = self.doc_decoder.uncompress_vint_sorted( self.remaining_data.as_ref(), self.doc_offset, self.num_vint_docs, ); self.remaining_data.advance(num_compressed_bytes); match self.freq_reading_option { FreqReadingOption::NoFreq | FreqReadingOption::SkipFreq => {} FreqReadingOption::ReadFreq => { self.freq_decoder .uncompress_vint_unsorted(self.remaining_data.as_ref(), self.num_vint_docs); } } self.num_vint_docs = 0; return self .docs() .last() .map(|last_doc| { if *last_doc >= target_doc { BlockSegmentPostingsSkipResult::Success(skip_freqs) } else { BlockSegmentPostingsSkipResult::Terminated } }) .unwrap_or(BlockSegmentPostingsSkipResult::Terminated); } BlockSegmentPostingsSkipResult::Terminated } /// Advance to the next block. /// /// Returns false iff there was no remaining blocks. pub fn advance(&mut self) -> bool { if self.skip_reader.advance() { let num_bits = self.skip_reader.doc_num_bits(); let num_consumed_bytes = self.doc_decoder.uncompress_block_sorted( self.remaining_data.as_ref(), self.doc_offset, num_bits, ); self.remaining_data.advance(num_consumed_bytes); let tf_num_bits = self.skip_reader.tf_num_bits(); match self.freq_reading_option { FreqReadingOption::NoFreq => {} FreqReadingOption::SkipFreq => { let num_bytes_to_skip = compressed_block_size(tf_num_bits); self.remaining_data.advance(num_bytes_to_skip); } FreqReadingOption::ReadFreq => { let num_consumed_bytes = self .freq_decoder .uncompress_block_unsorted(self.remaining_data.as_ref(), tf_num_bits); self.remaining_data.advance(num_consumed_bytes); } } // it will be used as the next offset. 
self.doc_offset = self.doc_decoder.output(COMPRESSION_BLOCK_SIZE - 1); true } else if self.num_vint_docs > 0 { let num_compressed_bytes = self.doc_decoder.uncompress_vint_sorted( self.remaining_data.as_ref(), self.doc_offset, self.num_vint_docs, ); self.remaining_data.advance(num_compressed_bytes); match self.freq_reading_option { FreqReadingOption::NoFreq | FreqReadingOption::SkipFreq => {} FreqReadingOption::ReadFreq => { self.freq_decoder .uncompress_vint_unsorted(self.remaining_data.as_ref(), self.num_vint_docs); } } self.num_vint_docs = 0; true } else { false } } /// Returns an empty segment postings object pub fn empty() -> BlockSegmentPostings { BlockSegmentPostings { num_vint_docs: 0, doc_decoder: BlockDecoder::new(), freq_decoder: BlockDecoder::with_val(1), freq_reading_option: FreqReadingOption::NoFreq, doc_offset: 0, doc_freq: 0, remaining_data: OwnedRead::new(vec![]), skip_reader: SkipReader::new(OwnedRead::new(vec![]), IndexRecordOption::Basic), } } } impl<'b> Streamer<'b> for BlockSegmentPostings { type Item = &'b [DocId]; fn next(&'b mut self) -> Option<&'b [DocId]> { if self.advance() { Some(self.docs()) } else { None } } } #[cfg(test)] mod tests { use super::BlockSegmentPostings; use super::BlockSegmentPostingsSkipResult; use super::SegmentPostings; use crate::common::HasLen; use crate::core::Index; use crate::docset::DocSet; use crate::postings::postings::Postings; use crate::schema::IndexRecordOption; use crate::schema::Schema; use crate::schema::Term; use crate::schema::INDEXED; use crate::DocId; use crate::SkipResult; use tantivy_fst::Streamer; #[test] fn test_empty_segment_postings() { let mut postings = SegmentPostings::empty(); assert!(!postings.advance()); assert!(!postings.advance()); assert_eq!(postings.len(), 0); } #[test] #[should_panic(expected = "Have you forgotten to call `.advance()`")] fn test_panic_if_doc_called_before_advance() { SegmentPostings::empty().doc(); } #[test] #[should_panic(expected = "Have you forgotten to call 
`.advance()`")] fn test_panic_if_freq_called_before_advance() { SegmentPostings::empty().term_freq(); } #[test] fn test_empty_block_segment_postings() { let mut postings = BlockSegmentPostings::empty(); assert!(!postings.advance()); assert_eq!(postings.doc_freq(), 0); } #[test] fn test_block_segment_postings() { let mut block_segments = build_block_postings(&(0..100_000).collect::<Vec<u32>>()); let mut offset: u32 = 0u32; // checking that the block before calling advance is empty assert!(block_segments.docs().is_empty()); // checking that the `doc_freq` is correct assert_eq!(block_segments.doc_freq(), 100_000); while let Some(block) = block_segments.next() { for (i, doc) in block.iter().cloned().enumerate() { assert_eq!(offset + (i as u32), doc); } offset += block.len() as u32; } } #[test] fn test_skip_right_at_new_block() { let mut doc_ids = (0..128).collect::<Vec<u32>>(); doc_ids.push(129); doc_ids.push(130); { let block_segments = build_block_postings(&doc_ids); let mut docset = SegmentPostings::from_block_postings(block_segments, None); assert_eq!(docset.skip_next(128), SkipResult::OverStep); assert_eq!(docset.doc(), 129); assert!(docset.advance()); assert_eq!(docset.doc(), 130); assert!(!docset.advance()); } { let block_segments = build_block_postings(&doc_ids); let mut docset = SegmentPostings::from_block_postings(block_segments, None); assert_eq!(docset.skip_next(129), SkipResult::Reached); assert_eq!(docset.doc(), 129); assert!(docset.advance()); assert_eq!(docset.doc(), 130); assert!(!docset.advance()); } { let block_segments = build_block_postings(&doc_ids); let mut docset = SegmentPostings::from_block_postings(block_segments, None); assert_eq!(docset.skip_next(131), SkipResult::End); } } fn build_block_postings(docs: &[DocId]) -> BlockSegmentPostings { let mut schema_builder = Schema::builder(); let int_field = schema_builder.add_u64_field("id", INDEXED); let schema = schema_builder.build(); let index = Index::create_in_ram(schema); let mut index_writer 
= index.writer_with_num_threads(1, 3_000_000).unwrap(); let mut last_doc = 0u32; for &doc in docs { for _ in last_doc..doc { index_writer.add_document(doc!(int_field=>1u64)); } index_writer.add_document(doc!(int_field=>0u64)); last_doc = doc + 1; } index_writer.commit().unwrap(); let searcher = index.reader().unwrap().searcher(); let segment_reader = searcher.segment_reader(0); let inverted_index = segment_reader.inverted_index(int_field); let term = Term::from_field_u64(int_field, 0u64); let term_info = inverted_index.get_term_info(&term).unwrap(); inverted_index.read_block_postings_from_terminfo(&term_info, IndexRecordOption::Basic) } #[test] fn test_block_segment_postings_skip() { for i in 0..4 { let mut block_postings = build_block_postings(&[3]); assert_eq!( block_postings.skip_to(i), BlockSegmentPostingsSkipResult::Success(0u32) ); assert_eq!( block_postings.skip_to(i), BlockSegmentPostingsSkipResult::Terminated ); } let mut block_postings = build_block_postings(&[3]); assert_eq!( block_postings.skip_to(4u32), BlockSegmentPostingsSkipResult::Terminated ); } #[test] fn test_block_segment_postings_skip2() { let mut docs = vec![0]; for i in 0..1300 { docs.push((i * i / 100) + i); } let mut block_postings = build_block_postings(&docs[..]); for i in vec![0, 424, 10000] { assert_eq!( block_postings.skip_to(i), BlockSegmentPostingsSkipResult::Success(0u32) ); let docs = block_postings.docs(); assert!(docs[0] <= i); assert!(docs.last().cloned().unwrap_or(0u32) >= i); } assert_eq!( block_postings.skip_to(100_000), BlockSegmentPostingsSkipResult::Terminated ); assert_eq!( block_postings.skip_to(101_000), BlockSegmentPostingsSkipResult::Terminated ); } #[test] fn test_reset_block_segment_postings() { let mut schema_builder = Schema::builder(); let int_field = schema_builder.add_u64_field("id", INDEXED); let schema = schema_builder.build(); let index = Index::create_in_ram(schema); let mut index_writer = index.writer_with_num_threads(1, 3_000_000).unwrap(); // create two 
postings list, one containg even number, // the other containing odd numbers. for i in 0..6 { let doc = doc!(int_field=> (i % 2) as u64); index_writer.add_document(doc); } index_writer.commit().unwrap(); let searcher = index.reader().unwrap().searcher(); let segment_reader = searcher.segment_reader(0); let mut block_segments; { let term = Term::from_field_u64(int_field, 0u64); let inverted_index = segment_reader.inverted_index(int_field); let term_info = inverted_index.get_term_info(&term).unwrap(); block_segments = inverted_index .read_block_postings_from_terminfo(&term_info, IndexRecordOption::Basic); } assert!(block_segments.advance()); assert_eq!(block_segments.docs(), &[0, 2, 4]); { let term = Term::from_field_u64(int_field, 1u64); let inverted_index = segment_reader.inverted_index(int_field); let term_info = inverted_index.get_term_info(&term).unwrap(); inverted_index.reset_block_postings_from_terminfo(&term_info, &mut block_segments); } assert!(block_segments.advance()); assert_eq!(block_segments.docs(), &[1, 3, 5]); } }
36.268323
130
0.587717
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_empty_segment_postings() {\n let mut postings = SegmentPostings::empty();\n assert!(!postings.advance());\n assert!(!postings.advance());\n assert_eq!(postings.len(), 0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_panic_if_doc_called_before_advance() {\n SegmentPostings::empty().doc();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_panic_if_freq_called_before_advance() {\n SegmentPostings::empty().term_freq();\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_empty_block_segment_postings() {\n let mut postings = BlockSegmentPostings::empty();\n assert!(!postings.advance());\n assert_eq!(postings.doc_freq(), 0);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_block_segment_postings() {\n let mut block_segments = build_block_postings(&(0..100_000).collect::<Vec<u32>>());\n let mut offset: u32 = 0u32;\n // checking that the block before calling advance is empty\n assert!(block_segments.docs().is_empty());\n // checking that the `doc_freq` is correct\n assert_eq!(block_segments.doc_freq(), 100_000);\n while let Some(block) = block_segments.next() {\n for (i, doc) in block.iter().cloned().enumerate() {\n assert_eq!(offset + (i as u32), doc);\n }\n offset += block.len() as u32;\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_skip_right_at_new_block() {\n let mut doc_ids = (0..128).collect::<Vec<u32>>();\n doc_ids.push(129);\n doc_ids.push(130);\n {\n let block_segments = build_block_postings(&doc_ids);\n let mut docset = SegmentPostings::from_block_postings(block_segments, None);\n assert_eq!(docset.skip_next(128), SkipResult::OverStep);\n assert_eq!(docset.doc(), 129);\n assert!(docset.advance());\n assert_eq!(docset.doc(), 130);\n assert!(!docset.advance());\n }\n {\n let block_segments = build_block_postings(&doc_ids);\n let mut docset = 
SegmentPostings::from_block_postings(block_segments, None);\n assert_eq!(docset.skip_next(129), SkipResult::Reached);\n assert_eq!(docset.doc(), 129);\n assert!(docset.advance());\n assert_eq!(docset.doc(), 130);\n assert!(!docset.advance());\n }\n {\n let block_segments = build_block_postings(&doc_ids);\n let mut docset = SegmentPostings::from_block_postings(block_segments, None);\n assert_eq!(docset.skip_next(131), SkipResult::End);\n }\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_block_segment_postings_skip() {\n for i in 0..4 {\n let mut block_postings = build_block_postings(&[3]);\n assert_eq!(\n block_postings.skip_to(i),\n BlockSegmentPostingsSkipResult::Success(0u32)\n );\n assert_eq!(\n block_postings.skip_to(i),\n BlockSegmentPostingsSkipResult::Terminated\n );\n }\n let mut block_postings = build_block_postings(&[3]);\n assert_eq!(\n block_postings.skip_to(4u32),\n BlockSegmentPostingsSkipResult::Terminated\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_block_segment_postings_skip2() {\n let mut docs = vec![0];\n for i in 0..1300 {\n docs.push((i * i / 100) + i);\n }\n let mut block_postings = build_block_postings(&docs[..]);\n for i in vec![0, 424, 10000] {\n assert_eq!(\n block_postings.skip_to(i),\n BlockSegmentPostingsSkipResult::Success(0u32)\n );\n let docs = block_postings.docs();\n assert!(docs[0] <= i);\n assert!(docs.last().cloned().unwrap_or(0u32) >= i);\n }\n assert_eq!(\n block_postings.skip_to(100_000),\n BlockSegmentPostingsSkipResult::Terminated\n );\n assert_eq!(\n block_postings.skip_to(101_000),\n BlockSegmentPostingsSkipResult::Terminated\n );\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn test_reset_block_segment_postings() {\n let mut schema_builder = Schema::builder();\n let int_field = schema_builder.add_u64_field(\"id\", INDEXED);\n let schema = schema_builder.build();\n let index = Index::create_in_ram(schema);\n let mut index_writer = 
index.writer_with_num_threads(1, 3_000_000).unwrap();\n // create two postings list, one containg even number,\n // the other containing odd numbers.\n for i in 0..6 {\n let doc = doc!(int_field=> (i % 2) as u64);\n index_writer.add_document(doc);\n }\n index_writer.commit().unwrap();\n let searcher = index.reader().unwrap().searcher();\n let segment_reader = searcher.segment_reader(0);\n\n let mut block_segments;\n {\n let term = Term::from_field_u64(int_field, 0u64);\n let inverted_index = segment_reader.inverted_index(int_field);\n let term_info = inverted_index.get_term_info(&term).unwrap();\n block_segments = inverted_index\n .read_block_postings_from_terminfo(&term_info, IndexRecordOption::Basic);\n }\n assert!(block_segments.advance());\n assert_eq!(block_segments.docs(), &[0, 2, 4]);\n {\n let term = Term::from_field_u64(int_field, 1u64);\n let inverted_index = segment_reader.inverted_index(int_field);\n let term_info = inverted_index.get_term_info(&term).unwrap();\n inverted_index.reset_block_postings_from_terminfo(&term_info, &mut block_segments);\n }\n assert!(block_segments.advance());\n assert_eq!(block_segments.docs(), &[1, 3, 5]);\n }\n}" ]
f70e93a80ef1055a5f6ea053ca2a4787a45d68de
26,854
rs
Rust
src/context.rs
polachok/swagger-rs
569b50f39982c4634d041a322afa008b6f10e2e3
[ "Apache-2.0" ]
null
null
null
src/context.rs
polachok/swagger-rs
569b50f39982c4634d041a322afa008b6f10e2e3
[ "Apache-2.0" ]
null
null
null
src/context.rs
polachok/swagger-rs
569b50f39982c4634d041a322afa008b6f10e2e3
[ "Apache-2.0" ]
null
null
null
//! Module for API context management. //! //! This module defines traits and structs that can be used to manage //! contextual data related to a request, as it is passed through a series of //! hyper services. //! //! See the `context_tests` module below for examples of how to use. use super::XSpanIdString; use auth::{AuthData, Authorization}; use futures::future::Future; use hyper; use std::marker::Sized; /// Defines methods for accessing, modifying, adding and removing the data stored /// in a context. Used to specify the requirements that a hyper service makes on /// a generic context type that it receives with a request, e.g. /// /// ```rust /// # extern crate hyper; /// # extern crate swagger; /// # extern crate futures; /// # /// # use swagger::context::*; /// # use futures::future::{Future, ok}; /// # use std::marker::PhantomData; /// # /// # struct MyItem; /// # fn do_something_with_my_item(item: &MyItem) {} /// # /// struct MyService<C> { /// marker: PhantomData<C>, /// } /// /// impl<C> hyper::server::Service for MyService<C> /// where C: Has<MyItem>, /// { /// type Request = (hyper::Request, C); /// type Response = hyper::Response; /// type Error = hyper::Error; /// type Future = Box<Future<Item=Self::Response, Error=Self::Error>>; /// fn call(&self, (req, context) : Self::Request) -> Self::Future { /// do_something_with_my_item(Has::<MyItem>::get(&context)); /// Box::new(ok(hyper::Response::new())) /// } /// } /// /// # fn main() {} /// ``` pub trait Has<T> { /// Get an immutable reference to the value. fn get(&self) -> &T; /// Get a mutable reference to the value. fn get_mut(&mut self) -> &mut T; /// Set the value. fn set(&mut self, value: T); } /// Defines a method for permanently extracting a value, changing the resulting /// type. Used to specify that a hyper service consumes some data from the context, /// making it unavailable to later layers, e.g. 
/// /// ```rust /// # extern crate hyper; /// # extern crate swagger; /// # extern crate futures; /// # /// # use swagger::context::*; /// # use futures::future::{Future, ok}; /// # use std::marker::PhantomData; /// # /// struct MyItem1; /// struct MyItem2; /// struct MyItem3; /// /// struct MiddlewareService<T, C> { /// inner: T, /// marker: PhantomData<C>, /// } /// /// impl<T, C, D, E> hyper::server::Service for MiddlewareService<T, C> /// where /// C: Pop<MyItem1, Result=D>, /// D: Pop<MyItem2, Result=E>, /// E: Pop<MyItem3>, /// T: hyper::server::Service<Request = (hyper::Request, E::Result)> /// { /// type Request = (hyper::Request, C); /// type Response = T::Response; /// type Error = T::Error; /// type Future = T::Future; /// fn call(&self, (req, context) : Self::Request) -> Self::Future { /// /// // type annotations optional, included for illustrative purposes /// let (_, context): (MyItem1, D) = context.pop(); /// let (_, context): (MyItem2, E) = context.pop(); /// let (_, context): (MyItem3, E::Result) = context.pop(); /// /// self.inner.call((req, context)) /// } /// } /// /// # fn main() {} pub trait Pop<T> { /// The type that remains after the value has been popped. type Result; /// Extracts a value. fn pop(self) -> (T, Self::Result); } /// Defines a method for inserting a value, changing the resulting /// type. Used to specify that a hyper service adds some data from the context, /// making it available to later layers, e.g. 
/// /// ```rust /// # extern crate hyper; /// # extern crate swagger; /// # extern crate futures; /// # /// # use swagger::context::*; /// # use futures::future::{Future, ok}; /// # use std::marker::PhantomData; /// # /// struct MyItem1; /// struct MyItem2; /// struct MyItem3; /// /// struct MiddlewareService<T, C> { /// inner: T, /// marker: PhantomData<C>, /// } /// /// impl<T, C, D, E> hyper::server::Service for MiddlewareService<T, C> /// where /// C: Push<MyItem1, Result=D>, /// D: Push<MyItem2, Result=E>, /// E: Push<MyItem3>, /// T: hyper::server::Service<Request = (hyper::Request, E::Result)> /// { /// type Request = (hyper::Request, C); /// type Response = T::Response; /// type Error = T::Error; /// type Future = T::Future; /// fn call(&self, (req, context) : Self::Request) -> Self::Future { /// let context = context /// .push(MyItem1{}) /// .push(MyItem2{}) /// .push(MyItem3{}); /// self.inner.call((req, context)) /// } /// } /// /// # fn main() {} pub trait Push<T> { /// The type that results from adding an item. type Result; /// Inserts a value. fn push(self, T) -> Self::Result; } /// Defines a struct that can be used to build up contexts recursively by /// adding one item to the context at a time, and a unit struct representing an /// empty context. The first argument is the name of the newly defined context struct /// that is used to add an item to the context, the second argument is the name of /// the empty context struct, and subsequent arguments are the types /// that can be stored in contexts built using these struct. /// /// A cons list built using the generated context type will implement Has<T> and Pop<T> /// for each type T that appears in the list, provided that the list only /// contains the types that were passed to the macro invocation after the context /// type name. 
/// /// All list types constructed using the generated types will implement `Push<T>` /// for all `T`, but it should ony be used when `T` is one of the types passed /// to the macro invocation, otherwise it might not be possible to retrieve the /// inserted value. /// /// E.g. /// /// ```rust /// # #[macro_use] extern crate swagger; /// # use swagger::{Has, Pop, Push}; /// /// struct MyType1; /// struct MyType2; /// struct MyType3; /// struct MyType4; /// /// new_context_type!(MyContext, MyEmpContext, MyType1, MyType2, MyType3); /// /// fn use_has_my_type_1<T: Has<MyType1>> (_: &T) {} /// fn use_has_my_type_2<T: Has<MyType2>> (_: &T) {} /// fn use_has_my_type_3<T: Has<MyType3>> (_: &T) {} /// fn use_has_my_type_4<T: Has<MyType4>> (_: &T) {} /// /// // will implement `Has<MyType1>` and `Has<MyType2>` because these appear /// // in the type, and were passed to `new_context_type!`. Will not implement /// // `Has<MyType3>` even though it was passed to `new_context_type!`, because /// // it is not included in the type. /// type ExampleContext = MyContext<MyType1, MyContext<MyType2, MyEmpContext>>; /// /// // Will not implement `Has<MyType4>` even though it appears in the type, /// // because `MyType4` was not passed to `new_context_type!`. /// type BadContext = MyContext<MyType1, MyContext<MyType4, MyEmpContext>>; /// /// fn main() { /// let context : ExampleContext = /// MyEmpContext::default() /// .push(MyType2{}) /// .push(MyType1{}); /// /// use_has_my_type_1(&context); /// use_has_my_type_2(&context); /// // use_has_my_type3(&context); // will fail /// /// let bad_context: BadContext = /// MyEmpContext::default() /// .push(MyType4{}) /// .push(MyType1{}); /// // use_has_my_type_4(&bad_context); // will fail /// /// } /// ``` /// /// See the `context_tests` module for more usage examples. #[macro_export] macro_rules! 
new_context_type { ($context_name:ident, $empty_context_name:ident, $($types:ty),+ ) => { /// Wrapper type for building up contexts recursively, adding one item /// to the context at a time. #[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct $context_name<T, C> { head: T, tail: C, } /// Unit struct representing an empty context with no data in it. #[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct $empty_context_name; // implement `Push<T>` on the empty context type for any `T`, so that // items can be added to the context impl<U> $crate::Push<U> for $empty_context_name { type Result = $context_name<U, Self>; fn push(self, item: U) -> Self::Result { $context_name{head: item, tail: Self::default()} } } // implement `Has<T>` for a list where `T` is the type of the head impl<T, C> $crate::Has<T> for $context_name<T, C> { fn set(&mut self, item: T) { self.head = item; } fn get(&self) -> &T { &self.head } fn get_mut(&mut self) -> &mut T { &mut self.head } } // implement `Pop<T>` for a list where `T` is the type of the head impl<T, C> $crate::Pop<T> for $context_name<T, C> { type Result = C; fn pop(self) -> (T, Self::Result) { (self.head, self.tail) } } // implement `Push<U>` for non-empty lists, for all types `U` impl<C, T, U> $crate::Push<U> for $context_name<T, C> { type Result = $context_name<U, Self>; fn push(self, item: U) -> Self::Result { $context_name{head: item, tail: self} } } // Add implementations of `Has<T>` and `Pop<T>` when `T` is any type stored in // the list, not just the head. new_context_type!(impl extend_has $context_name, $empty_context_name, $($types),+); }; // "HELPER" MACRO CASE - NOT FOR EXTERNAL USE // takes a type `Type1` ($head) and a non-empty list of types `Types` ($tail). 
First calls // another helper macro to define the following impls, for each `Type2` in `Types`: // ``` // impl<C: Has<Type1> Has<Type1> for $context_name<Type2, C> {...} // impl<C: Has<Type2> Has<Type2> for $context_name<Type1, C> {...} // impl<C: Pop<Type1> Pop<Type1> for $context_name<Type2, C> {...} // impl<C: Pop<Type2> Pop<Type2> for $context_name<Type1, C> {...} // ``` // then calls itself again with the rest of the list. The end result is to define the above // impls for all distinct pairs of types in the original list. (impl extend_has $context_name:ident, $empty_context_name:ident, $head:ty, $($tail:ty),+ ) => { // new_context_type!( impl extend_has_helper $context_name, $empty_context_name, $head, $($tail),+ ); new_context_type!(impl extend_has $context_name, $empty_context_name, $($tail),+); }; // "HELPER" MACRO CASE - NOT FOR EXTERNAL USE // base case of the preceding helper macro - was passed an empty list of types, so // we don't need to do anything. (impl extend_has $context_name:ident, $empty_context_name:ident, $head:ty) => {}; // "HELPER" MACRO CASE - NOT FOR EXTERNAL USE // takes a type `Type1` ($type) and a non-empty list of types `Types` ($types). 
For // each `Type2` in `Types`, defines the following impls: // ``` // impl<C: Has<Type1> Has<Type1> for $context_name<Type2, C> {...} // impl<C: Has<Type2> Has<Type2> for $context_name<Type1, C> {...} // impl<C: Pop<Type1> Pop<Type1> for $context_name<Type2, C> {...} // impl<C: Pop<Type2> Pop<Type2> for $context_name<Type1, C> {...} // ``` // (impl extend_has_helper $context_name:ident, $empty_context_name:ident, $type:ty, $($types:ty),+ ) => { $( impl<C: $crate::Has<$type>> $crate::Has<$type> for $context_name<$types, C> { fn set(&mut self, item: $type) { self.tail.set(item); } fn get(&self) -> &$type { self.tail.get() } fn get_mut(&mut self) -> &mut $type { self.tail.get_mut() } } impl<C: $crate::Has<$types>> $crate::Has<$types> for $context_name<$type, C> { fn set(&mut self, item: $types) { self.tail.set(item); } fn get(&self) -> &$types { self.tail.get() } fn get_mut(&mut self) -> &mut $types { self.tail.get_mut() } } impl<C> $crate::Pop<$type> for $context_name<$types, C> where C: Pop<$type> { type Result = $context_name<$types, C::Result>; fn pop(self) -> ($type, Self::Result) { let (value, tail) = self.tail.pop(); (value, $context_name{ head: self.head, tail}) } } impl<C> $crate::Pop<$types> for $context_name<$type, C> where C: Pop<$types> { type Result = $context_name<$type, C::Result>; fn pop(self) -> ($types, Self::Result) { let (value, tail) = self.tail.pop(); (value, $context_name{ head: self.head, tail}) } } )+ }; } /// Create a default context type to export. new_context_type!( ContextBuilder, EmptyContext, XSpanIdString, Option<AuthData>, Option<Authorization> ); /// Macro for easily defining context types. The first argument should be a /// context type created with `new_context_type!` and subsequent arguments are the /// types to be stored in the context, with the outermost first. 
/// /// ```rust /// # #[macro_use] extern crate swagger; /// # use swagger::{Has, Pop, Push}; /// /// # struct Type1; /// # struct Type2; /// # struct Type3; /// /// # new_context_type!(MyContext, MyEmptyContext, Type1, Type2, Type3); /// /// // the following two types are identical /// type ExampleContext1 = make_context_ty!(MyContext, MyEmptyContext, Type1, Type2, Type3); /// type ExampleContext2 = MyContext<Type1, MyContext<Type2, MyContext<Type3, MyEmptyContext>>>; /// /// // e.g. this wouldn't compile if they were different types /// fn do_nothing(input: ExampleContext1) -> ExampleContext2 { /// input /// } /// /// # fn main() {} /// ``` #[macro_export] macro_rules! make_context_ty { ($context_name:ident, $empty_context_name:ident, $type:ty $(, $types:ty)* $(,)* ) => { $context_name<$type, make_context_ty!($context_name, $empty_context_name, $($types),*)> }; ($context_name:ident, $empty_context_name:ident $(,)* ) => { $empty_context_name }; } /// Macro for easily defining context values. The first argument should be a /// context type created with `new_context_type!` and subsequent arguments are the /// values to be stored in the context, with the outermost first. /// /// ```rust /// # #[macro_use] extern crate swagger; /// # use swagger::{Has, Pop, Push}; /// /// # #[derive(PartialEq, Eq, Debug)] /// # struct Type1; /// # #[derive(PartialEq, Eq, Debug)] /// # struct Type2; /// # #[derive(PartialEq, Eq, Debug)] /// # struct Type3; /// /// # new_context_type!(MyContext, MyEmptyContext, Type1, Type2, Type3); /// /// fn main() { /// // the following are equivalent /// let context1 = make_context!(MyContext, MyEmptyContext, Type1 {}, Type2 {}, Type3 {}); /// let context2 = MyEmptyContext::default() /// .push(Type3{}) /// .push(Type2{}) /// .push(Type1{}); /// /// assert_eq!(context1, context2); /// } /// ``` #[macro_export] macro_rules! 
make_context { ($context_name:ident, $empty_context_name:ident, $value:expr $(, $values:expr)* $(,)*) => { make_context!($context_name, $empty_context_name, $($values),*).push($value) }; ($context_name:ident, $empty_context_name:ident $(,)* ) => { $empty_context_name::default() }; } /// Context wrapper, to bind an API with a context. #[derive(Debug)] pub struct ContextWrapper<'a, T: 'a, C> { api: &'a T, context: C, } impl<'a, T, C> ContextWrapper<'a, T, C> { /// Create a new ContextWrapper, binding the API and context. pub fn new(api: &'a T, context: C) -> ContextWrapper<'a, T, C> { ContextWrapper { api, context } } /// Borrows the API. pub fn api(&self) -> &T { self.api } /// Borrows the context. pub fn context(&self) -> &C { &self.context } } /// Trait to extend an API to make it easy to bind it to a context. pub trait ContextWrapperExt<'a, C> where Self: Sized, { /// Binds this API to a context. fn with_context(self: &'a Self, context: C) -> ContextWrapper<'a, Self, C> { ContextWrapper::<Self, C>::new(self, context) } } /// Trait designed to ensure consistency in context used by swagger middlewares /// /// ```rust /// # extern crate hyper; /// # extern crate swagger; /// # use swagger::context::*; /// # use std::marker::PhantomData; /// # use swagger::auth::{AuthData, Authorization}; /// # use swagger::XSpanIdString; /// /// struct ExampleMiddleware<T, C> { /// inner: T, /// marker: PhantomData<C>, /// } /// /// impl<T, C> hyper::server::Service for ExampleMiddleware<T, C> /// where /// T: SwaggerService<C>, /// C: Has<Option<AuthData>> + /// Has<Option<Authorization>> + /// Has<XSpanIdString> + /// Clone + /// 'static, /// { /// type Request = (hyper::Request, C); /// type Response = T::Response; /// type Error = T::Error; /// type Future = T::Future; /// fn call(&self, (req, context) : Self::Request) -> Self::Future { /// self.inner.call((req, context)) /// } /// } /// ``` pub trait SwaggerService<C>: Clone + hyper::server::Service< Request = 
(hyper::server::Request, C), Response = hyper::server::Response, Error = hyper::Error, Future = Box<Future<Item = hyper::server::Response, Error = hyper::Error>>, > where C: Has<Option<AuthData>> + Has<Option<Authorization>> + Has<XSpanIdString> + Clone + 'static, { } impl<T, C> SwaggerService<C> for T where T: Clone + hyper::server::Service< Request = (hyper::server::Request, C), Response = hyper::server::Response, Error = hyper::Error, Future = Box<Future<Item = hyper::server::Response, Error = hyper::Error>>, >, C: Has<Option<AuthData>> + Has<Option<Authorization>> + Has<XSpanIdString> + Clone + 'static, { } #[cfg(test)] mod context_tests { use super::*; use futures::future::{ok, Future}; use hyper::server::{NewService, Service}; use hyper::{Error, Method, Request, Response, Uri}; use std::io; use std::marker::PhantomData; use std::str::FromStr; struct ContextItem1; struct ContextItem2; struct ContextItem3; fn use_item_1_owned(_: ContextItem1) {} fn use_item_2(_: &ContextItem2) {} fn use_item_3_owned(_: ContextItem3) {} // Example of a "terminating" hyper service using contexts - i.e. doesn't // pass a request and its context on to a wrapped service. struct InnerService<C> where C: Has<ContextItem2> + Pop<ContextItem3>, { marker: PhantomData<C>, } // Use trait bounds to indicate what your service will use from the context. // use `Pop` if you want to take ownership of a value stored in the context, // or `Has` if a reference is enough. 
impl<C> Service for InnerService<C> where C: Has<ContextItem2> + Pop<ContextItem3>, { type Request = (Request, C); type Response = Response; type Error = Error; type Future = Box<Future<Item = Response, Error = Error>>; fn call(&self, (_, context): Self::Request) -> Self::Future { use_item_2(Has::<ContextItem2>::get(&context)); let (item3, _): (ContextItem3, _) = context.pop(); use_item_3_owned(item3); Box::new(ok(Response::new())) } } struct InnerNewService<C> where C: Has<ContextItem2> + Pop<ContextItem3>, { marker: PhantomData<C>, } impl<C> InnerNewService<C> where C: Has<ContextItem2> + Pop<ContextItem3>, { fn new() -> Self { InnerNewService { marker: PhantomData, } } } impl<C> NewService for InnerNewService<C> where C: Has<ContextItem2> + Pop<ContextItem3>, { type Request = (Request, C); type Response = Response; type Error = Error; type Instance = InnerService<C>; fn new_service(&self) -> Result<Self::Instance, io::Error> { Ok(InnerService { marker: PhantomData, }) } } // Example of a middleware service using contexts, i.e. a hyper service that // processes a request (and its context) and passes it on to another wrapped // service. struct MiddleService<T, C> where C: Pop<ContextItem1>, C::Result: Push<ContextItem2>, <C::Result as Push<ContextItem2>>::Result: Push<ContextItem3>, T: Service< Request = ( Request, <<C::Result as Push<ContextItem2>>::Result as Push<ContextItem3>>::Result, ), >, { inner: T, marker1: PhantomData<C>, } // Use trait bounds to indicate what modifications your service will make // to the context, chaining them as below. 
impl<T, C, D, E> Service for MiddleService<T, C> where C: Pop<ContextItem1, Result = D>, D: Push<ContextItem2, Result = E>, E: Push<ContextItem3>, T: Service<Request = (Request, E::Result)>, { type Request = (Request, C); type Response = T::Response; type Error = T::Error; type Future = T::Future; fn call(&self, (req, context): Self::Request) -> Self::Future { let (item, context) = context.pop(); use_item_1_owned(item); let context = context.push(ContextItem2 {}).push(ContextItem3 {}); self.inner.call((req, context)) } } struct MiddleNewService<T, C> where C: Pop<ContextItem1>, C::Result: Push<ContextItem2>, <C::Result as Push<ContextItem2>>::Result: Push<ContextItem3>, T: NewService< Request = ( Request, <<C::Result as Push<ContextItem2>>::Result as Push<ContextItem3>>::Result, ), >, { inner: T, marker1: PhantomData<C>, } impl<T, C, D, E> NewService for MiddleNewService<T, C> where C: Pop<ContextItem1, Result = D>, D: Push<ContextItem2, Result = E>, E: Push<ContextItem3>, T: NewService<Request = (Request, E::Result)>, { type Request = (Request, C); type Response = T::Response; type Error = T::Error; type Instance = MiddleService<T::Instance, C>; fn new_service(&self) -> Result<Self::Instance, io::Error> { self.inner.new_service().map(|s| MiddleService { inner: s, marker1: PhantomData, }) } } impl<T, C, D, E> MiddleNewService<T, C> where C: Pop<ContextItem1, Result = D>, D: Push<ContextItem2, Result = E>, E: Push<ContextItem3>, T: NewService<Request = (Request, E::Result)>, { fn new(inner: T) -> Self { MiddleNewService { inner, marker1: PhantomData, } } } // Example of a top layer service that creates a context to be used by // lower layers. struct OuterService<T, C> where C: Default + Push<ContextItem1>, T: Service<Request = (Request, C::Result)>, { inner: T, marker: PhantomData<C>, } // Use a `Default` trait bound so that the context can be created. Use // `Push` trait bounds for each type that you will add to the newly // created context. 
impl<T, C> Service for OuterService<T, C> where C: Default + Push<ContextItem1>, T: Service<Request = (Request, C::Result)>, { type Request = Request; type Response = T::Response; type Error = T::Error; type Future = T::Future; fn call(&self, req: Self::Request) -> Self::Future { let context = C::default().push(ContextItem1 {}); self.inner.call((req, context)) } } struct OuterNewService<T, C> where C: Default + Push<ContextItem1>, T: NewService<Request = (Request, C::Result)>, { inner: T, marker: PhantomData<C>, } impl<T, C> NewService for OuterNewService<T, C> where C: Default + Push<ContextItem1>, T: NewService<Request = (Request, C::Result)>, { type Request = Request; type Response = T::Response; type Error = T::Error; type Instance = OuterService<T::Instance, C>; fn new_service(&self) -> Result<Self::Instance, io::Error> { self.inner.new_service().map(|s| OuterService { inner: s, marker: PhantomData, }) } } impl<T, C> OuterNewService<T, C> where C: Default + Push<ContextItem1>, T: NewService<Request = (Request, C::Result)>, { fn new(inner: T) -> Self { OuterNewService { inner, marker: PhantomData, } } } // Example of use by a service in its main.rs file. At this point you know // all the hyper service layers you will be using, and what requirements // their contexts types have. Use the `new_context_type!` macro to create // a context type and empty context type that are capable of containing all the // types that your hyper services require. new_context_type!( MyContext, MyEmptyContext, ContextItem1, ContextItem2, ContextItem3 ); #[test] fn send_request() { // annotate the outermost service to indicate that the context type it // uses is the empty context type created by the above macro invocation. // the compiler should infer all the other context types. 
let new_service = OuterNewService::<_, MyEmptyContext>::new(MiddleNewService::new( InnerNewService::new(), )); let req = Request::new(Method::Post, Uri::from_str("127.0.0.1:80").unwrap()); new_service .new_service() .expect("Failed to start new service") .call(req) .wait() .expect("Service::call returned an error"); } }
32.510896
99
0.569785
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn send_request() {\n // annotate the outermost service to indicate that the context type it\n // uses is the empty context type created by the above macro invocation.\n // the compiler should infer all the other context types.\n let new_service = OuterNewService::<_, MyEmptyContext>::new(MiddleNewService::new(\n InnerNewService::new(),\n ));\n\n let req = Request::new(Method::Post, Uri::from_str(\"127.0.0.1:80\").unwrap());\n new_service\n .new_service()\n .expect(\"Failed to start new service\")\n .call(req)\n .wait()\n .expect(\"Service::call returned an error\");\n }\n}" ]
f70f027b8fd4e415817fb8f72201010519cffc7b
4,474
rs
Rust
src/tetrominos/z.rs
ThibautGeriz/tetris
2f99998fc05fe1e120f4a7d69801c05b95e110e4
[ "MIT" ]
null
null
null
src/tetrominos/z.rs
ThibautGeriz/tetris
2f99998fc05fe1e120f4a7d69801c05b95e110e4
[ "MIT" ]
9
2021-01-04T18:25:19.000Z
2021-01-06T20:16:10.000Z
src/tetrominos/z.rs
ThibautGeriz/tetris
2f99998fc05fe1e120f4a7d69801c05b95e110e4
[ "MIT" ]
null
null
null
use super::{Tetromino, TetrominoCommon, SQUARE_COUNT}; use crate::color::Color; use crate::playground::Playground; use crate::playground::COLUMN_COUNT; #[allow(unused_imports)] use rand::{thread_rng, Error, Rng, RngCore}; const COLOR: Color = Color::Red; pub struct Z { squares: [usize; SQUARE_COUNT], } impl Z { fn create(rng: &mut Box<dyn RngCore>) -> Self { let index = rng.gen_range(2, COLUMN_COUNT - 2); let mut squares = [0; SQUARE_COUNT]; squares[0] = index; squares[1] = index + 1; squares[2] = index + COLUMN_COUNT + 1; squares[3] = index + COLUMN_COUNT + 2; Z { squares } } } impl TetrominoCommon for Z { fn set_square(&mut self, index: usize, value: usize) { self.squares[index] = value; } fn get_square(&self, index: usize) -> usize { self.squares[index] } fn get_color(&self) -> Color { COLOR } } impl Tetromino for Z { fn new() -> Self { let mut rng = Box::new(thread_rng()) as Box<dyn RngCore>; Z::create(&mut rng) } fn insert_into_playground(&self, playground: &mut Playground) -> bool { <Z as TetrominoCommon>::insert_into_playground(self, playground) } fn go_down(&mut self, playground: &mut Playground) -> bool { <Z as TetrominoCommon>::go_down(self, playground) } fn go_right(&mut self, playground: &mut Playground) -> bool { <Z as TetrominoCommon>::go_right(self, playground) } fn go_left(&mut self, playground: &mut Playground) -> bool { <Z as TetrominoCommon>::go_left(self, playground) } fn go_bottom(&mut self, playground: &mut Playground) -> bool { <Z as TetrominoCommon>::go_bottom(self, playground) } } impl Default for Z { fn default() -> Self { Self::new() } } #[cfg(test)] mod tests { use super::*; #[cfg(test)] struct FakeGenerator { next_number: u32, } #[cfg(test)] impl RngCore for FakeGenerator { fn next_u32(&mut self) -> u32 { self.next_number } fn next_u64(&mut self) -> u64 { self.next_number as u64 } fn fill_bytes(&mut self, _dest: &mut [u8]) {} fn try_fill_bytes(&mut self, _dest: &mut [u8]) -> Result<(), Error> { Result::Ok(()) } } #[cfg(test)] fn 
get_fake_rand(next_number: u32) -> Box<dyn RngCore> { Box::new(FakeGenerator { next_number }) as Box<dyn RngCore> } #[test] fn insert_into_playground_true() { // given let mut fake_random = get_fake_rand(2); let mut playground = Playground::new(); let tetromino = Z::create(&mut fake_random); // when let is_inserted = <Z as TetrominoCommon>::insert_into_playground(&tetromino, &mut playground); // then assert_eq!(is_inserted, true); let mut expected_squares = [0; 4]; expected_squares[0] = 2; expected_squares[1] = 3; expected_squares[2] = 13; expected_squares[3] = 14; assert_eq!(tetromino.squares, expected_squares); assert_eq!(playground.get_squares()[2], Color::Red); assert_eq!(playground.get_squares()[3], Color::Red); assert_eq!(playground.get_squares()[13], Color::Red); assert_eq!(playground.get_squares()[14], Color::Red); } #[test] fn go_down_empty_playground() { // given let mut fake_random = get_fake_rand(2); let mut playground = Playground::new(); let mut tetromino = Z::create(&mut fake_random); <Z as TetrominoCommon>::insert_into_playground(&tetromino, &mut playground); // when let went_down = <Z as Tetromino>::go_down(&mut tetromino, &mut playground); // then assert_eq!(went_down, true); let mut expected_squares = [0; 4]; expected_squares[0] = 12; expected_squares[1] = 13; expected_squares[2] = 23; expected_squares[3] = 24; assert_eq!(tetromino.squares, expected_squares); assert_eq!(playground.get_squares()[2], Color::None); assert_eq!(playground.get_squares()[3], Color::None); assert_eq!(playground.get_squares()[12], Color::Red); assert_eq!(playground.get_squares()[13], Color::Red); assert_eq!(playground.get_squares()[23], Color::Red); assert_eq!(playground.get_squares()[24], Color::Red); } }
29.629139
88
0.595887
[ "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn insert_into_playground_true() {\n // given\n let mut fake_random = get_fake_rand(2);\n let mut playground = Playground::new();\n let tetromino = Z::create(&mut fake_random);\n\n // when\n let is_inserted =\n <Z as TetrominoCommon>::insert_into_playground(&tetromino, &mut playground);\n\n // then\n assert_eq!(is_inserted, true);\n let mut expected_squares = [0; 4];\n expected_squares[0] = 2;\n expected_squares[1] = 3;\n expected_squares[2] = 13;\n expected_squares[3] = 14;\n assert_eq!(tetromino.squares, expected_squares);\n assert_eq!(playground.get_squares()[2], Color::Red);\n assert_eq!(playground.get_squares()[3], Color::Red);\n assert_eq!(playground.get_squares()[13], Color::Red);\n assert_eq!(playground.get_squares()[14], Color::Red);\n }\n}", "#[cfg(test)]\nmod tests {\n use super::*;\n #[test]\n fn go_down_empty_playground() {\n // given\n let mut fake_random = get_fake_rand(2);\n let mut playground = Playground::new();\n let mut tetromino = Z::create(&mut fake_random);\n <Z as TetrominoCommon>::insert_into_playground(&tetromino, &mut playground);\n\n // when\n let went_down = <Z as Tetromino>::go_down(&mut tetromino, &mut playground);\n\n // then\n assert_eq!(went_down, true);\n let mut expected_squares = [0; 4];\n expected_squares[0] = 12;\n expected_squares[1] = 13;\n expected_squares[2] = 23;\n expected_squares[3] = 24;\n assert_eq!(tetromino.squares, expected_squares);\n assert_eq!(playground.get_squares()[2], Color::None);\n assert_eq!(playground.get_squares()[3], Color::None);\n assert_eq!(playground.get_squares()[12], Color::Red);\n assert_eq!(playground.get_squares()[13], Color::Red);\n assert_eq!(playground.get_squares()[23], Color::Red);\n assert_eq!(playground.get_squares()[24], Color::Red);\n }\n}" ]