text stringlengths 8 4.13M |
|---|
use format_strings::{ ANSI_RESET, ANSI_YELLOW, ANSI_GREEN };
use std::old_io::net::ip::SocketAddr;
use std::old_io::net::tcp::TcpAcceptor;
use std::old_io::{ TcpListener, TcpStream, Acceptor, Listener, BufferedStream };
use std::sync::mpsc::channel;
use std::sync::mpsc::{ Sender, Receiver };
use std::collections::HashMap;
use std::thread::Thread;
use std::str;
use connection::Connection;
/// Messages sent from the acceptor and per-client handler threads to the
/// connection-manager thread.
enum Action {
/// Register a new client stream under its socket address.
Add(SocketAddr, TcpStream),
/// Drop the client registered under this address.
Remove(SocketAddr),
/// Send the string to every client except the originating address.
Broadcast(SocketAddr, String)
}
/// Bind the chat server on 0.0.0.0:6262 and run it.
///
/// Spawns the connection-manager thread, then runs the acceptor on a
/// scoped thread. `Thread::scoped` returns a guard that joins on drop and
/// `accept_connections` loops forever, so this function never returns
/// under normal operation.
pub fn start() {
    let listener = TcpListener::bind("0.0.0.0:6262").unwrap();
    let acceptor = listener.listen().unwrap();
    let (sender, receiver) = channel();
    Thread::spawn(move || manage_connections(receiver));
    // Announce readiness *before* the acceptor starts: `let _ = scoped(..)`
    // drops the JoinGuard immediately, which joins the (infinite) accept
    // loop — so any statement after it was unreachable in the original.
    println!("Listening on 0.0.0.0:6262");
    let _ = Thread::scoped(move || accept_connections(acceptor, sender));
}
/// Accept TCP clients forever. Each accepted peer is registered with the
/// manager thread and served by its own handler thread.
fn accept_connections(mut acceptor: TcpAcceptor, sender: Sender<Action>) {
loop {
if let Ok(mut stream) = acceptor.accept() {
if let Ok(addr) = stream.peer_name() {
let sender = sender.clone();
// Hand a clone of the raw stream to the manager (for broadcasting);
// the handler thread gets its own clone wrapped in a buffered stream.
sender.send(Action::Add(addr, stream.clone())).ok();
Thread::spawn(move || handle_connection(BufferedStream::new(stream), addr, sender));
}
}
}
}
/// Per-client loop: greet the peer, relay every received line to the
/// manager for broadcast, and deregister when the stream closes or errors.
fn handle_connection(mut stream: BufferedStream<TcpStream>, addr: SocketAddr, sender: Sender<Action>) {
    let greeting = format!("----- you ({}) have connected -----", addr);
    stream.write_line(&*greeting).ok();
    stream.flush().ok();
    loop {
        match stream.read_line() {
            Ok(data) => {
                let tagged = format!("{}[{}]{} {}", ANSI_GREEN, addr, ANSI_RESET, data);
                sender.send(Action::Broadcast(addr, tagged)).ok();
            }
            Err(_) => break,
        }
    }
    sender.send(Action::Remove(addr)).ok();
}
/// Owns the connection table. Applies `Action`s received from the acceptor
/// and handler threads until every sender has hung up.
fn manage_connections(receiver: Receiver<Action>) {
let mut connections = HashMap::new();
while let Ok(message) = receiver.recv() {
match message {
Action::Add(addr, stream) => add_connection(&mut connections, &addr, stream),
Action::Remove(addr) => remove_connection(&mut connections, &addr),
Action::Broadcast(addr, msg) => broadcast(&mut connections, &addr, msg.as_bytes()),
}
}
// Helpers are nested so only this manager thread can touch the table.
/// Write `msg` to every connection except the one it originated from.
fn broadcast(connections: &mut HashMap<SocketAddr, Connection>, from: &SocketAddr, msg: &[u8]) {
println!("broadcasting msg: {}", str::from_utf8(msg).unwrap());
for (addr, mut connection) in connections.iter_mut() {
// Don't echo the message back to its sender.
if *from == *addr {
continue;
}
connection.stream.write(msg).ok();
connection.stream.flush().ok();
}
}
/// Register a new client and announce it to everyone else
/// (broadcast skips the new client itself, since `from == addr`).
fn add_connection(connections: &mut HashMap<SocketAddr, Connection>, addr: &SocketAddr, stream: TcpStream) {
connections.insert(*addr, Connection {
addr: *addr,
stream: stream,
});
let msg = format!("({} connections) ----- new connection from {} -----", connections.len(), addr);
println!("{}", msg);
broadcast(connections, addr, (msg + "\n").as_bytes());
}
/// Drop a client and announce the disconnect to the remaining peers.
fn remove_connection(connections: &mut HashMap<SocketAddr, Connection>, addr: &SocketAddr) {
connections.remove(addr);
let msg = format!("({} connections) {}----- {} is disconnected -----{}", connections.len(), ANSI_YELLOW, addr, ANSI_RESET);
println!("{}", msg);
broadcast(connections, addr, (msg + "\n").as_bytes());
}
}
|
#[macro_use]
pub mod macros;
use crate::entry::Entry;
use crossbeam_channel::{Receiver, Sender};
use log::error;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::error::Error;
use std::net::{SocketAddr, ToSocketAddrs, UdpSocket};
use std::sync::Arc;
/// All RPC payloads exchanged between nodes in this Raft-style protocol.
/// Serialized as JSON (see `RPCMessage::from_json` and `RPCCS::send_to`).
#[derive(PartialEq, Serialize, Deserialize, Debug)]
pub enum Message {
/// Leader → follower log replication / heartbeat.
AppendEntriesRequest(AppendEntriesRequest),
/// Follower's reply to AppendEntries.
AppendEntriesResponse(AppendEntriesResponse),
/// Candidate → peer vote solicitation.
RequestVoteRequest(RequestVoteRequest),
/// Peer's reply to RequestVote.
RequestVoteResponse(RequestVoteResponse),
}
/// Leader → follower log-replication request; with `entries` empty it
/// presumably doubles as the heartbeat (convention — confirm at call site).
#[derive(PartialEq, Serialize, Deserialize, Debug)]
pub struct AppendEntriesRequest {
/// Leader's current term.
pub term: u32,
/// Address of the sending leader.
pub leader_addr: SocketAddr,
/// Index of the log entry immediately preceding the new ones.
pub prev_log_index: usize,
/// Term of the entry at `prev_log_index`.
pub prev_log_term: u32,
/// Entries to append (may be empty).
pub entries: Vec<Entry>,
/// Leader's commit index.
pub leader_commit: usize,
}
impl AppendEntriesRequest {
/// Plain field-by-field constructor.
pub fn new(
term: u32,
leader_addr: SocketAddr,
prev_log_index: usize,
prev_log_term: u32,
entries: Vec<Entry>,
leader_commit: usize,
) -> AppendEntriesRequest {
AppendEntriesRequest {
term,
leader_addr,
prev_log_index,
prev_log_term,
entries,
leader_commit,
}
}
}
/// Follower's reply to an AppendEntries RPC.
#[derive(PartialEq, Serialize, Deserialize, Debug)]
pub struct AppendEntriesResponse {
    pub socket_addr: SocketAddr,
    pub next_index: usize,
    pub match_index: usize,
    pub term: u32,
    pub success: bool,
}

impl AppendEntriesResponse {
    /// Plain field-by-field constructor.
    pub fn new(
        socket_addr: SocketAddr,
        next_index: usize,
        match_index: usize,
        term: u32,
        success: bool,
    ) -> AppendEntriesResponse {
        Self {
            socket_addr,
            next_index,
            match_index,
            term,
            success,
        }
    }
}
/// Candidate's vote solicitation.
#[derive(PartialEq, Serialize, Deserialize, Debug)]
pub struct RequestVoteRequest {
/// Candidate's current term.
pub term: u32,
/// Address of the requesting candidate.
/// NOTE(review): "candidated" looks like a typo for "candidate", but the
/// field is public and part of the serialized JSON wire format, so
/// renaming it would break callers and peers.
pub candidated_addr: SocketAddr,
/// Index of the candidate's last log entry.
pub last_log_index: usize,
/// Term of the candidate's last log entry.
pub last_log_term: u32,
}
impl RequestVoteRequest {
/// Plain field-by-field constructor.
pub fn new(
term: u32,
candidated_addr: SocketAddr,
last_log_index: usize,
last_log_term: u32,
) -> RequestVoteRequest {
RequestVoteRequest {
term,
candidated_addr,
last_log_index,
last_log_term,
}
}
}
/// Peer's reply to a RequestVote RPC.
#[derive(PartialEq, Serialize, Deserialize, Debug)]
pub struct RequestVoteResponse {
/// Responder's current term.
pub term: u32,
/// Whether the vote was granted.
pub vote_granted: bool,
}
impl RequestVoteResponse {
/// Plain field-by-field constructor.
pub fn new(term: u32, vote_granted: bool) -> RequestVoteResponse {
RequestVoteResponse { term, vote_granted }
}
}
/// Envelope wrapping a `Message` for JSON (de)serialization.
#[derive(PartialEq, Serialize, Deserialize, Debug)]
pub struct RPCMessage {
pub message: Message,
}
impl RPCMessage {
/// Wrap a `Message` in an envelope.
/// NOTE(review): this can never fail; the `Result` return is kept only
/// for compatibility with existing callers.
pub fn new(message: Message) -> Result<RPCMessage, Box<dyn Error>> {
Ok(RPCMessage { message })
}
/// Parse an `RPCMessage` from its JSON text form.
pub fn from_json(json_str: String) -> Result<RPCMessage, Box<dyn Error>> {
let rpc_message: RPCMessage = serde_json::from_str(json_str.as_str())?;
Ok(rpc_message)
}
}
/// Combined UDP client/server endpoint: one bound socket plus the list of
/// peer addresses it exchanges RPCs with.
pub struct RPCCS {
socket: UdpSocket,
pub socket_addr: SocketAddr,
peer_list: Vec<SocketAddr>,
}
// RPC Client & Server
impl RPCCS {
pub fn new(
socket_addr: SocketAddr,
peer_list: Vec<SocketAddr>,
) -> Result<RPCCS, Box<dyn Error>> {
let socket = UdpSocket::bind(socket_addr)?;
Ok(RPCCS {
socket,
socket_addr,
peer_list,
})
}
pub fn start_listener(&self, rpc_notifier: Sender<RPCMessage>) -> Result<(), Box<dyn Error>> {
loop {
let mut buffer = [0; 1024];
let (amt, _) = match self.socket.recv_from(&mut buffer) {
Ok(pair) => pair,
Err(_) => {
error!("{} receive error", self.socket_addr.port());
(0, "127.0.0.1:8006".to_socket_addrs()?.next().unwrap())
}
};
if let Ok(msg_content) = String::from_utf8(buffer[..amt].to_vec()) {
// Handle the raw RPC request from socket buffer
let msg_parsed = RPCMessage::from_json(msg_content)?;
rpc_notifier.send(msg_parsed)?;
}
}
}
// Send request to one node in peer_list
pub fn send_to(
&self,
recv_node: SocketAddr,
message_to_send: &RPCMessage,
) -> Result<(), Box<dyn Error>> {
//recv_node: host, port
let msg_parsed = json!(message_to_send).to_string();
let buffer = msg_parsed.as_bytes();
self.socket.send_to(&buffer, recv_node)?;
Ok(())
}
// Send request to all nodes in peer_list, eg, heartbeat
pub fn send_all(&self, message_to_send: &RPCMessage) -> Result<(), Box<dyn Error>> {
for peer in &self.peer_list {
let msg_parsed = json!(message_to_send).to_string();
let buffer = msg_parsed.as_bytes();
self.socket.send_to(&buffer, peer)?;
}
Ok(())
}
}
/// Bundles the shared UDP endpoint with the channel ends used to hand
/// received messages to the rest of the node.
pub struct Rpc {
pub cs: Arc<RPCCS>,
pub notifier: Option<Sender<RPCMessage>>,
pub receiver: Option<Receiver<RPCMessage>>,
}
|
#![no_std]
#![no_main]
extern crate atomic_queue;
use core::fmt::Write;
use rust_tm4c::tm4c_peripherals::get_peripherals;
use rust_tm4c::gpio;
use rust_tm4c::system_control;
use rust_tm4c::uart;
use rust_tm4c::timer;
use rust_tm4c::interrupt;
use atomic_queue::AtomicQueue;
const CPU_FREQ: u32 = 120_000_000;
const XTAL_FREQ: u32 = 25_000_000;
#[no_mangle]
/// Firmware entry point for the TM4C129: configures the system clock, LEDs,
/// the UART0 console, and two periodic timers with nested-priority
/// interrupts, then loops forever printing values cycled through a 16-slot
/// atomic queue. Never returns.
#[no_mangle]
pub fn main() -> ! {
let mut p = get_peripherals();
let scb = p.take_scb().unwrap();
let nvic = p.take_nvic().unwrap();
let sysctl = p.take_system_control().unwrap();
let gpion = p.take_gpion().unwrap();
let gpioa = p.take_gpioa().unwrap();
let mut uart0 = p.take_uart0().unwrap();
let timer0 = p.take_timer0().unwrap();
let timer1 = p.take_timer1().unwrap();
// Configure the CPU for the maximum operating frequency
// (returned effective frequency is currently unused).
let cpu_freq = sysctl.tm4c129_config_sysclk(CPU_FREQ, XTAL_FREQ);
// Set up LEDs
sysctl.enable_gpio_clock(system_control::GpioPort::GpioN);
gpion.configure_as_output(gpio::Pin::Pin0);
gpion.configure_as_output(gpio::Pin::Pin1);
// NOTE(review): `GPIO_BLOCK` is not declared anywhere in this chunk —
// confirm a `static mut GPIO_BLOCK` exists elsewhere (cf. TIMER0/TIMER1).
unsafe { GPIO_BLOCK = Some(gpion); }
// Set up the debug UART
sysctl.enable_gpio_clock(system_control::GpioPort::GpioA);
sysctl.enable_uart_clock(system_control::Uart::Uart0);
gpioa.select_alternate_function(gpio::Pin::Pin0, 1);
gpioa.select_alternate_function(gpio::Pin::Pin1, 1);
let _baud = uart0
.configure(
CPU_FREQ,
115200,
uart::Parity::None,
uart::StopBits::One,
uart::WordLength::Eight,
)
.unwrap();
let mut uart_driver = uart::drivers::UartBlockingDriver::new(&mut uart0, uart::drivers::NewlineMode::CRLF);
// Set up timers to trigger at slightly different frequencies
sysctl.enable_timer_clock(system_control::Timer::Timer0);
sysctl.enable_timer_clock(system_control::Timer::Timer1);
match timer0.set_periodic_mode_32bit(10000) { _ => {} }; // Timer 1 should trigger first, since it's a lower priority
match timer1.set_periodic_mode_32bit(10001) { _ => {} };
// Set up interrupts
scb.int_register(interrupt::IntType::Timer0A, timer0a_handler);
scb.int_register(interrupt::IntType::Timer1A, timer1a_handler);
nvic.clear_pending(interrupt::IntType::Timer0A);
nvic.clear_pending(interrupt::IntType::Timer1A);
nvic.set_priority(interrupt::IntType::Timer0A, 1); // Make timer 0 a lower priority so it can be preempted
nvic.set_priority(interrupt::IntType::Timer1A, 0);
nvic.enable(interrupt::IntType::Timer0A);
nvic.enable(interrupt::IntType::Timer1A);
// Create the queue
// (backed by stack storage; `main` never returns, so it lives forever)
let mut storage: [u8; 16] = [0; 16];
let ref queue: AtomicQueue<u8> = {
let m = AtomicQueue::new(&mut storage);
m
};
// Fill the first two slots in the queue with dummy variables
// so the first two pops in the loop below succeed.
match queue.push(0) {
Err(_) => panic!("No room to push?"),
Ok(_) => {},
}
match queue.push(0) {
Err(_) => panic!("No room to push?"),
Ok(_) => {},
}
// Give the timer interrupts access to the timers
unsafe { TIMER0 = Some(timer0); }
unsafe { TIMER1 = Some(timer1); }
let mut counter = 0_u8;
// Main loop: pop the value pushed two iterations ago, print it, push the
// current counter — keeping exactly two values queued at all times.
loop {
writeln!(uart_driver, "Hello, world! counter={} two_values_ago={}", counter, queue.pop().unwrap()).unwrap();
match queue.push(counter) {
Err(_) => panic!("No room to push?"),
Ok(_) => {},
}
counter = counter.wrapping_add(1);
}
}
static mut TIMER0: Option<&'static mut timer::Timer> = None;
pub unsafe extern "C" fn timer0a_handler() {
if let Some(t) = &mut TIMER_BLOCK {
match t.clear_timeout_interrupt_32bit() {
_ => {},
}
}
}
/// Storage handing Timer1 to its interrupt handler; written once in `main`
/// before interrupts are enabled.
static mut TIMER1: Option<&'static mut timer::Timer> = None;
/// Timer1A interrupt handler: acknowledge the timeout so the interrupt
/// does not immediately refire.
pub unsafe extern "C" fn timer1a_handler() {
if let Some(t) = &mut TIMER1 {
match t.clear_timeout_interrupt_32bit() {
_ => {},
}
}
}
|
//! Parameters used by the VM.
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(not(feature = "std"))]
use alloc::rc::Rc;
#[cfg(feature = "std")]
use std::rc::Rc;
use bigint::{Address, Gas, U256};
#[cfg(feature = "std")]
use block::Header;
#[derive(Debug, Clone)]
/// Block header.
/// The subset of a full block header that the VM's execution environment
/// needs (see the `From<&Header>` conversion below).
pub struct HeaderParams {
/// Block coinbase, the address that mines the block.
pub beneficiary: Address,
/// Block timestamp.
/// (presumably Unix seconds — the bare u64 does not say; confirm against `block::Header`)
pub timestamp: u64,
/// The current block number.
pub number: U256,
/// Difficulty of the block.
pub difficulty: U256,
/// Total block gas limit.
pub gas_limit: Gas,
}
#[cfg(feature = "std")]
impl<'a> From<&'a Header> for HeaderParams {
/// Project a full `block::Header` down to the VM-relevant fields.
/// Only available with `std`, since `block::Header` itself is.
fn from(val: &'a Header) -> HeaderParams {
HeaderParams {
beneficiary: val.beneficiary,
timestamp: val.timestamp,
number: val.number,
difficulty: val.difficulty,
gas_limit: val.gas_limit,
}
}
}
#[derive(Debug, Clone)]
/// A VM context. See the Yellow Paper for more information.
pub struct Context {
/// Address that is executing this runtime.
pub address: Address,
/// Caller of the runtime.
pub caller: Address,
/// Actual call target. The same as address unless for CALLCODE.
pub callee: Address,
/// Code to be executed.
pub code: Rc<Vec<u8>>,
/// Data associated with this execution.
pub data: Rc<Vec<u8>>,
/// Gas limit.
pub gas_limit: Gas,
/// Gas price.
pub gas_price: Gas,
/// The origin of the context. The same as caller when it is from
/// a transaction.
pub origin: Address,
/// Value passed for this runtime.
pub value: U256,
/// Apparent value in the execution context.
/// NOTE(review): the field name `apprent_value` is a typo for
/// "apparent_value" but is public API, so it is left unchanged.
pub apprent_value: U256,
/// Whether this represents a system call.
pub is_system: bool,
/// Whether this call is static.
pub is_static: bool,
}
pub use block_core::Log;
|
use std::time::Duration;
use bevy::{
diagnostic::{
Diagnostic, DiagnosticId, Diagnostics, FrameTimeDiagnosticsPlugin, LogDiagnosticsPlugin,
},
prelude::*,
render::camera::Camera,
};
use bevy_text_mesh::prelude::*;
use rand::prelude::*;
// NOTE! Custom (unlit) material used
// tessellation quality
const MESH_QUALITY: Quality = Quality::Low;
// how often new texts are spawned
const TEXT_SPAWN_INTERVAL: u64 = 125;
// how often spawned texts are updated
const TEXT_UPDATE_INTERVAL_MS: u64 = 1;
// initial wait time before starting spawn
const INITIAL_WAIT_MS: u64 = 500;
/// Bevy entry point: installs the text-mesh and diagnostics plugins and
/// registers the demo's startup and per-frame systems.
fn main() {
App::new()
.insert_resource(Msaa { samples: 4 })
.add_plugins(DefaultPlugins)
.add_plugin(TextMeshPlugin)
.add_plugin(FrameTimeDiagnosticsPlugin::default())
.add_plugin(LogDiagnosticsPlugin::default())
.add_startup_system(setup.system())
.add_startup_system(setup_text_mesh.system())
.add_system(spawn_meshes.system())
.add_system(update_text_mesh.system())
.add_system(rotate_camera.system())
// PostUpdate stage — presumably so HUD billboarding sees this frame's
// final camera transform; confirm against rotate_camera's stage.
.add_system_to_stage(CoreStage::PostUpdate, update_frame_rate.system())
.run();
}
/// Shared demo state: asset handles plus spawn/update counters.
struct SceneState {
// Font used by every spawned text mesh.
font: Handle<TextMeshFont>,
// Unlit black material applied to the spawned (non-HUD) meshes.
material: Handle<StandardMaterial>,
// Number of text meshes spawned so far.
text_count: usize,
// Cumulative count of text-mesh content updates.
text_update_count: usize,
}
/// Timers driving the demo's three periodic behaviors.
struct UpdateTimer {
spawn_new_text_timer: Timer,
fps_update_timer: Timer,
text_update_timer: Timer,
}
/// Marker for the continuously-updated "Time = ..." meshes.
struct EngineTime;
/// Marker for the FPS HUD text entity.
struct FPS;
/// Marker for the "N text items" HUD text entity.
struct TextCount;
/// Custom diagnostic id tracking how many text-mesh updates occurred.
pub const TEXT_MESH_UPDATES: DiagnosticId =
DiagnosticId::from_u128(1082410928401928501928509128509125);
/// Startup system: registers the custom diagnostic, loads the shared font
/// and unlit black material, spawns the FPS and text-count HUD meshes, and
/// inserts the `SceneState` / `UpdateTimer` resources.
fn setup_text_mesh(
mut diagnostics: ResMut<Diagnostics>,
mut materials: ResMut<Assets<StandardMaterial>>,
mut commands: Commands,
asset_server: Res<AssetServer>,
) {
// 20 = history length kept for this diagnostic.
diagnostics.add(Diagnostic::new(TEXT_MESH_UPDATES, "text_mesh_updates", 20));
let state = SceneState {
font: asset_server.load("fonts/FiraMono-Medium.ttf"),
text_count: 0,
text_update_count: 0,
// Unlit so spawned text renders flat black regardless of lighting.
material: materials.add(StandardMaterial {
base_color: Color::BLACK,
unlit: true,
..Default::default()
}),
};
// FPS HUD entity (green, large).
commands
.spawn_bundle(TextMeshBundle {
text_mesh: TextMesh {
text: String::from("FPS"),
style: TextMeshStyle {
font: state.font.clone(),
color: Color::rgb(0., 1., 0.),
font_size: SizeUnit::NonStandard(48.),
..Default::default()
},
..Default::default()
},
transform: Transform::from_xyz(0.0, 2.5, 0.),
..Default::default()
})
.insert(FPS);
// Text-count HUD entity (white, smaller, above the FPS text).
commands
.spawn_bundle(TextMeshBundle {
text_mesh: TextMesh {
text: String::from("Text count"),
style: TextMeshStyle {
font: state.font.clone(),
font_size: SizeUnit::NonStandard(18.),
color: Color::rgb(1., 1., 1.),
..Default::default()
},
..Default::default()
},
..Default::default()
},
transform: Transform::from_xyz(0., 3., 0.),
..Default::default()
})
.insert(TextCount);
commands.insert_resource(state);
commands.insert_resource(UpdateTimer {
// Non-repeating warm-up; spawn_meshes re-arms it with the regular interval.
spawn_new_text_timer: Timer::new(Duration::from_millis(INITIAL_WAIT_MS), false),
text_update_timer: Timer::new(Duration::from_millis(TEXT_UPDATE_INTERVAL_MS), true),
// how often FPS text is updated
fps_update_timer: Timer::new(Duration::from_millis(150), true),
});
}
/// Spawns one new randomly-placed, randomly-oriented text mesh each time the
/// spawn timer fires, then re-arms the timer with the regular spawn interval
/// (the initial timer carries the warm-up wait instead).
fn spawn_meshes(
mut commands: Commands,
mut state: ResMut<SceneState>,
time: Res<Time>,
mut timer: ResMut<UpdateTimer>,
) {
if timer
.spawn_new_text_timer
.tick(time.delta())
.just_finished()
{
// Re-arm with the steady-state interval (non-repeating, re-created each fire).
timer.spawn_new_text_timer = Timer::new(Duration::from_millis(TEXT_SPAWN_INTERVAL), false);
let mut rng = rand::thread_rng(); // how performant is this?
// Random position in a 4x2x4 box, random down-scaled size, random facing.
let transform = Transform {
translation: Vec3::new(
rng.gen_range(-1.0..1.0) * 2.0,
rng.gen::<f32>() * 2.0,
rng.gen_range(-1.0..1.0) * 2.0,
),
scale: Vec3::ONE * (1. - rng.gen::<f32>() * 0.8) * 0.5,
..Default::default()
}
.looking_at(
Vec3::new(rng.gen::<f32>(), rng.gen::<f32>(), rng.gen::<f32>()),
Vec3::Y,
);
commands
.spawn_bundle(TextMeshBundle {
text_mesh: TextMesh {
// Starts empty; update_text_mesh fills in the time string.
text: String::from(""),
style: TextMeshStyle {
font: state.font.clone(),
mesh_quality: MESH_QUALITY,
..Default::default()
},
..Default::default()
},
transform,
..Default::default()
})
.insert(EngineTime)
.insert(state.material.clone());
state.text_count += 1;
}
}
/// Rewrites every spawned (`EngineTime`) text mesh with the current engine
/// time when the update timer fires, counting how many meshes actually
/// changed and reporting the cumulative count as a diagnostic.
fn update_text_mesh(
    mut diagnostics: ResMut<Diagnostics>,
    mut text_meshes: Query<&mut TextMesh, With<EngineTime>>,
    time: Res<Time>,
    mut timer: ResMut<UpdateTimer>,
    mut state: ResMut<SceneState>,
) {
    let mut update_count = 0;
    if timer.text_update_timer.tick(time.delta()).just_finished() {
        // The text is identical for every mesh this frame, so format it once
        // outside the loop. (The original also wrapped the already-owned
        // `format!` result in a redundant `String::from`.)
        let updated_text = format!("Time = {:.3}", time.seconds_since_startup());
        for mut text_mesh in text_meshes.iter_mut() {
            // Only touch meshes whose text actually changed, so mutation
            // detection doesn't trigger needless re-tessellation.
            if text_mesh.text != updated_text {
                text_mesh.text = updated_text.clone();
                update_count += 1;
            }
        }
    }
    state.text_update_count += update_count;
    // Recorded every frame, even when no update happened this tick.
    diagnostics.add_measurement(TEXT_MESH_UPDATES, state.text_update_count as f64);
}
/// Orbits every camera around the scene origin at a fixed radius, keeping it
/// aimed at a point slightly above the ground plane.
fn rotate_camera(mut camera: Query<&mut Transform, With<Camera>>, time: Res<Time>) {
    for mut camera in camera.iter_mut() {
        // Angle advances with wall-clock time; the constant offset sets the
        // starting position of the orbit.
        let angle = time.seconds_since_startup() as f32 / 2. + 1.55 * std::f32::consts::PI;
        let distance = 6.5;
        // `angle` is already f32, so the original `as f32` casts on
        // sin()/cos() were redundant and have been dropped.
        camera.translation = Vec3::new(
            angle.sin() * distance,
            camera.translation.y,
            angle.cos() * distance,
        );
        *camera = camera.looking_at(Vec3::new(0.0, 1.5, 0.), Vec3::Y);
    }
}
/// Updates the two HUD meshes (FPS and text count) when the HUD timer fires,
/// and every frame re-orients both to face the camera (billboarding).
fn update_frame_rate(
diagnostics: Res<Diagnostics>,
time: Res<Time>,
mut timer: ResMut<UpdateTimer>,
mut fps_text: Query<(Entity, &mut TextMesh, Option<&FPS>), Or<(With<FPS>, With<TextCount>)>>,
camera_entity: Query<Entity, With<Camera>>,
mut transform_query: Query<&mut Transform>,
state: Res<SceneState>,
) {
for (text_mesh_entity, mut text_mesh, fps) in fps_text.iter_mut() {
// NOTE(review): the timer is ticked once per matched entity per frame;
// with two HUD entities it advances twice per frame — confirm intended.
if timer.fps_update_timer.tick(time.delta()).just_finished() {
// Presence of the FPS marker distinguishes the two HUD entities.
if fps.is_some() {
let fps = diagnostics
.get_measurement(FrameTimeDiagnosticsPlugin::FPS)
.unwrap();
text_mesh.text = format!("FPS={}", fps.value.round() as usize);
} else {
text_mesh.text = format!("{} text items", state.text_count);
}
}
let camera_entity = camera_entity.iter().next().unwrap();
// Clone releases the borrow so the text transform can be fetched
// mutably from the same query below.
let camera_transform = transform_query.get_mut(camera_entity).unwrap().clone();
let mut transform = transform_query.get_mut(text_mesh_entity).unwrap();
// eh - why negative?
*transform = transform.looking_at(-camera_transform.translation, Vec3::Y);
}
}
/// set up a simple 3D scene
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    // Ground plane.
    let plane_mesh = meshes.add(Mesh::from(shape::Plane { size: 5.0 }));
    let plane_material = materials.add(Color::rgb(0.3, 0.5, 0.3).into());
    commands.spawn_bundle(PbrBundle {
        mesh: plane_mesh,
        material: plane_material,
        ..Default::default()
    });
    // Single point light above the scene.
    commands.spawn_bundle(PointLightBundle {
        transform: Transform::from_xyz(4.0, 8.0, 4.0),
        ..Default::default()
    });
    // Perspective camera looking at the origin.
    let camera_transform = Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y);
    commands.spawn_bundle(PerspectiveCameraBundle {
        transform: camera_transform,
        ..Default::default()
    });
}
|
//! Duration
use serde::{Deserialize, Serialize};
/// Duration : A pair consisting of length of time and the unit of time
/// measured. It is the atomic unit from which all duration literals are
/// composed.
///
/// All fields are optional and omitted from JSON when `None`.
#[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct Duration {
/// Type of AST node
#[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub r#type: Option<String>,
/// Duration Magnitude
/// (signed i32 — negative magnitudes are representable; confirm allowed)
#[serde(skip_serializing_if = "Option::is_none")]
pub magnitude: Option<i32>,
/// Duration unit
/// (a unit-name string; the exact vocabulary is not visible here)
#[serde(skip_serializing_if = "Option::is_none")]
pub unit: Option<String>,
}
impl Duration {
/// A pair consisting of length of time and the unit of time measured. It is
/// the atomic unit from which all duration literals are composed.
pub fn new() -> Self {
Self::default()
}
}
|
use test_winrt_interfaces::*;
use windows::core::*;
use Component::Interfaces::*;
use Windows::Win32::Foundation::E_NOINTERFACE;
// Minimal IProperty implementation backed by a single i32.
#[implement(Component::Interfaces::IProperty)]
struct Property(i32);
#[allow(non_snake_case)]
impl Property {
// Getter for the wrapped value.
fn Property(&self) -> Result<i32> {
Ok(self.0)
}
// Setter for the wrapped value.
fn SetProperty(&mut self, value: i32) -> Result<()> {
self.0 = value;
Ok(())
}
}
// Round-trips a value through the generated IProperty interface wrapper.
#[test]
fn test() -> Result<()> {
let p: IProperty = Property(0).into();
assert!(p.Property()? == 0);
p.SetProperty(123)?;
assert!(p.Property()? == 123);
Ok(())
}
// Exercises required-interface resolution: a "good" implementation exposes
// both IRequires and its required IRequired; a "bad" one omits the latter.
#[test]
fn required() -> Result<()> {
let good: IRequires = Test::GoodRequires()?;
good.Requires()?;
good.Required()?;
let bad: IRequires = Test::BadRequires()?;
bad.Requires()?;
// Even though IRequires requires IRequired, this "bad" implementation doesn't implement the latter.
// This test ensures that this failure bubbles up properly and can be handled. This also and mainly
// validates feature detection can be used to make use of versioned APIs gracefully.
assert!(bad.Required().unwrap_err().code() == E_NOINTERFACE);
Ok(())
}
|
use std::fs::File;
use std::io::prelude::*;
use std::io;
use std::io::Write as iow;
use std::fmt::Write;
use std::collections::HashMap;
use time;
//Takes a string that contains the whole contents of
//the accounts description file and returns a vector
//of tuples containing the (acct nr , name). Vec was used
//to preserve ordering.
//Panics on malformed (non "nr;name") data lines.
fn parse_accts_desc(file_contents: &mut String) -> Vec<(i32, String)>{
    let mut accts: Vec<&str> = file_contents.split('\n').collect();
    let mut results: Vec<(i32, String)> = Vec::new();
    // split_off(1) drops the header line. Blank lines are skipped so a
    // trailing newline at end-of-file no longer panics indexing entry[1].
    for i in accts.split_off(1) {
        if i.trim().is_empty() {
            continue;
        }
        let entry: Vec<&str> = i.split(';').collect();
        let num: i32 = entry[0].parse().unwrap();
        results.push((num, entry[1].to_string()));
    }
    results
}
//Parses the account journal file and
//returns an ordered vector of tuples
// being (acct nr, date, debit, credit).
//Dates are "Mon-YY" (e.g. "Jan-05"); panics on malformed data lines.
fn parse_acct_journal(file_contents: &mut String) ->Vec<(i32, time::Tm, i32, i32)>{
    let mut trans: Vec<&str> = file_contents.split('\n').collect();
    let mut results: Vec<(i32, time::Tm, i32, i32)> = Vec::new();
    // split_off(1) drops the header line. Blank lines are skipped so a
    // trailing newline at end-of-file no longer panics parsing fields.
    for i in trans.split_off(1) {
        if i.trim().is_empty() {
            continue;
        }
        let entry: Vec<&str> = i.split(';').collect();
        let acct_nr: i32 = entry[0].parse().unwrap();
        let date: time::Tm = time::strptime(entry[1], "%b-%y").unwrap();
        let debit: i32 = entry[2].parse().unwrap();
        let credit: i32 = entry[3].parse().unwrap();
        results.push((acct_nr, date, debit, credit));
    }
    results
}
//Verifies that the journal as a whole is balanced.
//Balanced means that for the whole journal,
// total debits = total credits
fn verify_journal_balanced(journal: &Vec<(i32,time::Tm,i32,i32)>) -> Result<&str, &str>{
    // Sum (credit - debit) over every entry; zero means balanced.
    let balance: i32 = journal.iter().map(|entry| entry.3 - entry.2).sum();
    match balance {
        0 => Ok("Journal Balanced"),
        _ => Err("Journal not balanced"),
    }
}
//get total debits from account journal
//if acctnr=-1, get total debits for entire journal, otherwise
//get the total debits for the acctnr specified.
//Only entries whose (year, month) falls inclusively between start and end
//are counted.
fn total_debits(journal: &Vec<(i32, time::Tm, i32, i32)>, acctnr: i32, start: &time::Tm, end: &time::Tm) -> i32{
    let mut debit: i32 = 0;
    for entry in journal {
        // A negative acctnr means "all accounts".
        if acctnr >= 0 && acctnr != entry.0 {
            continue;
        }
        // BUG FIX: years and months were previously compared independently,
        // so ranges spanning a year boundary dropped valid entries (e.g.
        // Dec-01 was excluded from Nov-01..Feb-02 because 11 > 1). Comparing
        // (year, month) pairs lexicographically gives a true date range.
        if (entry.1.tm_year, entry.1.tm_mon) >= (start.tm_year, start.tm_mon)
            && (entry.1.tm_year, entry.1.tm_mon) <= (end.tm_year, end.tm_mon)
        {
            debit += entry.2;
        }
    }
    debit
}
//get total credits from account journal
//if acctnr=-1, get total credits for entire journal, otherwise
//get the total credits for the acctnr specified.
//Only entries whose (year, month) falls inclusively between start and end
//are counted.
fn total_credits(journal: &Vec<(i32, time::Tm, i32, i32)>, acctnr: i32, start: &time::Tm, end: &time::Tm) -> i32{
    let mut credit: i32 = 0;
    for entry in journal {
        // A negative acctnr means "all accounts".
        if acctnr >= 0 && acctnr != entry.0 {
            continue;
        }
        // BUG FIX: years and months were previously compared independently,
        // dropping entries from ranges that span a year boundary (e.g.
        // Dec-01 inside Nov-01..Feb-02). Compare (year, month) pairs
        // lexicographically instead.
        if (entry.1.tm_year, entry.1.tm_mon) >= (start.tm_year, start.tm_mon)
            && (entry.1.tm_year, entry.1.tm_mon) <= (end.tm_year, end.tm_mon)
        {
            credit += entry.3;
        }
    }
    credit
}
//Process the user input string to get the parameters for what
//to print to the screen. input is first acct, last acct, first period,
//last period, and format. Format can be "TEXT" or "CSV"
//
//"*" in any of the first four fields selects the account-list/journal
//extremes. Panics on malformed input (unwraps on parse).
fn process_user_input(input: &String, journal: &Vec<(i32, time::Tm, i32, i32)>, acct_list: &Vec<(i32, String)> ) -> (i32,i32,time::Tm, time::Tm, String){
let fields: Vec<&str> = input.split(' ').collect();
let start_acct;
let end_acct ;
let start_period: time::Tm;
let end_period: time::Tm ;
if fields[0].to_string() == "*"{
start_acct = acct_list[0].0;
} else {
let mut st_acct = fields[0].to_string();
//pad zeros as needed until 4 digits
//NOTE(review): zeros are appended on the RIGHT, so "12" becomes
//account 1200 rather than 0012 — confirm this matches the intended
//account-number convention.
while st_acct.len()<4{
st_acct.push('0');
}
start_acct = st_acct.parse().unwrap();
}
if fields[1].to_string() == "*"{
end_acct = acct_list[acct_list.len() - 1].0;
} else {
let mut e_acct = fields[1].to_string();
//pad zeros as needed until 4 digits
while e_acct.len()<4{
e_acct.push('0');
}
end_acct = e_acct.parse().unwrap();
}
if fields[2].to_string() == "*"{
//earliest journal entry (assumes the journal is date-ordered)
start_period = journal[0].1
} else{
//parse as a time object %b-%y
start_period = time::strptime(fields[2], "%b-%y").unwrap();
}
if fields[3].to_string() == "*"{
//latest journal entry
end_period = journal[journal.len()-1].1;
} else{
end_period = time::strptime(fields[3], "%b-%y").unwrap() ;
}
(start_acct, end_acct, start_period, end_period, fields[4].to_string())
}
//convenience method for printing data in the proper
//column width. Pads spaces to the end of the string
//to meet the total column size desired. Text already longer than
//`size` is returned unchanged (never truncated).
fn pad_column(text: &str, size: usize ) -> String {
    // `{:<size$}` left-aligns and space-pads to a minimum width, replacing
    // the original push-one-space-at-a-time loop with the same result.
    format!("{:<1$}", text, size)
}
///appends the title line of the sheet to the buffer. the type parameter
///specifies whether it is the text version or the csv version
fn append_title_line(buffer: &mut String, style: &String) {
    if style.trim() == "TEXT" {
        buffer.push_str("ACCOUNT |DESCRIPTION | DEBIT| CREDIT| BALANCE|\n");
        buffer.push_str("-----------------------------------------------------------------------------------------------------------------\n");
    } else {
        // Any non-"TEXT" style produces the CSV header.
        buffer.push_str("ACCOUNT;DESCRIPTION;DEBIT;CREDIT;BALANCE;\n");
    }
}
///returns a boolean based on whether the
///target date is between start and end date inclusive
///(compared at month granularity).
fn date_in_range(target: &time::Tm, start: &time::Tm, end: &time::Tm) -> bool {
    // BUG FIX: years and months were previously compared independently,
    // which rejected dates inside ranges that span a year boundary (e.g.
    // Dec-01 within Nov-01..Feb-02). Compare (year, month) pairs
    // lexicographically instead.
    (target.tm_year, target.tm_mon) >= (start.tm_year, start.tm_mon)
        && (target.tm_year, target.tm_mon) <= (end.tm_year, end.tm_mon)
}
///appends a new string to the buffer on the same line in the appropriate format
///if text format, automatically pads the field to be 20 chars long.
fn append_data(buffer: &mut String, data: &String, style: &String){
    if style.trim() == "TEXT" {
        buffer.push_str(&pad_column(&data, 20));
    } else {
        // CSV (or any other style): append the raw field unpadded.
        buffer.push_str(data);
    }
}
//prints the output of the user requested data
//in the appropriate Text format to stdout
//
//`input` is the (start acct, end acct, start period, end period, format)
//tuple produced by process_user_input. One row is printed per account
//that has at least one journal entry in range, plus a totals row.
fn text_output(input: &(i32,i32,time::Tm, time::Tm, String), journal: &Vec<(i32, time::Tm, i32, i32)>, acct_list: &Vec<(i32, String)>){
//As a convenience method, create a hash map to look up descriptions for acct numbers in constant time
let mut acct_map: HashMap<i32, String> = HashMap::new();
for acct in acct_list{
acct_map.insert(acct.clone().0, acct.clone().1);
}
//store periods in convienence vars
let start = input.2;
let end = input.3;
//track displayed accounts and debit/credit totals
let mut displayed_accts: Vec<i32> = Vec::new();
let mut acct_debits : i32 = 0;
let mut acct_credits : i32 = 0;
//set separator string
let separator = match input.4.trim(){
"TEXT" => {
String::from(" | ")
},
_=> {
String::from(";")
}
};
let mut output = String::new();
write!(&mut output, "Balance from account {} to {} from period {} to {}\n\n\n", input.0, input.1, time::strftime("%b-%y", &input.2).unwrap(),
time::strftime("%b-%y", &input.3).unwrap() ).unwrap();
output.push('\n');
output.push_str("Balance: \n");
append_title_line(&mut output, &input.4);
for entry in journal{
//check if acct number is in range
if entry.0 >= input.0 && entry.0 <= input.1{
//check to make sure we haven't already displayed this account
if ! displayed_accts.contains(&entry.0){
// check if date is in range
if date_in_range(&entry.1, &start, &end){
append_data(&mut output, &entry.0.to_string(), &input.4 );
output.push_str(&separator);
//description column (silently omitted when the acct nr is unknown)
match acct_map.get(&entry.0){
Some(desc) => {
append_data(&mut output, &desc, &input.4);
output.push_str(&separator);
},
None => {
}
};
//per-account totals over the requested period
let debits = total_debits(&journal, entry.0, &start, &end );
let credits = total_credits(&journal, entry.0, &start, &end );
append_data(&mut output, &debits.to_string(), &input.4);
output.push_str(&separator);
append_data(&mut output, &credits.to_string(), &input.4);
output.push_str(&separator);
//balance column = debits - credits
append_data(&mut output, &(debits-credits).to_string(), &input.4);
output.push_str(separator.trim());
output.push_str("\n");
//track that we already displayed this account
displayed_accts.push(entry.0);
acct_debits += debits;
acct_credits += credits;
}
}
}
}
//append totals line
append_data(&mut output, &"TOTAL".to_string(), &input.4);
output.push_str(&separator);
append_data(&mut output, &" ".to_string(), &input.4);
output.push_str(&separator);
append_data(&mut output, &mut acct_debits.to_string(), &input.4);
output.push_str(&separator);
append_data(&mut output, &acct_credits.to_string(), &input.4);
output.push_str(&separator);
append_data(&mut output, &(acct_debits-acct_credits).to_string(), &input.4);
output.push_str(separator.trim());
output.push_str("\n");
println!("{}",output);
}
///Solution. Read input account journal and account description list,
///prompt user for input string, display output based on user input.
///
///Exits early (after printing the error) if either input file is missing
///or unreadable, or if the journal does not balance.
pub fn solution(){
//read and parse the account journal file
let mut file = match File::open("input/little_accountant.input.1") {
Ok(f) => f ,
Err(e) => {
println!("{}", e);
return ;
}
};
let mut acct_journal = String::new();
match file.read_to_string(&mut acct_journal){
Ok(_) => {
//continue
},
Err(e) => {
println!("{}", e);
return ; //TODO return something like a result instead
}
};
let journal = parse_acct_journal(&mut acct_journal);
//read and parse the account description file
let mut file = match File::open("input/little_accountant.input.2") {
Ok(f) => f,
Err(e) => {
println!("{}",e);
return ;
}
};
let mut accts = String::new();
match file.read_to_string(&mut accts ){
Ok(_) => {
//continue
},
Err(e) => {
println!("{}", e);
return ;
}
};
let accts = parse_accts_desc(&mut accts);
//verify account journal balanced
match verify_journal_balanced(& journal) {
Ok(_) => {
//continue
},
Err(e) => {
println!("{}",e);
return;
}
}
//read and process user input
//format: "<first acct> <last acct> <first period> <last period> <TEXT|CSV>"
let mut user_input = String::new();
print!("> ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut user_input).unwrap();
let user_input = process_user_input(&user_input, &journal, &accts);
//print totals for journal
//(the Jan-01..Dec-99 range is meant to span every possible entry date)
println!("Total debits: {} Total Credits: {}", total_debits(&journal, -1, &time::strptime( "Jan-01", "%b-%y").unwrap(), &time::strptime("Dec-99", "%b-%y").unwrap()),
total_credits(&journal, -1, &time::strptime("Jan-01","%b-%y").unwrap(), &time::strptime("Dec-99","%b-%y").unwrap()));
text_output(&user_input, &journal, & accts);
} |
#![no_std]
#![feature(asm)]
#![deny(warnings, unused_must_use)]
#[macro_use]
extern crate alloc;
#[macro_use]
extern crate log;
use {
alloc::{boxed::Box, sync::Arc, vec::Vec},
core::{future::Future, pin::Pin},
xmas_elf::ElfFile,
zircon_object::{dev::*, ipc::*, object::*, task::*, util::elf_loader::*, vm::*},
zircon_syscall::Syscall,
};
mod kcounter;
// These describe userboot itself
const K_PROC_SELF: usize = 0;
const K_VMARROOT_SELF: usize = 1;
// Essential job and resource handles
const K_ROOTJOB: usize = 2;
const K_ROOTRESOURCE: usize = 3;
// Essential VMO handles
const K_ZBI: usize = 4;
// NOTE(review): the gaps in these indices (5..8, 11..15) suggest multiple
// vDSO and instrumentation-data handles occupy the intervening slots —
// confirm against Zircon's userboot handle-table layout.
const K_FIRSTVDSO: usize = 5;
const K_CRASHLOG: usize = 8;
const K_COUNTERNAMES: usize = 9;
const K_COUNTERS: usize = 10;
const K_FISTINSTRUMENTATIONDATA: usize = 11;
// Total number of handles passed to userboot.
const K_HANDLECOUNT: usize = 15;
/// Program images to run.
pub struct Images<T: AsRef<[u8]>> {
/// ELF image of the userboot bootstrap program.
pub userboot: T,
/// ELF image of the vDSO mapped into user processes.
pub vdso: T,
/// Zircon Boot Image blob handed to userboot.
pub zbi: T,
}
/// Load and start the first user process ("userboot").
///
/// Maps the userboot and vDSO ELF images into a fresh process, prepares the
/// ZBI, stack, kcounter and crashlog VMOs, fills the well-known bootstrap
/// handle table, sends it over the bootstrap channel, and starts the main
/// thread. Returns the new process.
pub fn run_userboot(images: &Images<impl AsRef<[u8]>>, cmdline: &str) -> Arc<Process> {
    let job = Job::root();
    let proc = Process::create(&job, "userboot").unwrap();
    let thread = Thread::create(&proc, "userboot").unwrap();
    // Root resource spanning 0..0x1_0000_0000.
    let resource = Resource::create(
        "root",
        ResourceKind::ROOT,
        0,
        0x1_0000_0000,
        ResourceFlags::empty(),
    );
    let vmar = proc.vmar();
    // userboot: load the ELF at an arbitrary address; remember its size so
    // the vDSO can be placed directly after it.
    let (entry, userboot_size) = {
        let elf = ElfFile::new(images.userboot.as_ref()).unwrap();
        let size = elf.load_segment_size();
        let vmar = vmar
            .allocate(None, size, VmarFlags::CAN_MAP_RXW, PAGE_SIZE)
            .unwrap();
        vmar.load_from_elf(&elf).unwrap();
        // Entry point = image base + ELF entry offset.
        (vmar.addr() + elf.header.pt2.entry_point() as usize, size)
    };
    // vdso
    let vdso_vmo = {
        let elf = ElfFile::new(images.vdso.as_ref()).unwrap();
        // Round the backing VMO size up to whole pages.
        let vdso_vmo = VmObject::new_paged(images.vdso.as_ref().len() / PAGE_SIZE + 1);
        vdso_vmo.write(0, images.vdso.as_ref()).unwrap();
        let size = elf.load_segment_size();
        // Map the vDSO immediately after userboot (SPECIFIC address).
        let vmar = vmar
            .allocate_at(
                userboot_size,
                size,
                VmarFlags::CAN_MAP_RXW | VmarFlags::SPECIFIC,
                PAGE_SIZE,
            )
            .unwrap();
        vmar.map_from_elf(&elf, vdso_vmo.clone()).unwrap();
        // LibOS build: patch the vDSO so its syscall entry jumps into the
        // host-side handler instead of issuing real syscalls.
        #[cfg(feature = "std")]
        {
            let offset = elf
                .get_symbol_address("zcore_syscall_entry")
                .expect("failed to locate syscall entry") as usize;
            let syscall_entry = &(kernel_hal_unix::syscall_entry as usize).to_ne_bytes();
            // fill syscall entry x3
            vdso_vmo.write(offset, syscall_entry).unwrap();
            vdso_vmo.write(offset + 8, syscall_entry).unwrap();
            vdso_vmo.write(offset + 16, syscall_entry).unwrap();
        }
        vdso_vmo
    };
    // zbi
    let zbi_vmo = {
        let vmo = VmObject::new_paged(images.zbi.as_ref().len() / PAGE_SIZE + 1);
        vmo.write(0, images.zbi.as_ref()).unwrap();
        vmo.set_name("zbi");
        vmo
    };
    // stack
    const STACK_PAGES: usize = 8;
    let stack_vmo = VmObject::new_paged(STACK_PAGES);
    let flags = MMUFlags::READ | MMUFlags::WRITE | MMUFlags::USER;
    let stack_bottom = vmar
        .map(None, stack_vmo.clone(), 0, stack_vmo.len(), flags)
        .unwrap();
    #[cfg(target_arch = "x86_64")]
    // WARN: align stack to 16B, then emulate a 'call' (push rip)
    let sp = stack_bottom + stack_vmo.len() - 8;
    #[cfg(target_arch = "aarch64")]
    let sp = stack_bottom + stack_vmo.len();
    // channel: userboot gets one end as its bootstrap handle; the kernel
    // writes the startup message into the other end below.
    let (user_channel, kernel_channel) = Channel::create();
    let handle = Handle::new(user_channel, Rights::DEFAULT_CHANNEL);
    // Pre-fill the table with rights-less placeholders, then overwrite the
    // well-known indices with the real handles.
    let mut handles = vec![Handle::new(proc.clone(), Rights::empty()); K_HANDLECOUNT];
    handles[K_PROC_SELF] = Handle::new(proc.clone(), Rights::DEFAULT_PROCESS);
    handles[K_VMARROOT_SELF] = Handle::new(proc.vmar(), Rights::DEFAULT_VMAR | Rights::IO);
    handles[K_ROOTJOB] = Handle::new(job, Rights::DEFAULT_JOB);
    handles[K_ROOTRESOURCE] = Handle::new(resource, Rights::DEFAULT_RESOURCE);
    handles[K_ZBI] = Handle::new(zbi_vmo, Rights::DEFAULT_VMO);
    // set up handles[K_FIRSTVDSO..K_LASTVDSO + 1]
    // NOTE(review): hard-coded offset/size of the vDSO constants blob; must
    // match the vDSO build — confirm against the vDSO image layout.
    const VDSO_DATA_CONSTANTS: usize = 0x4a50;
    const VDSO_DATA_CONSTANTS_SIZE: usize = 0x78;
    // Assumes vdso_constants() is plain-old-data of exactly
    // VDSO_DATA_CONSTANTS_SIZE bytes — TODO confirm.
    let constants: [u8; VDSO_DATA_CONSTANTS_SIZE] =
        unsafe { core::mem::transmute(kernel_hal::vdso_constants()) };
    vdso_vmo.write(VDSO_DATA_CONSTANTS, &constants).unwrap();
    vdso_vmo.set_name("vdso/full");
    let vdso_test1 = vdso_vmo.create_child(false, 0, vdso_vmo.len()).unwrap();
    vdso_test1.set_name("vdso/test1");
    let vdso_test2 = vdso_vmo.create_child(false, 0, vdso_vmo.len()).unwrap();
    vdso_test2.set_name("vdso/test2");
    handles[K_FIRSTVDSO] = Handle::new(vdso_vmo, Rights::DEFAULT_VMO | Rights::EXECUTE);
    handles[K_FIRSTVDSO + 1] = Handle::new(vdso_test1, Rights::DEFAULT_VMO | Rights::EXECUTE);
    handles[K_FIRSTVDSO + 2] = Handle::new(vdso_test2, Rights::DEFAULT_VMO | Rights::EXECUTE);
    // TODO: use correct CrashLogVmo handle
    let crash_log_vmo = VmObject::new_paged(1);
    crash_log_vmo.set_name("crashlog");
    handles[K_CRASHLOG] = Handle::new(crash_log_vmo, Rights::DEFAULT_VMO);
    let (counter_name_vmo, kcounters_vmo) = kcounter::create_kcounter_vmo();
    handles[K_COUNTERNAMES] = Handle::new(counter_name_vmo, Rights::DEFAULT_VMO);
    handles[K_COUNTERS] = Handle::new(kcounters_vmo, Rights::DEFAULT_VMO);
    // TODO: use correct Instrumentation data handle
    let instrumentation_data_vmo = VmObject::new_paged(0);
    instrumentation_data_vmo.set_name("UNIMPLEMENTED_VMO");
    handles[K_FISTINSTRUMENTATIONDATA] =
        Handle::new(instrumentation_data_vmo.clone(), Rights::DEFAULT_VMO);
    handles[K_FISTINSTRUMENTATIONDATA + 1] =
        Handle::new(instrumentation_data_vmo.clone(), Rights::DEFAULT_VMO);
    handles[K_FISTINSTRUMENTATIONDATA + 2] =
        Handle::new(instrumentation_data_vmo.clone(), Rights::DEFAULT_VMO);
    handles[K_FISTINSTRUMENTATIONDATA + 3] =
        Handle::new(instrumentation_data_vmo, Rights::DEFAULT_VMO);
    // Bootstrap message: cmdline with ':' separators turned into NULs plus a
    // trailing NUL, followed by the handle table.
    let data = Vec::from(cmdline.replace(':', "\0") + "\0");
    let msg = MessagePacket { data, handles };
    kernel_channel.write(msg).unwrap();
    proc.start(&thread, entry, sp, Some(handle), 0, thread_fn)
        .expect("failed to start main thread");
    proc
}
// Kernel counters exported through the kcounters VMO (see the `kcounter`
// module): total user-mode exits, timer ticks, and page faults.
kcounter!(EXCEPTIONS_USER, "exceptions.user");
kcounter!(EXCEPTIONS_TIMER, "exceptions.timer");
kcounter!(EXCEPTIONS_PGFAULT, "exceptions.pgfault");
/// Main loop of one user thread: repeatedly enters user mode and dispatches
/// the resulting trap (syscall, interrupt, page fault, exception) until the
/// thread is dying.
async fn new_thread(thread: CurrentThread) {
    kernel_hal::Thread::set_tid(thread.id(), thread.proc().id());
    // Deliver the "starting" debugger exceptions before the first user entry.
    if thread.is_first_thread() {
        thread
            .handle_exception(ExceptionType::ProcessStarting)
            .await;
    };
    thread.handle_exception(ExceptionType::ThreadStarting).await;
    loop {
        let mut cx = thread.wait_for_run().await;
        if thread.state() == ThreadState::Dying {
            break;
        }
        trace!("go to user: {:#x?}", cx);
        debug!("switch to {}|{}", thread.proc().name(), thread.name());
        let tmp_time = kernel_hal::timer_now().as_nanos();
        // * Attention
        // The code will enter a magic zone from here.
        // `context run` will be executed into a wrapped library where context switching takes place.
        // The details are available in the trapframe crate on crates.io.
        kernel_hal::context_run(&mut cx);
        // Back from the userspace
        // Account the wall-clock time spent in user mode to this thread.
        let time = kernel_hal::timer_now().as_nanos() - tmp_time;
        thread.time_add(time);
        trace!("back from user: {:#x?}", cx);
        EXCEPTIONS_USER.add(1);
        // Copy trap info out of the context before `end_running` gives it up.
        let trap_num = cx.trap_num;
        #[cfg(target_arch = "x86_64")]
        let error_code = cx.error_code;
        thread.end_running(cx);
        #[cfg(target_arch = "aarch64")]
        match trap_num {
            0 => handle_syscall(&thread).await,
            _ => unimplemented!(),
        }
        #[cfg(target_arch = "x86_64")]
        match trap_num {
            // Software-interrupt vector used as the syscall gate.
            0x100 => handle_syscall(&thread).await,
            // External interrupt vectors 0x20..=0x3f.
            0x20..=0x3f => {
                kernel_hal::InterruptManager::handle(trap_num as u8);
                // 0x20 is the timer tick: count it and yield the CPU.
                if trap_num == 0x20 {
                    EXCEPTIONS_TIMER.add(1);
                    kernel_hal::yield_now().await;
                }
            }
            // #PF page fault: decode the x86 error code into access flags.
            0xe => {
                EXCEPTIONS_PGFAULT.add(1);
                let mut flags = MMUFlags::empty();
                if error_code & 0x01 != 0 {
                    flags.insert(MMUFlags::READ)
                }
                if error_code & 0x02 != 0 {
                    flags.insert(MMUFlags::WRITE)
                }
                if error_code & 0x04 != 0 {
                    flags.insert(MMUFlags::USER)
                }
                if error_code & 0x08 != 0 {
                    warn!("page table entry has reserved bits set!")
                }
                if error_code & 0x10 != 0 {
                    flags.insert(MMUFlags::EXECUTE)
                }
                let fault_vaddr = kernel_hal::fetch_fault_vaddr();
                info!(
                    "page fault from user mode {:#x} {:#x?} {:?}",
                    fault_vaddr, error_code, flags
                );
                // Let the VMAR resolve the fault; unresolvable faults become
                // a fatal exception for the thread.
                let vmar = thread.proc().vmar();
                if let Err(err) = vmar.handle_page_fault(fault_vaddr, flags) {
                    error!("handle_page_fault error: {:?}", err);
                    thread.handle_exception(ExceptionType::FatalPageFault).await;
                }
            }
            // #DF double fault from user mode is unrecoverable.
            0x8 => thread.with_context(|cx| {
                panic!("Double fault from user mode! {:#x?}", cx);
            }),
            // Remaining vectors map onto generic exception types.
            num => {
                let type_ = match num {
                    0x1 => ExceptionType::HardwareBreakpoint,
                    0x3 => ExceptionType::SoftwareBreakpoint,
                    0x6 => ExceptionType::UndefinedInstruction,
                    0x17 => ExceptionType::UnalignedAccess,
                    _ => ExceptionType::General,
                };
                thread.handle_exception(type_).await;
            }
        }
    }
    thread.handle_exception(ExceptionType::ThreadExiting).await;
}
/// Adapter handed to `Process::start` / `Syscall`: wraps `new_thread` into a
/// boxed, pinned, type-erased future the executor can spawn.
fn thread_fn(thread: CurrentThread) -> Pin<Box<dyn Future<Output = ()> + Send + 'static>> {
    Box::pin(new_thread(thread))
}
/// Decode the syscall number and its eight arguments from the thread's saved
/// user context (per-arch / per-ABI), execute the syscall, and write the
/// return value back into the arch-specific return register.
async fn handle_syscall(thread: &CurrentThread) {
    let (num, args) = thread.with_context(|cx| {
        let regs = cx.general;
        #[cfg(target_arch = "x86_64")]
        let num = regs.rax as u32;
        #[cfg(target_arch = "aarch64")]
        let num = regs.x16 as u32;
        // LibOS: Function call ABI
        #[cfg(feature = "std")]
        #[cfg(target_arch = "x86_64")]
        let args = unsafe {
            // Assumes args 7 and 8 are spilled on the user stack per the
            // function-call ABI, i.e. rsp points at them — TODO confirm.
            let a6 = (regs.rsp as *const usize).read();
            let a7 = (regs.rsp as *const usize).add(1).read();
            [
                regs.rdi, regs.rsi, regs.rdx, regs.rcx, regs.r8, regs.r9, a6, a7,
            ]
        };
        // RealOS: Zircon syscall ABI
        #[cfg(not(feature = "std"))]
        #[cfg(target_arch = "x86_64")]
        let args = [
            regs.rdi, regs.rsi, regs.rdx, regs.r10, regs.r8, regs.r9, regs.r12, regs.r13,
        ];
        // ARM64
        #[cfg(target_arch = "aarch64")]
        let args = [
            regs.x0, regs.x1, regs.x2, regs.x3, regs.x4, regs.x5, regs.x6, regs.x7,
        ];
        (num, args)
    });
    let mut syscall = Syscall { thread, thread_fn };
    let ret = syscall.syscall(num, args).await as usize;
    // Store the result where user mode expects it.
    thread.with_context(|cx| {
        #[cfg(target_arch = "x86_64")]
        {
            cx.general.rax = ret;
        }
        #[cfg(target_arch = "aarch64")]
        {
            cx.general.x0 = ret;
        }
    });
}
|
//! Simple bedgraph struct and writing utility.
/// One interval of a bedgraph track: a genomic span plus its score.
#[derive(Debug, PartialEq)]
pub struct BGBlock {
    /// Sequence (chromosome/contig) identifier.
    pub seqid: String,
    /// Start coordinate (0-based, per bedgraph convention — TODO confirm against callers).
    pub start: usize,
    /// End coordinate (exclusive, per bedgraph convention — TODO confirm).
    pub end: usize,
    /// Score associated with this interval.
    pub score: f64,
}
impl BGBlock {
    /// Construct a block for `seqid` covering `[start, end)` with `score`.
    ///
    /// The `seqid` is copied into an owned `String`.
    pub fn new(seqid: &str, start: usize, end: usize, score: f64) -> Self {
        BGBlock {
            seqid: seqid.to_string(),
            // Field-init shorthand replaces the redundant `start: start`
            // style flagged by clippy::redundant_field_names.
            start,
            end,
            score,
        }
    }
}
impl std::fmt::Display for BGBlock {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"{}\t{}\t{}\t{}",
self.seqid, self.start, self.end, self.score
)
}
}
|
use super::*;
use std::ops::*;
// Bitwise set operations on `Mask`, each delegating to the wrapped integer.
impl BitOr<Mask> for Mask {
    type Output = Mask;
    /// Union of the two masks.
    fn bitor(self, rhs: Mask) -> Self::Output {
        Mask(self.0 | rhs.0)
    }
}
impl BitOrAssign<Mask> for Mask {
    /// In-place union.
    fn bitor_assign(&mut self, rhs: Mask) {
        self.0 |= rhs.0
    }
}
impl BitAnd<Mask> for Mask {
    type Output = Mask;
    /// Intersection of the two masks.
    fn bitand(self, rhs: Mask) -> Self::Output {
        Mask(self.0 & rhs.0)
    }
}
impl BitAndAssign<Mask> for Mask {
    /// In-place intersection.
    fn bitand_assign(&mut self, rhs: Mask) {
        self.0 &= rhs.0
    }
}
impl BitXor<Mask> for Mask {
    type Output = Mask;
    /// Symmetric difference (XOR) of the two masks.
    ///
    /// BUGFIX: this previously used `&` (AND), so `a ^ b` silently computed
    /// the intersection instead of the symmetric difference.
    fn bitxor(self, rhs: Mask) -> Self::Output {
        Mask(self.0 ^ rhs.0)
    }
}
impl BitXorAssign<Mask> for Mask {
    /// In-place symmetric difference (XOR).
    ///
    /// BUGFIX: this previously used `&=` (AND-assign), so `a ^= b` silently
    /// intersected instead of XOR-ing.
    fn bitxor_assign(&mut self, rhs: Mask) {
        self.0 ^= rhs.0
    }
}
// Shifts move the whole bit pattern; `Not` is the complement.
impl Shl<u8> for Mask {
    type Output = Mask;
    /// Shift every bit left by `rhs`.
    fn shl(self, rhs: u8) -> Self::Output {
        Mask(self.0 << rhs)
    }
}
impl ShlAssign<u8> for Mask {
    /// In-place left shift.
    fn shl_assign(&mut self, rhs: u8) {
        self.0 <<= rhs
    }
}
impl Shr<u8> for Mask {
    type Output = Mask;
    /// Shift every bit right by `rhs`.
    fn shr(self, rhs: u8) -> Self::Output {
        Mask(self.0 >> rhs)
    }
}
impl ShrAssign<u8> for Mask {
    /// In-place right shift.
    fn shr_assign(&mut self, rhs: u8) {
        self.0 >>= rhs
    }
}
impl Not for Mask {
    type Output = Mask;
    /// Complement: every set bit becomes unset and vice versa.
    fn not(self) -> Self::Output {
        Mask(!self.0)
    }
}
#[cfg(test)]
mod tests {
    use super::super::masks::*;
    // Presumably A8 occupies the lowest bit, so a single right shift empties
    // the mask — TODO confirm against the masks module's layout.
    #[test]
    fn shr() {
        assert_eq!(A8 >> 1, EMPTY);
    }
}
|
// svd2rust-generated accessors for the IMR (interrupt mask) register.
#[doc = "Reader of register IMR"]
pub type R = crate::R<u32, super::IMR>;
#[doc = "Writer for register IMR"]
pub type W = crate::W<u32, super::IMR>;
#[doc = "Register IMR `reset()`'s with value 0"]
impl crate::ResetValue for super::IMR {
    type Type = u32;
    // All interrupt-enable bits are 0 (masked) after reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// ---------------------------------------------------------------------------
// Per-field read aliases (`*_R`) and write proxies (`*_W`), one pair per IMR
// interrupt-enable bit (svd2rust-generated pattern). Each `bit()` performs a
// read-modify-write of the writer's cached `bits` value at the field's bit
// position; `set_bit`/`clear_bit` are convenience wrappers.
// ---------------------------------------------------------------------------
#[doc = "Reader of field `TXISIE`"]
pub type TXISIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TXISIE`"]
pub struct TXISIE_W<'a> {
    w: &'a mut W,
}
impl<'a> TXISIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `TXMSGDISCIE`"]
pub type TXMSGDISCIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TXMSGDISCIE`"]
pub struct TXMSGDISCIE_W<'a> {
    w: &'a mut W,
}
impl<'a> TXMSGDISCIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `TXMSGSENTIE`"]
pub type TXMSGSENTIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TXMSGSENTIE`"]
pub struct TXMSGSENTIE_W<'a> {
    w: &'a mut W,
}
impl<'a> TXMSGSENTIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `TXMSGABTIE`"]
pub type TXMSGABTIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TXMSGABTIE`"]
pub struct TXMSGABTIE_W<'a> {
    w: &'a mut W,
}
impl<'a> TXMSGABTIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `HRSTDISCIE`"]
pub type HRSTDISCIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HRSTDISCIE`"]
pub struct HRSTDISCIE_W<'a> {
    w: &'a mut W,
}
impl<'a> HRSTDISCIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `HRSTSENTIE`"]
pub type HRSTSENTIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HRSTSENTIE`"]
pub struct HRSTSENTIE_W<'a> {
    w: &'a mut W,
}
impl<'a> HRSTSENTIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `TXUNDIE`"]
pub type TXUNDIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TXUNDIE`"]
pub struct TXUNDIE_W<'a> {
    w: &'a mut W,
}
impl<'a> TXUNDIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `RXNEIE`"]
pub type RXNEIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RXNEIE`"]
pub struct RXNEIE_W<'a> {
    w: &'a mut W,
}
impl<'a> RXNEIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `RXORDDETIE`"]
pub type RXORDDETIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RXORDDETIE`"]
pub struct RXORDDETIE_W<'a> {
    w: &'a mut W,
}
impl<'a> RXORDDETIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
#[doc = "Reader of field `RXHRSTDETIE`"]
pub type RXHRSTDETIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RXHRSTDETIE`"]
pub struct RXHRSTDETIE_W<'a> {
    w: &'a mut W,
}
impl<'a> RXHRSTDETIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `RXOVRIE`"]
pub type RXOVRIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RXOVRIE`"]
pub struct RXOVRIE_W<'a> {
    w: &'a mut W,
}
impl<'a> RXOVRIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
#[doc = "Reader of field `RXMSGENDIE`"]
pub type RXMSGENDIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RXMSGENDIE`"]
pub struct RXMSGENDIE_W<'a> {
    w: &'a mut W,
}
impl<'a> RXMSGENDIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
#[doc = "Reader of field `TYPECEVT1IE`"]
pub type TYPECEVT1IE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TYPECEVT1IE`"]
pub struct TYPECEVT1IE_W<'a> {
    w: &'a mut W,
}
impl<'a> TYPECEVT1IE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
#[doc = "Reader of field `TYPECEVT2IE`"]
pub type TYPECEVT2IE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TYPECEVT2IE`"]
pub struct TYPECEVT2IE_W<'a> {
    w: &'a mut W,
}
impl<'a> TYPECEVT2IE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
        self.w
    }
}
#[doc = "Reader of field `FRSEVTIE`"]
pub type FRSEVTIE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FRSEVTIE`"]
pub struct FRSEVTIE_W<'a> {
    w: &'a mut W,
}
impl<'a> FRSEVTIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
        self.w
    }
}
// Read-side accessors: extract each interrupt-enable bit from the register
// snapshot. Bit positions mirror the write proxies above (note the gaps at
// bits 7, 13 and 16..=19, which are reserved in IMR).
impl R {
    #[doc = "Bit 0 - TXISIE"]
    #[inline(always)]
    pub fn txisie(&self) -> TXISIE_R {
        TXISIE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - TXMSGDISCIE"]
    #[inline(always)]
    pub fn txmsgdiscie(&self) -> TXMSGDISCIE_R {
        TXMSGDISCIE_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - TXMSGSENTIE"]
    #[inline(always)]
    pub fn txmsgsentie(&self) -> TXMSGSENTIE_R {
        TXMSGSENTIE_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - TXMSGABTIE"]
    #[inline(always)]
    pub fn txmsgabtie(&self) -> TXMSGABTIE_R {
        TXMSGABTIE_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - HRSTDISCIE"]
    #[inline(always)]
    pub fn hrstdiscie(&self) -> HRSTDISCIE_R {
        HRSTDISCIE_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - HRSTSENTIE"]
    #[inline(always)]
    pub fn hrstsentie(&self) -> HRSTSENTIE_R {
        HRSTSENTIE_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - TXUNDIE"]
    #[inline(always)]
    pub fn txundie(&self) -> TXUNDIE_R {
        TXUNDIE_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 8 - RXNEIE"]
    #[inline(always)]
    pub fn rxneie(&self) -> RXNEIE_R {
        RXNEIE_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - RXORDDETIE"]
    #[inline(always)]
    pub fn rxorddetie(&self) -> RXORDDETIE_R {
        RXORDDETIE_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - RXHRSTDETIE"]
    #[inline(always)]
    pub fn rxhrstdetie(&self) -> RXHRSTDETIE_R {
        RXHRSTDETIE_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - RXOVRIE"]
    #[inline(always)]
    pub fn rxovrie(&self) -> RXOVRIE_R {
        RXOVRIE_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 12 - RXMSGENDIE"]
    #[inline(always)]
    pub fn rxmsgendie(&self) -> RXMSGENDIE_R {
        RXMSGENDIE_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 14 - TYPECEVT1IE"]
    #[inline(always)]
    pub fn typecevt1ie(&self) -> TYPECEVT1IE_R {
        TYPECEVT1IE_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 15 - TYPECEVT2IE"]
    #[inline(always)]
    pub fn typecevt2ie(&self) -> TYPECEVT2IE_R {
        TYPECEVT2IE_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 20 - FRSEVTIE"]
    #[inline(always)]
    pub fn frsevtie(&self) -> FRSEVTIE_R {
        FRSEVTIE_R::new(((self.bits >> 20) & 0x01) != 0)
    }
}
// Write-side accessors: each method hands out the field's write proxy, which
// mutates this writer's cached `bits` via its `bit`/`set_bit`/`clear_bit`.
impl W {
    #[doc = "Bit 0 - TXISIE"]
    #[inline(always)]
    pub fn txisie(&mut self) -> TXISIE_W {
        TXISIE_W { w: self }
    }
    #[doc = "Bit 1 - TXMSGDISCIE"]
    #[inline(always)]
    pub fn txmsgdiscie(&mut self) -> TXMSGDISCIE_W {
        TXMSGDISCIE_W { w: self }
    }
    #[doc = "Bit 2 - TXMSGSENTIE"]
    #[inline(always)]
    pub fn txmsgsentie(&mut self) -> TXMSGSENTIE_W {
        TXMSGSENTIE_W { w: self }
    }
    #[doc = "Bit 3 - TXMSGABTIE"]
    #[inline(always)]
    pub fn txmsgabtie(&mut self) -> TXMSGABTIE_W {
        TXMSGABTIE_W { w: self }
    }
    #[doc = "Bit 4 - HRSTDISCIE"]
    #[inline(always)]
    pub fn hrstdiscie(&mut self) -> HRSTDISCIE_W {
        HRSTDISCIE_W { w: self }
    }
    #[doc = "Bit 5 - HRSTSENTIE"]
    #[inline(always)]
    pub fn hrstsentie(&mut self) -> HRSTSENTIE_W {
        HRSTSENTIE_W { w: self }
    }
    #[doc = "Bit 6 - TXUNDIE"]
    #[inline(always)]
    pub fn txundie(&mut self) -> TXUNDIE_W {
        TXUNDIE_W { w: self }
    }
    #[doc = "Bit 8 - RXNEIE"]
    #[inline(always)]
    pub fn rxneie(&mut self) -> RXNEIE_W {
        RXNEIE_W { w: self }
    }
    #[doc = "Bit 9 - RXORDDETIE"]
    #[inline(always)]
    pub fn rxorddetie(&mut self) -> RXORDDETIE_W {
        RXORDDETIE_W { w: self }
    }
    #[doc = "Bit 10 - RXHRSTDETIE"]
    #[inline(always)]
    pub fn rxhrstdetie(&mut self) -> RXHRSTDETIE_W {
        RXHRSTDETIE_W { w: self }
    }
    #[doc = "Bit 11 - RXOVRIE"]
    #[inline(always)]
    pub fn rxovrie(&mut self) -> RXOVRIE_W {
        RXOVRIE_W { w: self }
    }
    #[doc = "Bit 12 - RXMSGENDIE"]
    #[inline(always)]
    pub fn rxmsgendie(&mut self) -> RXMSGENDIE_W {
        RXMSGENDIE_W { w: self }
    }
    #[doc = "Bit 14 - TYPECEVT1IE"]
    #[inline(always)]
    pub fn typecevt1ie(&mut self) -> TYPECEVT1IE_W {
        TYPECEVT1IE_W { w: self }
    }
    #[doc = "Bit 15 - TYPECEVT2IE"]
    #[inline(always)]
    pub fn typecevt2ie(&mut self) -> TYPECEVT2IE_W {
        TYPECEVT2IE_W { w: self }
    }
    #[doc = "Bit 20 - FRSEVTIE"]
    #[inline(always)]
    pub fn frsevtie(&mut self) -> FRSEVTIE_W {
        FRSEVTIE_W { w: self }
    }
}
|
use prost::Message;
mod item {
include!(concat!(env!("OUT_DIR"), "/sample.item.rs"));
}
/// Build an `item::Item` with the given id and price; all other fields keep
/// their protobuf defaults.
fn new_item(id: &str, price: i32) -> item::Item {
    item::Item {
        item_id: id.to_string(),
        price,
        ..Default::default()
    }
}
// Host functions supplied by the embedder under the "sample" import module.
#[link(wasm_import_module = "sample")]
extern {
    /// Log a UTF-8 message located at (ptr, len) in linear memory.
    fn log(ptr: *const u8, len: usize);
    /// Hand a protobuf-encoded Item at (ptr, len) to the host for printing.
    fn print_item(ptr: *const u8, len: usize);
}
#[no_mangle]
extern fn _malloc(size: usize) -> *mut u8 {
    // Allocate a zero-initialized `size`-byte buffer and leak it to the wasm
    // host as a raw pointer; the host returns ownership via `_free`.
    // NOTE(review): this allocates a boxed slice ([u8]) but `_free` rebuilds
    // a `Box<u8>` (single byte), so the alloc/dealloc layouts do not match —
    // undefined behavior. A fix needs `_free` to know the length (e.g. a
    // size header before the buffer or a registry of live allocations).
    let buf = vec![0; size];
    Box::into_raw(buf.into_boxed_slice()) as *mut u8
}
#[no_mangle]
extern fn _free(ptr: *mut u8) {
    // Reclaim a buffer previously handed out by `_malloc`.
    // NOTE(review): `Box::from_raw(ptr)` reconstructs a `Box<u8>` (layout of
    // ONE byte) while `_malloc` allocated a boxed slice of `size` bytes —
    // deallocating with a mismatched layout is undefined behavior. See the
    // matching note on `_malloc`.
    unsafe {
        drop(Box::from_raw(ptr));
    }
}
#[no_mangle]
extern fn send_item(ptr: *const u8, size: usize) {
    // Host -> module entry point: decode a protobuf-encoded Item from the
    // given linear-memory region and echo it back through the host `log`.
    // Assumes (ptr, size) is a valid, initialized region provided by the
    // host (typically allocated via `_malloc`) — TODO confirm host contract.
    unsafe {
        let slice = std::slice::from_raw_parts(ptr, size);
        let d = item::Item::decode(slice).unwrap();
        let msg = format!("received '{:?}'", d);
        log(msg.as_ptr(), msg.len());
    }
}
fn main() {
    // Demo entry: encode one sample item and hand the protobuf bytes to the
    // host's `print_item` import.
    let d = new_item("test1", 5);
    // Pre-size the buffer; prost's encode appends exactly encoded_len bytes.
    let mut buf = Vec::with_capacity(d.encoded_len());
    d.encode(&mut buf).unwrap();
    unsafe {
        print_item(buf.as_ptr(), buf.len());
    }
}
|
extern crate enet;
use std::net::Ipv4Addr;
use enet::*;
use std::time::Duration;
/// ENet demo client: connect to localhost:9001, wait for the connection to
/// be established, send one reliable packet, then service events forever
/// (so the deferred disconnect gets flushed) until the process is killed.
fn main() {
    let enet = Enet::new().expect("could not initialize ENet");
    // Client-only host: no listen address, up to 10 peers, no channel or
    // bandwidth limits.
    let mut host = enet
        .create_host::<()>(
            None,
            10,
            ChannelLimit::Maximum,
            BandwidthLimit::Unlimited,
            BandwidthLimit::Unlimited,
        )
        .expect("could not create host");
    host.connect(&Address::new(Ipv4Addr::LOCALHOST, 9001), 10, 0)
        .expect("connect failed");
    // Poll (1 s timeout per service call) until the Connect event arrives.
    let peer_id = loop {
        let e = host
            .service(Duration::from_secs(1))
            .expect("service failed");
        let e = match e {
            Some(ev) => ev,
            _ => continue, // timeout — keep polling
        };
        println!("[client] event: {:#?}", e);
        match e.kind {
            EventKind::Connect => break e.peer_id,
            EventKind::Disconnect { data } => {
                println!(
                    "connection NOT successful, peer: {:?}, reason: {}",
                    e.peer_id, data
                );
                std::process::exit(0);
            }
            EventKind::Receive { .. } => {
                panic!("unexpected Receive-event while waiting for connection")
            }
        };
    };
    // send a "hello"-like packet
    host[peer_id]
        .send_packet(
            Packet::new(b"harro".to_vec(), PacketMode::ReliableSequenced).unwrap(),
            1,
        )
        .unwrap();
    // disconnect after all outgoing packets have been sent.
    host[peer_id].disconnect_later(5);
    // Keep servicing so outgoing traffic and the disconnect are processed.
    loop {
        let e = host.service(Duration::from_secs(1)).unwrap();
        println!("received event: {:#?}", e);
    }
}
|
use choice::*;
use history::*;
/// Strategy identifier: a static, human-readable display name.
pub type Id = &'static str;
/// A strategy for an iterated two-choice game.
pub trait Strategy {
    /// Stable display name for this strategy.
    fn id(&self) -> Id;
    /// return the strategy's next choice, based on the given history
    fn choice(&self, history: &History) -> Choice;
}
/* Rust lets us create empty (unit) structs, which have no fields. They are
instantiated using just the struct name, and traits can be implemented for them.
*/
/// Always chooses `false`, regardless of history.
#[derive(Debug)]
pub struct Never;
impl Strategy for Never {
    fn id(&self) -> Id { "Never" }
    fn choice(&self, _history: &History) -> Choice { false }
}
/// Always chooses `true`, regardless of history.
#[derive(Debug)]
pub struct Always;
impl Strategy for Always {
    fn id(&self) -> Id { "Always" }
    fn choice(&self, _history: &History) -> Choice { true }
}
/// Starts with `true`, then flips its own previous choice every round.
#[derive(Debug)]
pub struct AlternateTrueFalse;
impl Strategy for AlternateTrueFalse {
    fn id(&self) -> Id { "AlternateTrueFalse" }
    fn choice(&self, history: &History) -> Choice {
        // First round (empty history): true; otherwise negate our own
        // previous choice (pair element 0).
        history.last().map_or(true, |cp| !cp.0)
    }
}
/// Starts with `false`, then flips its own previous choice every round.
#[derive(Debug)]
pub struct AlternateFalseTrue;
impl Strategy for AlternateFalseTrue {
    fn id(&self) -> Id { "AlternateFalseTrue" }
    fn choice(&self, history: &History) -> Choice {
        // First round (empty history): false; otherwise negate our own
        // previous choice (pair element 0).
        history.last().map_or(false, |cp| !cp.0)
    }
}
/// TitForTat: cooperates (`true`) on the first move, then mirrors the
/// opponent's most recent choice.
#[derive(Debug)]
pub struct TitForTat;
impl Strategy for TitForTat {
    fn id(&self) -> Id { "TitForTat" }
    fn choice(&self, history: &History) -> Choice {
        // Empty history: true; otherwise copy the opponent's previous
        // choice (pair element 1).
        history.last().map_or(true, |cp| cp.1)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Never always defects.
    #[test]
    fn test_never() {
        let history = History::new(0);
        let p1 = Never;
        assert!(!p1.choice(&history));
    }
    // Always always cooperates.
    #[test]
    fn test_always() {
        let history = History::new(0);
        let p1 = Always;
        assert!(p1.choice(&history));
    }
    // Alternators flip their OWN previous choice (pair element 0) each round.
    #[test]
    fn test_alternate1() {
        let mut history = History::new(2);
        let p1 = AlternateTrueFalse;
        assert!(p1.choice(&history));
        history.push(ChoicePair(true, true));
        assert!(!p1.choice(&history));
        history.push(ChoicePair(false, false));
        assert!(p1.choice(&history));
    }
    #[test]
    fn test_alternate2() {
        let mut history = History::new(2);
        let p1 = AlternateFalseTrue;
        assert!(!p1.choice(&history));
        history.push(ChoicePair(true, true));
        assert!(!p1.choice(&history));
        history.push(ChoicePair(false, false));
        assert!(p1.choice(&history));
    }
    // TitForTat mirrors the OPPONENT's previous choice (pair element 1).
    #[test]
    fn test_titfortat() {
        let mut history = History::new(3);
        let p1 = TitForTat;
        assert!(p1.choice(&history));
        history.push(ChoicePair(true, true));
        assert!(p1.choice(&history));
        history.push(ChoicePair(true, false));
        assert!(!p1.choice(&history));
        history.push(ChoicePair(true, true));
        assert!(p1.choice(&history));
    }
}
|
pub mod workspaces;
pub use workspaces::Workspaces;
pub mod title;
pub use title::Title;
use {
std::{
cell::RefCell,
collections::HashMap,
ops::DerefMut,
sync::Arc,
thread,
},
swayipc::{
self,
Connection as Sway,
EventType,
reply::{CommandOutcome, Event, Workspace},
},
glib::{Continue, MainContext},
};
/// Crate-local result alias over swayipc's error type.
pub type Result<T> = swayipc::Fallible<T>;
/// Cloneable handle to a shared sway IPC connection.
///
/// NOTE(review): `Arc<RefCell<..>>` is not thread-safe (`RefCell` is not
/// `Sync`); this relies on every clone being used from a single thread
/// (the glib main context) — confirm.
#[derive(Clone)]
pub struct Connection(Arc<RefCell<Sway>>);
impl Connection {
    /// Start building a connection with event subscriptions.
    pub fn build() -> Result<ConnectionBuilder> {
        ConnectionBuilder::new()
    }
    /// Open a fresh IPC socket to sway.
    fn new() -> Result<Connection> {
        let conn = Sway::new()?;
        Ok(Connection(Arc::new(RefCell::new(conn))))
    }
    /// Run `f` with exclusive access to the underlying connection.
    /// Panics if re-entered (RefCell double borrow).
    pub fn with_connection<R>(&self, f: impl FnOnce(&mut Sway) -> R) -> R {
        f(self.0.borrow_mut().deref_mut())
    }
    /// Query the currently open workspaces.
    pub fn get_workspaces(&self) -> Result<Vec<Workspace>> {
        self.with_connection(|conn| conn.get_workspaces())
    }
    /// Run one or more sway commands, returning per-command outcomes.
    pub fn run_commands(&self, commands: &str) -> Result<Vec<CommandOutcome>> {
        self.with_connection(|conn| conn.run_command(commands))
    }
}
//impl Drop for Connection {
// fn drop(&mut self) {
// // FIXME: we have to orphan the thread here -- i3ipc-rs uses an iterator instead of a
// // channel, so we have no way to wake it up
// }
//}
/// Callback invoked with the shared connection and a received event.
type Subscriber = Box<dyn FnMut(&Connection, &Event)>;
/// Builder collecting event subscriptions before `connect()` starts the
/// event-forwarding machinery.
pub struct ConnectionBuilder {
    conn: Connection,
    // One subscriber list per event type; a callback registered for several
    // types is cloned into each list.
    subs: HashMap<EventType, Vec<Subscriber>>,
}
// Allow calling `Connection` methods directly on the builder.
impl std::ops::Deref for ConnectionBuilder {
    type Target = Connection;
    fn deref(&self) -> &Connection {
        &self.conn
    }
}
impl ConnectionBuilder {
    /// Create a builder backed by a fresh sway IPC connection.
    fn new() -> Result<ConnectionBuilder> {
        Ok(ConnectionBuilder {
            conn: Connection::new()?,
            subs: HashMap::new(),
        })
    }
    /// Register `f` to be called for each of the given event types.
    ///
    /// The closure must be `Clone` because one boxed copy is stored per
    /// event type it subscribes to.
    pub fn subscribe(
        &mut self,
        events: &[EventType],
        f: impl FnMut(&Connection, &Event) + Clone + 'static)
    {
        let subscriber = Box::new(f);
        for event in events {
            // `or_default()` avoids eagerly constructing a Vec that is
            // discarded when the entry already exists
            // (clippy::or_fun_call / unwrap-free single lookup).
            let set = self.subs.entry(*event).or_default();
            set.push(subscriber.clone());
        }
    }
    /// Start the event pump and return the shared connection.
    ///
    /// A background thread blocks on sway's event iterator and forwards
    /// events into a glib channel; the main-context watcher dispatches each
    /// event to the subscribers registered for its type.
    pub fn connect(self) -> Result<Connection> {
        let (chan_tx, chan_rx) = MainContext::channel(Default::default());
        let event_types: Vec<EventType> = self.subs
            .keys()
            .copied()
            .collect();
        chan_rx.attach(None, {
            let conn = self.conn.clone();
            let mut subs = self.subs;
            move |e| {
                // Map the event payload back to its EventType key so the
                // matching subscriber list can be located.
                use EventType::*;
                let sub_type = match e {
                    Event::Workspace(_) => Workspace,
                    Event::Mode(_) => Mode,
                    Event::Window(_) => Window,
                    Event::BarConfigUpdate(_) => BarConfigUpdate,
                    Event::Binding(_) => Binding,
                    Event::Shutdown(_) => Shutdown,
                    Event::Tick(_) => Tick,
                    Event::BarStateUpdate(_) => BarStateUpdate,
                    Event::Input(_) => Input
                };
                if let Some(subscribers) = subs.get_mut(&sub_type) {
                    for f in subscribers {
                        f(&conn, &e);
                    }
                }
                Continue(true)
            }
        });
        thread::spawn({
            // Note: the subscription (and its `?` error propagation) runs on
            // the caller's thread; only the blocking iteration moves into
            // the spawned thread.
            let events = Sway::new()?.subscribe(&event_types)?;
            move || {
                for e in events {
                    let e = e.unwrap();
                    // Receiver gone (main context dropped): stop the pump.
                    if chan_tx.send(e).is_err() {
                        break;
                    }
                }
            }
        });
        Ok(self.conn)
    }
}
|
/// Tuples are written with `(` and `)` and their elements are accessed by
/// index; structs can also be tuple structs.
///
/// Returns the pair with its elements swapped.
fn reverse(pair: (i32, bool)) -> (bool, i32) {
    (pair.1, pair.0)
}
/// Demo: swap a tuple's elements and print the result.
fn main() {
    let original = (1, true);
    let (first, second) = reverse(original);
    println!("t0 = {}, t1= {}", first, second);
}
|
#![allow(dead_code)]
extern crate cgmath;
extern crate embree;
extern crate support;
use cgmath::{InnerSpace, Matrix, Matrix4, SquareMatrix, Vector3, Vector4};
use embree::{
Device, Geometry, Instance, IntersectContext, QuadMesh, Ray, RayHit, Scene, TriangleMesh,
};
use std::{f32, u32};
use support::Camera;
/// Make a triangulated sphere, from the Embree tutorial:
/// https://github.com/embree/embree/blob/master/tutorials/instanced_geometry/instanced_geometry_device.cpp
///
/// The sphere is centred at `pos` with the given `radius` and is
/// parametrised by `num_phi` latitude rows and `num_theta = 2 * num_phi`
/// longitude columns around the y axis.
fn make_triangulated_sphere<'a>(
    device: &'a Device,
    pos: Vector3<f32>,
    radius: f32,
) -> Geometry<'a> {
    let num_phi = 5;
    let num_theta = 2 * num_phi;
    // Triangle/vertex counts: the two pole rows contribute one triangle per
    // column, every other row contributes two.
    let mut mesh = TriangleMesh::unanimated(
        device,
        2 * num_theta * (num_phi - 1),
        num_theta * (num_phi + 1),
    );
    {
        let mut verts = mesh.vertex_buffer.map();
        let mut tris = mesh.index_buffer.map();
        let inv_num_phi = 1.0 / (num_phi as f32);
        let inv_num_theta = 1.0 / (num_theta as f32);
        // Lay the vertices out ring by ring using spherical coordinates.
        for phi in 0..num_phi + 1 {
            for theta in 0..num_theta {
                let phif = phi as f32 * f32::consts::PI * inv_num_phi;
                let thetaf = theta as f32 * f32::consts::PI * 2.0 * inv_num_theta;
                let v = &mut verts[phi * num_theta + theta];
                v.x = pos.x + radius * f32::sin(phif) * f32::sin(thetaf);
                v.y = pos.y + radius * f32::cos(phif);
                v.z = pos.z + radius * f32::sin(phif) * f32::cos(thetaf);
            }
        }
        // Stitch adjacent rings; `theta % num_theta` wraps the seam back to
        // the first column of the ring.
        let mut tri = 0;
        for phi in 1..num_phi + 1 {
            for theta in 1..num_theta + 1 {
                let p00 = (phi - 1) * num_theta + theta - 1;
                let p01 = (phi - 1) * num_theta + theta % num_theta;
                let p10 = phi * num_theta + theta - 1;
                let p11 = phi * num_theta + theta % num_theta;
                // Skip the triangle that would degenerate at the north pole.
                if phi > 1 {
                    tris[tri].x = p10 as u32;
                    tris[tri].y = p01 as u32;
                    tris[tri].z = p00 as u32;
                    tri += 1;
                }
                // Skip the triangle that would degenerate at the south pole.
                if phi < num_phi {
                    tris[tri].x = p11 as u32;
                    tris[tri].y = p01 as u32;
                    tris[tri].z = p10 as u32;
                    tri += 1;
                }
            }
        }
    }
    let mut mesh = Geometry::Triangle(mesh);
    mesh.commit();
    mesh
}
/// Build a single-quad ground plane at y = -2 spanning [-10, 10] in x and z.
fn make_ground_plane<'a>(device: &'a Device) -> Geometry<'a> {
    let mut plane = QuadMesh::unanimated(device, 1, 4);
    {
        let mut vbuf = plane.vertex_buffer.map();
        let mut qbuf = plane.index_buffer.map();
        // The four corners, counter-clockwise, as (x, z) pairs.
        let corners = [(-10.0, -10.0), (-10.0, 10.0), (10.0, 10.0), (10.0, -10.0)];
        for (i, &(x, z)) in corners.iter().enumerate() {
            vbuf[i] = Vector4::new(x, -2.0, z, 0.0);
        }
        qbuf[0] = Vector4::new(0, 1, 2, 3);
    }
    let mut plane = Geometry::Quad(plane);
    plane.commit();
    plane
}
// Animate like the Embree example, returns the (transforms, normal_transforms)
// for `num_instances` instances revolving about the origin while spinning.
fn animate_instances(time: f32, num_instances: usize) -> (Vec<Matrix4<f32>>, Vec<Matrix4<f32>>) {
    let orbit_t = 0.7 * time;
    let spin_t = 1.5 * time;
    // Rotation of each instance about its own y axis.
    let rot = Matrix4::from_cols(
        Vector4::new(f32::cos(spin_t), 0.0, f32::sin(spin_t), 0.0),
        Vector4::new(0.0, 1.0, 0.0, 0.0),
        Vector4::new(-f32::sin(spin_t), 0.0, f32::cos(spin_t), 0.0),
        Vector4::new(0.0, 0.0, 0.0, 1.0),
    );
    let transforms: Vec<Matrix4<f32>> = (0..num_instances)
        .map(|i| {
            // Evenly space the instances on a circle of radius 2.2.
            let angle = orbit_t + i as f32 * 2.0 * f32::consts::PI / 4.0;
            let orbit = Matrix4::<f32>::from_translation(
                2.2 * Vector3::<f32>::new(f32::cos(angle), 0.0, f32::sin(angle)),
            );
            orbit * rot
        })
        .collect();
    // Normals transform by the inverse transpose of the instance transform.
    let normal_transforms: Vec<Matrix4<f32>> = transforms
        .iter()
        .map(|t| t.invert().unwrap().transpose())
        .collect();
    (transforms, normal_transforms)
}
/// Demo entry point: builds four instanced sphere clusters over a ground
/// plane and renders them with simple lambertian shading plus shadows.
fn main() {
    let mut display = support::Display::new(512, 512, "instancing");
    let device = Device::new();
    // Make the scene we'll instance with 4 triangulated spheres.
    let spheres = vec![
        make_triangulated_sphere(&device, Vector3::new(0.0, 0.0, 1.0), 0.5),
        make_triangulated_sphere(&device, Vector3::new(1.0, 0.0, 0.0), 0.5),
        make_triangulated_sphere(&device, Vector3::new(0.0, 0.0, -1.0), 0.5),
        make_triangulated_sphere(&device, Vector3::new(-1.0, 0.0, 0.0), 0.5),
    ];
    let mut instanced_scene = Scene::new(&device);
    for s in spheres.into_iter() {
        instanced_scene.attach_geometry(s);
    }
    let committed_instance = instanced_scene.commit();
    // Make the instances first so their ids will be 0-3 that we can then use
    // directly to index into the instance_colors
    let instances = vec![
        Instance::unanimated(&device, &committed_instance),
        Instance::unanimated(&device, &committed_instance),
        Instance::unanimated(&device, &committed_instance),
        Instance::unanimated(&device, &committed_instance),
    ];
    let num_instances = instances.len();
    let mut scene = Scene::new(&device);
    for i in instances.into_iter() {
        scene.attach_geometry(Geometry::Instance(i));
    }
    // One palette per instance, indexed as [instance_id][geometry_id].
    let instance_colors = vec![
        vec![
            Vector3::new(0.25, 0.0, 0.0),
            Vector3::new(0.5, 0.0, 0.0),
            Vector3::new(0.75, 0.0, 0.0),
            Vector3::new(1.00, 0.0, 0.0),
        ],
        vec![
            Vector3::new(0.0, 0.25, 0.0),
            Vector3::new(0.0, 0.50, 0.0),
            Vector3::new(0.0, 0.75, 0.0),
            Vector3::new(0.0, 1.00, 0.0),
        ],
        vec![
            Vector3::new(0.0, 0.0, 0.25),
            Vector3::new(0.0, 0.0, 0.50),
            Vector3::new(0.0, 0.0, 0.75),
            Vector3::new(0.0, 0.0, 1.00),
        ],
        vec![
            Vector3::new(0.25, 0.25, 0.0),
            Vector3::new(0.50, 0.50, 0.0),
            Vector3::new(0.75, 0.75, 0.0),
            Vector3::new(1.00, 1.00, 0.0),
        ],
    ];
    let ground = make_ground_plane(&device);
    let ground_id = scene.attach_geometry(ground);
    let light_dir = Vector3::new(1.0, 1.0, -1.0).normalize();
    let mut intersection_ctx = IntersectContext::coherent();
    display.run(|image, camera_pose, time| {
        // Clear the framebuffer to black before re-rendering the frame.
        for p in image.iter_mut() {
            *p = 0;
        }
        // Update scene transformations
        let (transforms, normal_transforms) = animate_instances(time, num_instances);
        let mut tfm_iter = transforms.iter();
        for g in scene.iter_mut() {
            if let Geometry::Instance(ref mut inst) = g.1 {
                inst.set_transform(tfm_iter.next().expect("out of bounds tfm"));
            }
            // A bit annoying here that we can't call the mut on the geometry
            // part because we borrowed the inner instance piece as mutable
            g.1.commit();
        }
        let rtscene = scene.commit();
        let img_dims = image.dimensions();
        let camera = Camera::look_dir(
            camera_pose.pos,
            camera_pose.dir,
            camera_pose.up,
            55.0,
            img_dims,
        );
        // Render the scene
        for j in 0..img_dims.1 {
            for i in 0..img_dims.0 {
                // Primary ray through the centre of pixel (i, j).
                let dir = camera.ray_dir((i as f32 + 0.5, j as f32 + 0.5));
                let mut ray_hit = RayHit::new(Ray::new(camera.pos, dir));
                rtscene.intersect(&mut intersection_ctx, &mut ray_hit);
                if ray_hit.hit.hit() {
                    // Transform the normals of the instances into world space with the normal_transforms
                    let hit = &ray_hit.hit;
                    let geom_id = hit.geomID;
                    let inst_id = hit.instID[0];
                    let mut normal = Vector3::new(hit.Ng_x, hit.Ng_y, hit.Ng_z).normalize();
                    if inst_id != u32::MAX {
                        let v = normal_transforms[inst_id as usize]
                            * Vector4::new(normal.x, normal.y, normal.z, 0.0);
                        normal = Vector3::new(v.x, v.y, v.z).normalize()
                    }
                    let mut illum = 0.3;
                    // Cast a shadow ray from the hit point towards the light.
                    let shadow_pos = camera.pos + dir * ray_hit.ray.tfar;
                    let mut shadow_ray = Ray::segment(shadow_pos, light_dir, 0.001, f32::INFINITY);
                    rtscene.occluded(&mut intersection_ctx, &mut shadow_ray);
                    // NOTE(review): relies on occluded() marking a blocked
                    // segment by driving tfar negative — confirm against the
                    // embree rtcOccluded1 docs.
                    if shadow_ray.tfar >= 0.0 {
                        illum =
                            support::clamp(illum + f32::max(light_dir.dot(normal), 0.0), 0.0, 1.0);
                    }
                    let p = image.get_pixel_mut(i, j);
                    if inst_id == u32::MAX && geom_id == ground_id {
                        // Non-instanced ground plane: greyscale.
                        p[0] = (255.0 * illum) as u8;
                        p[1] = p[0];
                        p[2] = p[0];
                    } else {
                        // Shade the instances using their color
                        let color = &instance_colors[inst_id as usize][geom_id as usize];
                        p[0] = (255.0 * illum * color.x) as u8;
                        p[1] = (255.0 * illum * color.y) as u8;
                        p[2] = (255.0 * illum * color.z) as u8;
                    }
                }
            }
        }
    });
}
|
use super::helpers::{remainder_ref, remainder_reuse, reverse_remainder};
use crate::integer::Integer;
use core::ops::Rem;
// Rem The remainder operator %.
//
// `Integer % Integer` in every ownership combination (generated code). An
// owned lhs is reduced in place via `remainder_assign`; a borrowed lhs
// allocates the result via `remainder`.
// ['Integer', 'Integer', 'Integer', 'Integer::remainder_assign', 'lhs',
// ['ref_mut'], ['ref']]
impl Rem<Integer> for Integer {
    type Output = Integer;
    fn rem(mut self, rhs: Integer) -> Self::Output {
        Integer::remainder_assign(&mut self, &rhs);
        self
    }
}
// ['Integer', '&Integer', 'Integer', 'Integer::remainder_assign', 'lhs',
// ['ref_mut'], []]
impl Rem<&Integer> for Integer {
    type Output = Integer;
    fn rem(mut self, rhs: &Integer) -> Self::Output {
        Integer::remainder_assign(&mut self, rhs);
        self
    }
}
// ['&Integer', 'Integer', 'Integer', 'Integer::remainder', 'no', [], ['ref']]
impl Rem<Integer> for &Integer {
    type Output = Integer;
    fn rem(self, rhs: Integer) -> Self::Output {
        Integer::remainder(self, &rhs)
    }
}
// ['&Integer', '&Integer', 'Integer', 'Integer::remainder', 'no', [], []]
impl Rem<&Integer> for &Integer {
    type Output = Integer;
    fn rem(self, rhs: &Integer) -> Self::Output {
        Integer::remainder(self, rhs)
    }
}
// `Integer % i8` and `i8 % Integer` in every ownership combination
// (generated code). i8 always fits in a C `long`, so the fast
// `remainder_c_long` path is used; the reversed direction goes through
// `reverse_remainder`.
// ['Integer', 'i8', 'i8', 'Integer::remainder_c_long', 'no', ['ref'], []]
impl Rem<i8> for Integer {
    type Output = i8;
    fn rem(self, rhs: i8) -> Self::Output {
        Integer::remainder_c_long(&self, rhs)
    }
}
// ['Integer', '&i8', 'i8', 'Integer::remainder_c_long', 'no', ['ref'],
// ['deref']]
impl Rem<&i8> for Integer {
    type Output = i8;
    fn rem(self, rhs: &i8) -> Self::Output {
        Integer::remainder_c_long(&self, *rhs)
    }
}
// ['&Integer', 'i8', 'i8', 'Integer::remainder_c_long', 'no', [], []]
impl Rem<i8> for &Integer {
    type Output = i8;
    fn rem(self, rhs: i8) -> Self::Output {
        Integer::remainder_c_long(self, rhs)
    }
}
// ['&Integer', '&i8', 'i8', 'Integer::remainder_c_long', 'no', [], ['deref']]
impl Rem<&i8> for &Integer {
    type Output = i8;
    fn rem(self, rhs: &i8) -> Self::Output {
        Integer::remainder_c_long(self, *rhs)
    }
}
// ['i8', 'Integer', 'i8', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for i8 {
    type Output = i8;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['i8', '&Integer', 'i8', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for i8 {
    type Output = i8;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&i8', 'Integer', 'i8', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &i8 {
    type Output = i8;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&i8', '&Integer', 'i8', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &i8 {
    type Output = i8;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % u8` and `u8 % Integer` in every ownership combination
// (generated code). u8 always fits in a C `long`, so `remainder_c_long`
// applies; the reversed direction goes through `reverse_remainder`.
// ['Integer', 'u8', 'u8', 'Integer::remainder_c_long', 'no', ['ref'], []]
impl Rem<u8> for Integer {
    type Output = u8;
    fn rem(self, rhs: u8) -> Self::Output {
        Integer::remainder_c_long(&self, rhs)
    }
}
// ['Integer', '&u8', 'u8', 'Integer::remainder_c_long', 'no', ['ref'],
// ['deref']]
impl Rem<&u8> for Integer {
    type Output = u8;
    fn rem(self, rhs: &u8) -> Self::Output {
        Integer::remainder_c_long(&self, *rhs)
    }
}
// ['&Integer', 'u8', 'u8', 'Integer::remainder_c_long', 'no', [], []]
impl Rem<u8> for &Integer {
    type Output = u8;
    fn rem(self, rhs: u8) -> Self::Output {
        Integer::remainder_c_long(self, rhs)
    }
}
// ['&Integer', '&u8', 'u8', 'Integer::remainder_c_long', 'no', [], ['deref']]
impl Rem<&u8> for &Integer {
    type Output = u8;
    fn rem(self, rhs: &u8) -> Self::Output {
        Integer::remainder_c_long(self, *rhs)
    }
}
// ['u8', 'Integer', 'u8', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for u8 {
    type Output = u8;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['u8', '&Integer', 'u8', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for u8 {
    type Output = u8;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&u8', 'Integer', 'u8', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &u8 {
    type Output = u8;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&u8', '&Integer', 'u8', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &u8 {
    type Output = u8;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % i16` and `i16 % Integer` in every ownership combination
// (generated code); i16 fits in a C `long`, so `remainder_c_long` applies.
// ['Integer', 'i16', 'i16', 'Integer::remainder_c_long', 'no', ['ref'], []]
impl Rem<i16> for Integer {
    type Output = i16;
    fn rem(self, rhs: i16) -> Self::Output {
        Integer::remainder_c_long(&self, rhs)
    }
}
// ['Integer', '&i16', 'i16', 'Integer::remainder_c_long', 'no', ['ref'],
// ['deref']]
impl Rem<&i16> for Integer {
    type Output = i16;
    fn rem(self, rhs: &i16) -> Self::Output {
        Integer::remainder_c_long(&self, *rhs)
    }
}
// ['&Integer', 'i16', 'i16', 'Integer::remainder_c_long', 'no', [], []]
impl Rem<i16> for &Integer {
    type Output = i16;
    fn rem(self, rhs: i16) -> Self::Output {
        Integer::remainder_c_long(self, rhs)
    }
}
// ['&Integer', '&i16', 'i16', 'Integer::remainder_c_long', 'no', [], ['deref']]
impl Rem<&i16> for &Integer {
    type Output = i16;
    fn rem(self, rhs: &i16) -> Self::Output {
        Integer::remainder_c_long(self, *rhs)
    }
}
// ['i16', 'Integer', 'i16', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for i16 {
    type Output = i16;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['i16', '&Integer', 'i16', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for i16 {
    type Output = i16;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&i16', 'Integer', 'i16', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &i16 {
    type Output = i16;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&i16', '&Integer', 'i16', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &i16 {
    type Output = i16;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % u16` and `u16 % Integer` in every ownership combination
// (generated code); u16 fits in a C `long`, so `remainder_c_long` applies.
// ['Integer', 'u16', 'u16', 'Integer::remainder_c_long', 'no', ['ref'], []]
impl Rem<u16> for Integer {
    type Output = u16;
    fn rem(self, rhs: u16) -> Self::Output {
        Integer::remainder_c_long(&self, rhs)
    }
}
// ['Integer', '&u16', 'u16', 'Integer::remainder_c_long', 'no', ['ref'],
// ['deref']]
impl Rem<&u16> for Integer {
    type Output = u16;
    fn rem(self, rhs: &u16) -> Self::Output {
        Integer::remainder_c_long(&self, *rhs)
    }
}
// ['&Integer', 'u16', 'u16', 'Integer::remainder_c_long', 'no', [], []]
impl Rem<u16> for &Integer {
    type Output = u16;
    fn rem(self, rhs: u16) -> Self::Output {
        Integer::remainder_c_long(self, rhs)
    }
}
// ['&Integer', '&u16', 'u16', 'Integer::remainder_c_long', 'no', [], ['deref']]
impl Rem<&u16> for &Integer {
    type Output = u16;
    fn rem(self, rhs: &u16) -> Self::Output {
        Integer::remainder_c_long(self, *rhs)
    }
}
// ['u16', 'Integer', 'u16', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for u16 {
    type Output = u16;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['u16', '&Integer', 'u16', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for u16 {
    type Output = u16;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&u16', 'Integer', 'u16', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &u16 {
    type Output = u16;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&u16', '&Integer', 'u16', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &u16 {
    type Output = u16;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % i32` and `i32 % Integer` in every ownership combination
// (generated code); i32 fits in a C `long`, so `remainder_c_long` applies.
// ['Integer', 'i32', 'i32', 'Integer::remainder_c_long', 'no', ['ref'], []]
impl Rem<i32> for Integer {
    type Output = i32;
    fn rem(self, rhs: i32) -> Self::Output {
        Integer::remainder_c_long(&self, rhs)
    }
}
// ['Integer', '&i32', 'i32', 'Integer::remainder_c_long', 'no', ['ref'],
// ['deref']]
impl Rem<&i32> for Integer {
    type Output = i32;
    fn rem(self, rhs: &i32) -> Self::Output {
        Integer::remainder_c_long(&self, *rhs)
    }
}
// ['&Integer', 'i32', 'i32', 'Integer::remainder_c_long', 'no', [], []]
impl Rem<i32> for &Integer {
    type Output = i32;
    fn rem(self, rhs: i32) -> Self::Output {
        Integer::remainder_c_long(self, rhs)
    }
}
// ['&Integer', '&i32', 'i32', 'Integer::remainder_c_long', 'no', [], ['deref']]
impl Rem<&i32> for &Integer {
    type Output = i32;
    fn rem(self, rhs: &i32) -> Self::Output {
        Integer::remainder_c_long(self, *rhs)
    }
}
// ['i32', 'Integer', 'i32', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for i32 {
    type Output = i32;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['i32', '&Integer', 'i32', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for i32 {
    type Output = i32;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&i32', 'Integer', 'i32', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &i32 {
    type Output = i32;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&i32', '&Integer', 'i32', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &i32 {
    type Output = i32;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % u32` and `u32 % Integer` on targets where a C `long` is
// 64 bits (LP64: 64-bit pointer width and not Windows), so u32 fits the
// fast `remainder_c_long` path. The 32-bit/Windows variants live further
// down under the complementary cfg.
// ['Integer', 'u32', 'u32', 'Integer::remainder_c_long', 'no', ['ref'], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<u32> for Integer {
    type Output = u32;
    fn rem(self, rhs: u32) -> Self::Output {
        Integer::remainder_c_long(&self, rhs)
    }
}
// ['Integer', '&u32', 'u32', 'Integer::remainder_c_long', 'no', ['ref'],
// ['deref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&u32> for Integer {
    type Output = u32;
    fn rem(self, rhs: &u32) -> Self::Output {
        Integer::remainder_c_long(&self, *rhs)
    }
}
// ['&Integer', 'u32', 'u32', 'Integer::remainder_c_long', 'no', [], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<u32> for &Integer {
    type Output = u32;
    fn rem(self, rhs: u32) -> Self::Output {
        Integer::remainder_c_long(self, rhs)
    }
}
// ['&Integer', '&u32', 'u32', 'Integer::remainder_c_long', 'no', [], ['deref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&u32> for &Integer {
    type Output = u32;
    fn rem(self, rhs: &u32) -> Self::Output {
        Integer::remainder_c_long(self, *rhs)
    }
}
// ['u32', 'Integer', 'u32', 'reverse_remainder', 'no', [], ['ref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<Integer> for u32 {
    type Output = u32;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['u32', '&Integer', 'u32', 'reverse_remainder', 'no', [], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&Integer> for u32 {
    type Output = u32;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&u32', 'Integer', 'u32', 'reverse_remainder', 'no', ['deref'], ['ref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<Integer> for &u32 {
    type Output = u32;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&u32', '&Integer', 'u32', 'reverse_remainder', 'no', ['deref'], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&Integer> for &u32 {
    type Output = u32;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % i64` and `i64 % Integer` on targets where a C `long` is
// 64 bits (LP64), so i64 fits the fast `remainder_c_long` path.
// ['Integer', 'i64', 'i64', 'Integer::remainder_c_long', 'no', ['ref'], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<i64> for Integer {
    type Output = i64;
    fn rem(self, rhs: i64) -> Self::Output {
        Integer::remainder_c_long(&self, rhs)
    }
}
// ['Integer', '&i64', 'i64', 'Integer::remainder_c_long', 'no', ['ref'],
// ['deref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&i64> for Integer {
    type Output = i64;
    fn rem(self, rhs: &i64) -> Self::Output {
        Integer::remainder_c_long(&self, *rhs)
    }
}
// ['&Integer', 'i64', 'i64', 'Integer::remainder_c_long', 'no', [], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<i64> for &Integer {
    type Output = i64;
    fn rem(self, rhs: i64) -> Self::Output {
        Integer::remainder_c_long(self, rhs)
    }
}
// ['&Integer', '&i64', 'i64', 'Integer::remainder_c_long', 'no', [], ['deref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&i64> for &Integer {
    type Output = i64;
    fn rem(self, rhs: &i64) -> Self::Output {
        Integer::remainder_c_long(self, *rhs)
    }
}
// ['i64', 'Integer', 'i64', 'reverse_remainder', 'no', [], ['ref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<Integer> for i64 {
    type Output = i64;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['i64', '&Integer', 'i64', 'reverse_remainder', 'no', [], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&Integer> for i64 {
    type Output = i64;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&i64', 'Integer', 'i64', 'reverse_remainder', 'no', ['deref'], ['ref']]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<Integer> for &i64 {
    type Output = i64;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&i64', '&Integer', 'i64', 'reverse_remainder', 'no', ['deref'], []]
#[cfg(all(target_pointer_width = "64", not(windows)))]
impl Rem<&Integer> for &i64 {
    type Output = i64;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % u32` and `u32 % Integer` on targets where a C `long` is only
// 32 bits (32-bit pointer width or Windows), so u32 may not fit a signed
// `long` and the generic `remainder_reuse`/`remainder_ref` helpers are
// used instead of `remainder_c_long`.
// ['Integer', 'u32', 'u32', 'remainder_reuse', 'no', [], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<u32> for Integer {
    type Output = u32;
    fn rem(self, rhs: u32) -> Self::Output {
        remainder_reuse(self, rhs)
    }
}
// ['Integer', '&u32', 'u32', 'remainder_reuse', 'no', [], ['deref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&u32> for Integer {
    type Output = u32;
    fn rem(self, rhs: &u32) -> Self::Output {
        remainder_reuse(self, *rhs)
    }
}
// ['&Integer', 'u32', 'u32', 'remainder_ref', 'no', [], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<u32> for &Integer {
    type Output = u32;
    fn rem(self, rhs: u32) -> Self::Output {
        remainder_ref(self, rhs)
    }
}
// ['&Integer', '&u32', 'u32', 'remainder_ref', 'no', [], ['deref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&u32> for &Integer {
    type Output = u32;
    fn rem(self, rhs: &u32) -> Self::Output {
        remainder_ref(self, *rhs)
    }
}
// ['u32', 'Integer', 'u32', 'reverse_remainder', 'no', [], ['ref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<Integer> for u32 {
    type Output = u32;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['u32', '&Integer', 'u32', 'reverse_remainder', 'no', [], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&Integer> for u32 {
    type Output = u32;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&u32', 'Integer', 'u32', 'reverse_remainder', 'no', ['deref'], ['ref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<Integer> for &u32 {
    type Output = u32;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&u32', '&Integer', 'u32', 'reverse_remainder', 'no', ['deref'], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&Integer> for &u32 {
    type Output = u32;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % i64` and `i64 % Integer` on targets where a C `long` is only
// 32 bits, so i64 takes the generic `remainder_reuse`/`remainder_ref`
// path instead of `remainder_c_long`.
// ['Integer', 'i64', 'i64', 'remainder_reuse', 'no', [], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<i64> for Integer {
    type Output = i64;
    fn rem(self, rhs: i64) -> Self::Output {
        remainder_reuse(self, rhs)
    }
}
// ['Integer', '&i64', 'i64', 'remainder_reuse', 'no', [], ['deref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&i64> for Integer {
    type Output = i64;
    fn rem(self, rhs: &i64) -> Self::Output {
        remainder_reuse(self, *rhs)
    }
}
// ['&Integer', 'i64', 'i64', 'remainder_ref', 'no', [], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<i64> for &Integer {
    type Output = i64;
    fn rem(self, rhs: i64) -> Self::Output {
        remainder_ref(self, rhs)
    }
}
// ['&Integer', '&i64', 'i64', 'remainder_ref', 'no', [], ['deref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&i64> for &Integer {
    type Output = i64;
    fn rem(self, rhs: &i64) -> Self::Output {
        remainder_ref(self, *rhs)
    }
}
// ['i64', 'Integer', 'i64', 'reverse_remainder', 'no', [], ['ref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<Integer> for i64 {
    type Output = i64;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['i64', '&Integer', 'i64', 'reverse_remainder', 'no', [], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&Integer> for i64 {
    type Output = i64;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&i64', 'Integer', 'i64', 'reverse_remainder', 'no', ['deref'], ['ref']]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<Integer> for &i64 {
    type Output = i64;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&i64', '&Integer', 'i64', 'reverse_remainder', 'no', ['deref'], []]
#[cfg(not(all(target_pointer_width = "64", not(windows))))]
impl Rem<&Integer> for &i64 {
    type Output = i64;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % u64` and `u64 % Integer` in every ownership combination
// (generated code). u64 never fits a signed C `long`, so the generic
// `remainder_reuse`/`remainder_ref` helpers are used on all targets.
// ['Integer', 'u64', 'u64', 'remainder_reuse', 'no', [], []]
impl Rem<u64> for Integer {
    type Output = u64;
    fn rem(self, rhs: u64) -> Self::Output {
        remainder_reuse(self, rhs)
    }
}
// ['Integer', '&u64', 'u64', 'remainder_reuse', 'no', [], ['deref']]
impl Rem<&u64> for Integer {
    type Output = u64;
    fn rem(self, rhs: &u64) -> Self::Output {
        remainder_reuse(self, *rhs)
    }
}
// ['&Integer', 'u64', 'u64', 'remainder_ref', 'no', [], []]
impl Rem<u64> for &Integer {
    type Output = u64;
    fn rem(self, rhs: u64) -> Self::Output {
        remainder_ref(self, rhs)
    }
}
// ['&Integer', '&u64', 'u64', 'remainder_ref', 'no', [], ['deref']]
impl Rem<&u64> for &Integer {
    type Output = u64;
    fn rem(self, rhs: &u64) -> Self::Output {
        remainder_ref(self, *rhs)
    }
}
// ['u64', 'Integer', 'u64', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for u64 {
    type Output = u64;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['u64', '&Integer', 'u64', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for u64 {
    type Output = u64;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&u64', 'Integer', 'u64', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &u64 {
    type Output = u64;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&u64', '&Integer', 'u64', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &u64 {
    type Output = u64;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % i128` and `i128 % Integer` in every ownership combination
// (generated code). i128 never fits a C `long`, so the generic
// `remainder_reuse`/`remainder_ref` helpers are used on all targets.
// ['Integer', 'i128', 'i128', 'remainder_reuse', 'no', [], []]
impl Rem<i128> for Integer {
    type Output = i128;
    fn rem(self, rhs: i128) -> Self::Output {
        remainder_reuse(self, rhs)
    }
}
// ['Integer', '&i128', 'i128', 'remainder_reuse', 'no', [], ['deref']]
impl Rem<&i128> for Integer {
    type Output = i128;
    fn rem(self, rhs: &i128) -> Self::Output {
        remainder_reuse(self, *rhs)
    }
}
// ['&Integer', 'i128', 'i128', 'remainder_ref', 'no', [], []]
impl Rem<i128> for &Integer {
    type Output = i128;
    fn rem(self, rhs: i128) -> Self::Output {
        remainder_ref(self, rhs)
    }
}
// ['&Integer', '&i128', 'i128', 'remainder_ref', 'no', [], ['deref']]
impl Rem<&i128> for &Integer {
    type Output = i128;
    fn rem(self, rhs: &i128) -> Self::Output {
        remainder_ref(self, *rhs)
    }
}
// ['i128', 'Integer', 'i128', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for i128 {
    type Output = i128;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['i128', '&Integer', 'i128', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for i128 {
    type Output = i128;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&i128', 'Integer', 'i128', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &i128 {
    type Output = i128;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&i128', '&Integer', 'i128', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &i128 {
    type Output = i128;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
// `Integer % u128` and `u128 % Integer` in every ownership combination
// (generated code). u128 never fits a C `long`, so the generic
// `remainder_reuse`/`remainder_ref` helpers are used on all targets.
// ['Integer', 'u128', 'u128', 'remainder_reuse', 'no', [], []]
impl Rem<u128> for Integer {
    type Output = u128;
    fn rem(self, rhs: u128) -> Self::Output {
        remainder_reuse(self, rhs)
    }
}
// ['Integer', '&u128', 'u128', 'remainder_reuse', 'no', [], ['deref']]
impl Rem<&u128> for Integer {
    type Output = u128;
    fn rem(self, rhs: &u128) -> Self::Output {
        remainder_reuse(self, *rhs)
    }
}
// ['&Integer', 'u128', 'u128', 'remainder_ref', 'no', [], []]
impl Rem<u128> for &Integer {
    type Output = u128;
    fn rem(self, rhs: u128) -> Self::Output {
        remainder_ref(self, rhs)
    }
}
// ['&Integer', '&u128', 'u128', 'remainder_ref', 'no', [], ['deref']]
impl Rem<&u128> for &Integer {
    type Output = u128;
    fn rem(self, rhs: &u128) -> Self::Output {
        remainder_ref(self, *rhs)
    }
}
// ['u128', 'Integer', 'u128', 'reverse_remainder', 'no', [], ['ref']]
impl Rem<Integer> for u128 {
    type Output = u128;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(self, &rhs)
    }
}
// ['u128', '&Integer', 'u128', 'reverse_remainder', 'no', [], []]
impl Rem<&Integer> for u128 {
    type Output = u128;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(self, rhs)
    }
}
// ['&u128', 'Integer', 'u128', 'reverse_remainder', 'no', ['deref'], ['ref']]
impl Rem<Integer> for &u128 {
    type Output = u128;
    fn rem(self, rhs: Integer) -> Self::Output {
        reverse_remainder(*self, &rhs)
    }
}
// ['&u128', '&Integer', 'u128', 'reverse_remainder', 'no', ['deref'], []]
impl Rem<&Integer> for &u128 {
    type Output = u128;
    fn rem(self, rhs: &Integer) -> Self::Output {
        reverse_remainder(*self, rhs)
    }
}
|
use std::ops;
use regex::Regex;
use failure::Error;
use ::pg_interval;
use ::iso_8601;
// Regexes used by `Interval::from_human` to pull each unit's magnitude out
// of a human-readable interval string; each matches the digits preceding
// the (singular stem of the) unit word. Compiled once, lazily.
lazy_static! {
    static ref YR_RE: Regex = Regex::new(r"(\d+) year").unwrap();
    static ref MO_RE: Regex = Regex::new(r"(\d+) month").unwrap();
    static ref DY_RE: Regex = Regex::new(r"(\d+) day").unwrap();
    static ref HR_RE: Regex = Regex::new(r"(\d+) hour").unwrap();
    static ref MI_RE: Regex = Regex::new(r"(\d+) minute").unwrap();
    static ref SD_RE: Regex = Regex::new(r"(\d+) second").unwrap();
}
/// A PostgreSQL-style interval: calendar months and days are stored
/// separately from the sub-day time component, which is kept in
/// microseconds (matching the on-the-wire interval representation).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub struct Interval {
    // Whole months; years are folded in as 12 months each.
    pub months: i32,
    pub days: i32,
    pub microseconds: i64,
}
impl Interval {
/// Create a new instance of interval from the months, days, and microseconds.
pub fn new(months: Option<i32>,
days: Option<i32>,
microseconds: Option<i64>) -> Interval {
Interval {
months: months.unwrap_or_default(),
days: days.unwrap_or_default(),
microseconds: microseconds.unwrap_or_default()
}
}
/// converts a postgres interval string, in 'HH24:MI:SS' format, to an Interval
pub fn from_pg_str(input: &str) -> Option<Interval> {
let mut parts = input.split(":").flat_map(|s| s.parse::<i64>().ok());
let hours = parts.next()?;
let mut us = hours * 60 * 60 * 1000000;
let mins = parts.next()?;
us += mins * 60 * 1000000;
let secs = parts.next()?;
us += mins * 1000000;
Some(Interval {microseconds: us, ..Default::default()})
}
/// "{} years {} months {} days {} hours {} minutes {} seconds"
pub fn from_human(input: &str) -> Result<Interval, Error> {
let years = YR_RE.captures(input)
.and_then(|x| Some(x[1].parse::<i32>().unwrap()))
.unwrap_or(0);
let months = MO_RE.captures(input)
.and_then(|x| Some(x[1].parse::<i32>().unwrap()))
.unwrap_or(0);
let days = DY_RE.captures(input)
.and_then(|x| Some(x[1].parse::<i32>().unwrap()))
.unwrap_or(0);
let hours = HR_RE.captures(input)
.and_then(|x| Some(x[1].parse::<i64>().unwrap()))
.unwrap_or(0);
let mins = MI_RE.captures(input)
.and_then(|x| Some(x[1].parse::<i64>().unwrap()))
.unwrap_or(0);
let secs = SD_RE.captures(input)
.and_then(|x| Some(x[1].parse::<i64>().unwrap()))
.unwrap_or(0);
let months = years * 12 + months;
let us: i64 = (hours * 60 * 60 * 1000000) +
(mins * 60 * 1000000) +
(secs * 1000000);
if [months as i64, days as i64, us as i64].iter().any(|&x| x > 0) {
return Ok(Interval {months, days, microseconds: us})
}
return Err(format_err!("Failed to parse interval string: {:?},
months: {:?}, days: {:?}, us: {:?}",
input, months, days, us))
}
/// Output the interval as iso 8601 compliant string.
///
/// The date portion ("PnYnMnD") is always emitted; a time portion is appended
/// when there is a sub-day component, or when the date portion is the bare
/// "P" designator (so the result is never just "P").
pub fn to_iso_8601(&self) -> String {
    let (years, months) = get_years_months(self.months);
    let mut interval = iso_8601::get_year_month_interval(years, months, self.days);
    if self.microseconds != 0 || interval == "P" {
        let (leftover, hours) = get_hours(self.microseconds);
        let (leftover, minutes) = get_minutes(leftover);
        let seconds = get_seconds(leftover);
        interval.push_str(iso_8601::get_day_time_interval(hours, minutes, seconds).as_str());
    }
    interval
}
/// Output the interval as a postgres interval string.
///
/// Emits the year/month/day portion when present, the HH:MM:SS portion when
/// there is a sub-day component, and both (space-separated) when both exist.
/// When the interval is entirely zero, only the time portion is emitted.
pub fn to_postgres(&self) -> String {
    let (years, months) = get_years_months(self.months);
    let date_part = pg_interval::get_year_month_interval(years, months, self.days);
    let (leftover, hours) = get_hours(self.microseconds);
    let (leftover, minutes) = get_minutes(leftover);
    let seconds = get_seconds(leftover);
    match date_part {
        Some(date) if self.microseconds != 0 => {
            let time = pg_interval::get_day_time_interval(hours, minutes, seconds);
            format!("{} {}", date, time)
        }
        Some(date) => date,
        None => pg_interval::get_day_time_interval(hours, minutes, seconds),
    }
}
/// Checked interval addition. Computes `Interval + Interval` and `None` if there
/// was an overflow in any of the three fields.
pub fn checked_add(self, other_interval: Interval) -> Option<Interval> {
    let months = self.months.checked_add(other_interval.months)?;
    let days = self.days.checked_add(other_interval.days)?;
    let microseconds = self.microseconds.checked_add(other_interval.microseconds)?;
    Some(Interval { months, days, microseconds })
}
/// Checked interval subtraction. Computes `Interval - Interval` and `None` if there
/// was an underflow in any of the three fields.
pub fn checked_sub(self, other_interval: Interval) -> Option<Interval> {
    let months = self.months.checked_sub(other_interval.months)?;
    let days = self.days.checked_sub(other_interval.days)?;
    let microseconds = self.microseconds.checked_sub(other_interval.microseconds)?;
    Some(Interval { months, days, microseconds })
}
}
impl ops::Add for Interval {
    type Output = Interval;

    /// Field-wise addition of two intervals.
    ///
    /// Uses plain `+`, so overflow panics in debug builds and wraps in
    /// release builds; use [`Interval::checked_add`] for a safe variant.
    fn add(self, other_interval: Interval) -> Interval {
        Interval {
            months: self.months + other_interval.months,
            // BUG FIX: `days` was previously summed with
            // `other_interval.months`, corrupting the day count.
            days: self.days + other_interval.days,
            microseconds: self.microseconds + other_interval.microseconds
        }
    }
}
/// Field-wise subtraction of two intervals. Uses plain `-`, so overflow
/// panics in debug builds; use [`Interval::checked_sub`] for a safe variant.
impl ops::Sub for Interval {
    type Output = Interval;
    fn sub(self, other_interval: Interval) -> Interval {
        let Interval { months, days, microseconds } = other_interval;
        Interval {
            months: self.months - months,
            days: self.days - days,
            microseconds: self.microseconds - microseconds,
        }
    }
}
// Helper splitting a total month count into (whole_years, leftover_months).
// Rust's `/` and `%` truncate toward zero, so both components carry the sign
// of the input (e.g. -14 -> (-1, -2)), matching the original arithmetic.
fn get_years_months(months: i32) -> (i32, i32) {
    (months / 12, months % 12)
}
// Helper splitting microseconds into (remaining_microseconds, whole_hours).
// Truncating `/` and `%` keep the sign of the input for both components.
fn get_hours(current_microseconds: i64) -> (i64, i64) {
    const MICROS_PER_HOUR: i64 = 3_600_000_000;
    (
        current_microseconds % MICROS_PER_HOUR,
        current_microseconds / MICROS_PER_HOUR,
    )
}
// Helper splitting microseconds into (remaining_microseconds, whole_minutes).
// Truncating `/` and `%` keep the sign of the input for both components.
fn get_minutes(current_microseconds: i64) -> (i64, i64) {
    const MICROS_PER_MINUTE: i64 = 60_000_000;
    (
        current_microseconds % MICROS_PER_MINUTE,
        current_microseconds / MICROS_PER_MINUTE,
    )
}
// Helper converting leftover microseconds into fractional seconds.
fn get_seconds(current_microseconds: i64) -> f64 {
    current_microseconds as f64 / 1_000_000.0
}
// Unit tests for the time-splitting helpers and `Interval::new`. Positive and
// negative inputs are tested in pairs, since the helpers use truncating
// division and must preserve the sign of the input in every component.
#[cfg(test)]
mod tests {
use super::Interval;
// -- get_hours: whole hours plus leftover microseconds --
#[test]
fn test_get_hours() {
let (remaining_micro, hours) = super::get_hours(3600000000);
assert_eq!(remaining_micro, 0);
assert_eq!(hours, 1);
let (remaining_micro, hours) = super::get_hours(4320000000);
assert_eq!(remaining_micro, 720000000);
assert_eq!(hours, 1);
}
// Negative microseconds yield negative hours and a negative remainder.
#[test]
fn test_get_neg_hours() {
let (remaining_micro, hours) = super::get_hours(-3600000000);
assert_eq!(remaining_micro, 0);
assert_eq!(hours, -1);
let (remaining_micro, hours) = super::get_hours(-4320000000);
assert_eq!(remaining_micro, -720000000);
assert_eq!(hours, -1);
}
// -- get_minutes: whole minutes plus leftover microseconds --
#[test]
fn test_get_minutes() {
let (remaining_micro, minutes) = super::get_minutes(60000000);
assert_eq!(remaining_micro, 0);
assert_eq!(minutes, 1);
let (remaining_micro, minutes) = super::get_minutes(75000000);
assert_eq!(remaining_micro, 15000000);
assert_eq!(minutes, 1);
}
#[test]
fn test_get_neg_minutes() {
let (remaining_micro, minutes) = super::get_minutes(-60000000);
assert_eq!(remaining_micro, 0);
assert_eq!(minutes, -1);
let (remaining_micro, minutes) = super::get_minutes(-75000000);
assert_eq!(remaining_micro, -15000000);
assert_eq!(minutes, -1);
}
// -- get_years_months: whole years plus leftover months --
#[test]
fn get_years_months_1() {
let months = 12;
let (years, months) = super::get_years_months(months);
assert_eq!(months, 0);
assert_eq!(years, 1);
let months = 14;
let (years, months) = super::get_years_months(months);
assert_eq!(years, 1);
assert_eq!(months, 2);
}
#[test]
fn get_years_months_neg_2() {
let months = -12;
let (years, months) = super::get_years_months(months);
assert_eq!(months, 0);
assert_eq!(years, -1);
let months = -14;
let (years, months) = super::get_years_months(months);
assert_eq!(years, -1);
assert_eq!(months, -2);
}
// -- get_seconds: microseconds to fractional seconds --
#[test]
fn test_get_seconds() {
let seconds = super::get_seconds(1000000);
assert_eq!(seconds, 1.0);
let seconds = super::get_seconds(1250000);
assert_eq!(seconds, 1.25);
}
#[test]
fn test_get_neg_seconds() {
let seconds = super::get_seconds(-1000000);
assert_eq!(seconds, -1.0);
let seconds = super::get_seconds(-1250000);
assert_eq!(seconds, -1.25);
}
// -- chained splitting: hours then minutes (then seconds) --
#[test]
fn test_get_hours_minutes() {
let (remaining_micro, hours) = super::get_hours(4320000000);
assert_eq!(remaining_micro, 720000000);
assert_eq!(hours, 1);
let (remaining_micro, minutes) = super::get_minutes(remaining_micro);
assert_eq!(remaining_micro, 0);
assert_eq!(minutes, 12);
}
#[test]
fn test_get_neg_hours_minutes() {
let (remaining_micro, hours) = super::get_hours(-4320000000);
assert_eq!(remaining_micro, -720000000);
assert_eq!(hours, -1);
let (remaining_micro, minutes) = super::get_minutes(remaining_micro);
assert_eq!(remaining_micro, 0);
assert_eq!(minutes, -12);
}
#[test]
fn test_get_hours_minutes_seconds() {
let (remaining_micro, hours) = super::get_hours(4509000000);
assert_eq!(remaining_micro, 909000000);
assert_eq!(hours, 1);
let (remaining_micro, minutes) = super::get_minutes(remaining_micro);
assert_eq!(remaining_micro, 9000000);
assert_eq!(minutes, 15);
let seconds : f64 = super::get_seconds(remaining_micro);
assert_eq!(seconds, 9.0);
}
#[test]
fn test_get_neg_hours_minutes_seconds() {
let (remaining_micro, hours) = super::get_hours(-4509000000);
assert_eq!(remaining_micro, -909000000);
assert_eq!(hours, -1);
let (remaining_micro, minutes) = super::get_minutes(remaining_micro);
assert_eq!(remaining_micro, -9000000);
assert_eq!(minutes, -15);
let seconds : f64 = super::get_seconds(remaining_micro);
assert_eq!(seconds, -9.0);
}
// -- Interval::new: fields stored verbatim --
#[test]
fn test_new_interval_pos() {
let interval = Interval::new(Some(1),Some(1),Some(30));
assert_eq!(interval.months, 1);
assert_eq!(interval.days, 1);
assert_eq!(interval.microseconds, 30);
}
#[test]
fn test_new_interval_neg() {
let interval = Interval::new(Some(-1),Some(-1),Some(-30));
assert_eq!(interval.months, -1);
assert_eq!(interval.days, -1);
assert_eq!(interval.microseconds, -30);
}
}
|
extern crate ndarray;
use ndarray::*;
use std::f32;
/// Returns a `HoughFilter` with default parameters: 32-pixel square blocks,
/// 20 theta steps across [0, pi), and a zero vote threshold (so any block
/// with at least one accumulator vote reports a line).
pub fn default() -> HoughFilter {
HoughFilter{
block_size: 32,
theta_resolution: 20,
slope_count_thresh: 0
}
}
/// Parameters for the block-wise Hough line detector.
pub struct HoughFilter {
/// Side length (pixels) of the square blocks the image is tiled into.
pub block_size: usize,
/// Number of discrete theta steps across [0, pi).
pub theta_resolution: usize,
/// Minimum vote count an accumulator bin must exceed for a block to
/// report a line estimate (see `HoughFilter::run`).
pub slope_count_thresh: u32
}
/// Hough vote accumulator for one image block: rows index rho, columns index
/// theta. Row indices are shifted by `rho_offset` so negative rho values are
/// representable.
pub struct HoughContainer {
/// Vote counts, indexed as `[rho_offset + rho_i, slope_i]`.
pub data: Array2<u32>,
/// Row offset applied to rho indices (half the rho dimension).
pub rho_offset: usize,
}
impl HoughContainer {
    /// Allocates an accumulator with `2 * max_rho_i` rho bins (rho may be
    /// negative, hence the `rho_offset` shift) and `max_slope_i` theta bins.
    pub fn new(max_rho_i: usize, max_slope_i: usize) -> Self {
        let data = Array2::<u32>::zeros((max_rho_i * 2, max_slope_i));
        Self { data, rho_offset: max_rho_i }
    }

    /// Adds one vote to the `(rho_i, slope_i)` bin.
    /// Panics if `rho_offset + rho_i` falls outside the allocated rho range.
    pub fn count_up(&mut self, rho_i: i32, slope_i: usize) {
        self.data[[((self.rho_offset as i32) + rho_i) as usize, slope_i]] += 1;
    }

    /// Rho index (with the offset removed) of the bin holding the most votes.
    pub fn max_rho_i(&self) -> i32 {
        ((self.data.indexed_iter().max_by_key(|e| e.1).unwrap().0).0 as i32) - (self.rho_offset as i32)
    }

    /// Theta index of the bin holding the most votes.
    pub fn max_slope_i(&self) -> i32 {
        (self.data.indexed_iter().max_by_key(|e| e.1).unwrap().0).1 as i32
    }

    /// Highest vote count anywhere in the accumulator.
    pub fn max_count(&self) -> u32 {
        // `copied()` replaces the previous explicit `.clone()` on a Copy type
        // (clippy::clone_on_copy).
        self.data.iter().copied().max().unwrap()
    }
}
impl HoughFilter {
/// Runs a block-wise Hough transform over `img`.
///
/// The image is tiled into `block_size`-square blocks; for each block the
/// dominant line is estimated and stored as `[slope, rho / block_size]` in
/// the returned `(y_blocks, x_blocks, 2)` array. Blocks whose best
/// accumulator bin does not exceed `slope_count_thresh` are set to NaN.
/// Trailing rows/columns that do not fill a whole block are ignored.
pub fn run (&self, img: Array2<f32>) -> Array3<f32> {
let ys = img.shape()[0];
let xs = img.shape()[1];
let y_block_count: usize = ys / self.block_size;
let x_block_count: usize = xs / self.block_size;
let mut maximum_rho_slopes = Array3::<f32>::zeros((y_block_count, x_block_count, 2));
for y_block_i in 0..y_block_count {
for x_block_i in 0..x_block_count {
let yr = (y_block_i * self.block_size) as isize..((y_block_i+1) * self.block_size) as isize;
let xr = (x_block_i * self.block_size) as isize..((x_block_i+1) * self.block_size) as isize;
let block = img.slice(s![yr, xr]);
let container = self.hough_transform(block);
if container.max_count() > self.slope_count_thresh {
let max_rho_i = container.max_rho_i();
let max_slope_i = container.max_slope_i();
maximum_rho_slopes[[y_block_i, x_block_i, 0]] = self.index2slope(max_slope_i as usize);
maximum_rho_slopes[[y_block_i, x_block_i, 1]] = (max_rho_i as f32)/(self.block_size as f32);
} else {
// No line confidently detected in this block.
maximum_rho_slopes[[y_block_i, x_block_i, 0]] = f32::NAN;
maximum_rho_slopes[[y_block_i, x_block_i, 1]] = f32::NAN;
}
}
}
return maximum_rho_slopes;
}
/// Accumulates Hough votes for every non-zero pixel of `block`, over all
/// `theta_resolution` discretized angles.
fn hough_transform(&self, block: ArrayView2<f32>) -> HoughContainer {
let ys = block.shape()[0];
let xs = block.shape()[1];
let ys_f = ys as f32;
let xs_f = xs as f32;
// NOTE(review): the `xs*ys` term pads the rho range beyond the block
// diagonal sqrt(xs^2 + ys^2); presumably deliberate headroom — confirm.
let max_rho_i = (f32::sqrt(xs_f.powi(2)+ys_f.powi(2)+xs_f*ys_f) + 1.) as usize;
let max_slope_i = self.theta_resolution as usize;
// y axis: rho, x axis: theta_i
let mut container = HoughContainer::new(max_rho_i, max_slope_i);
for y in 0..ys {
for x in 0..xs {
if block[[y, x]] > 0. {
for slope_i in 0..max_slope_i {
let slope = self.index2slope(slope_i);
let rho_i = self.calc_rho(x as f32, y as f32, slope) as i32;
container.count_up(rho_i, slope_i);
}
}
}
}
return container;
}
/// Maps a theta index in `0..theta_resolution` to an angle in [0, pi).
fn index2slope(&self, slope_i: usize) -> f32 {
return (slope_i as f32 / self.theta_resolution as f32) * f32::consts::PI;
}
/// Normal-form line distance: rho = x*cos(theta) + y*sin(theta).
fn calc_rho(&self, x: f32, y: f32, slope: f32) -> f32 {
let rho = x * f32::cos(slope) + y * f32::sin(slope);
return rho
}
}
|
// Auto-generated (svd2rust-style) register accessor for DINR18; do not edit
// by hand — regenerate from the SVD description instead.
#[doc = "Reader of register DINR18"]
pub type R = crate::R<u32, super::DINR18>;
#[doc = "Reader of field `DIN18`"]
pub type DIN18_R = crate::R<u16, u16>;
impl R {
#[doc = "Bits 0:15 - Input data received from MDIO Master during write frames"]
#[inline(always)]
pub fn din18(&self) -> DIN18_R {
// The field occupies bits 0..=15, hence the low half-word mask.
DIN18_R::new((self.bits & 0xffff) as u16)
}
}
|
//! A Postgres backed implementation of the Catalog
use crate::interface::MAX_PARQUET_FILES_SELECTED_ONCE_FOR_DELETE;
use crate::{
interface::{
self, CasFailure, Catalog, ColumnRepo, ColumnTypeMismatchSnafu, Error, NamespaceRepo,
ParquetFileRepo, PartitionRepo, RepoCollection, Result, SoftDeletedRows, TableRepo,
MAX_PARQUET_FILES_SELECTED_ONCE_FOR_RETENTION,
},
kafkaless_transition::{
SHARED_QUERY_POOL, SHARED_QUERY_POOL_ID, SHARED_TOPIC_ID, SHARED_TOPIC_NAME,
TRANSITION_SHARD_ID, TRANSITION_SHARD_INDEX,
},
metrics::MetricDecorator,
migrate::IOxMigrator,
DEFAULT_MAX_COLUMNS_PER_TABLE, DEFAULT_MAX_TABLES,
};
use async_trait::async_trait;
use data_types::{
partition_template::{
NamespacePartitionTemplateOverride, TablePartitionTemplateOverride, TemplatePart,
},
Column, ColumnType, CompactionLevel, Namespace, NamespaceId, NamespaceName,
NamespaceServiceProtectionLimitsOverride, ParquetFile, ParquetFileId, ParquetFileParams,
Partition, PartitionHashId, PartitionId, PartitionKey, SkippedCompaction, Table, TableId,
Timestamp, TransitionPartitionId,
};
use iox_time::{SystemProvider, TimeProvider};
use metric::{Attributes, Instrument, MetricKind};
use observability_deps::tracing::{debug, info, warn};
use once_cell::sync::Lazy;
use parking_lot::{RwLock, RwLockWriteGuard};
use snafu::prelude::*;
use sqlx::{
postgres::{PgConnectOptions, PgPoolOptions},
types::Uuid,
Acquire, ConnectOptions, Executor, Postgres, Row,
};
use sqlx_hotswap_pool::HotSwapPool;
use std::borrow::Cow;
use std::collections::HashSet;
use std::sync::atomic::{AtomicU64, Ordering};
use std::{collections::HashMap, fmt::Display, str::FromStr, sync::Arc, time::Duration};
// Embedded sqlx migrations wrapped in an IOxMigrator, built lazily on first
// use. The `expect` can only fire if the bundled migration files themselves
// are malformed, which is a build-time bug rather than a runtime condition.
static MIGRATOR: Lazy<IOxMigrator> =
Lazy::new(|| IOxMigrator::try_from(&sqlx::migrate!()).expect("valid migration"));
/// Postgres connection options.
#[derive(Debug, Clone)]
pub struct PostgresConnectionOptions {
/// Application name.
///
/// This will be reported to postgres.
pub app_name: String,
/// Schema name.
pub schema_name: String,
/// DSN.
///
/// May either be a plain postgres DSN or a `dsn-file://` indirection
/// (see [`new_pool`]).
pub dsn: String,
/// Maximum number of concurrent connections.
pub max_conns: u32,
/// Set the amount of time to attempt connecting to the database.
pub connect_timeout: Duration,
/// Set a maximum idle duration for individual connections.
pub idle_timeout: Duration,
/// If the DSN points to a file (i.e. starts with `dsn-file://`), this sets the interval how often the file
/// should be polled for updates.
///
/// If an update is encountered, the underlying connection pool will be hot-swapped.
pub hotswap_poll_interval: Duration,
}
// Default values for each option; referenced by the `Default` impl below.
impl PostgresConnectionOptions {
/// Default value for [`schema_name`](Self::schema_name).
pub const DEFAULT_SCHEMA_NAME: &'static str = "iox_catalog";
/// Default value for [`max_conns`](Self::max_conns).
pub const DEFAULT_MAX_CONNS: u32 = 10;
/// Default value for [`connect_timeout`](Self::connect_timeout).
pub const DEFAULT_CONNECT_TIMEOUT: Duration = Duration::from_secs(2);
/// Default value for [`idle_timeout`](Self::idle_timeout).
pub const DEFAULT_IDLE_TIMEOUT: Duration = Duration::from_secs(10);
/// Default value for [`hotswap_poll_interval`](Self::hotswap_poll_interval).
pub const DEFAULT_HOTSWAP_POLL_INTERVAL: Duration = Duration::from_secs(5);
}
// Note: the default DSN is empty and must be filled in by the caller before
// the options are usable.
impl Default for PostgresConnectionOptions {
fn default() -> Self {
Self {
app_name: String::from("iox"),
schema_name: String::from(Self::DEFAULT_SCHEMA_NAME),
dsn: String::new(),
max_conns: Self::DEFAULT_MAX_CONNS,
connect_timeout: Self::DEFAULT_CONNECT_TIMEOUT,
idle_timeout: Self::DEFAULT_IDLE_TIMEOUT,
hotswap_poll_interval: Self::DEFAULT_HOTSWAP_POLL_INTERVAL,
}
}
}
/// PostgreSQL catalog.
#[derive(Debug)]
pub struct PostgresCatalog {
/// Metric registry used by the repository decorators and pool metrics.
metrics: Arc<metric::Registry>,
/// Hot-swappable connection pool (replaced when a `dsn-file://` changes).
pool: HotSwapPool<Postgres>,
/// Source of wall-clock time, e.g. for soft-delete timestamps.
time_provider: Arc<dyn TimeProvider>,
// Connection options for display
options: PostgresConnectionOptions,
}
impl PostgresCatalog {
/// Connect to the catalog store.
///
/// Builds the (potentially hot-swapping) connection pool; fails with
/// `Error::SqlxError` if the pool cannot be created.
pub async fn connect(
options: PostgresConnectionOptions,
metrics: Arc<metric::Registry>,
) -> Result<Self> {
let pool = new_pool(&options, Arc::clone(&metrics))
.await
.map_err(|e| Error::SqlxError { source: e })?;
Ok(Self {
pool,
metrics,
time_provider: Arc::new(SystemProvider::new()),
options,
})
}
/// Postgres schema this catalog's tables live in.
fn schema_name(&self) -> &str {
&self.options.schema_name
}
/// Consume the catalog and expose the raw pool (test-only escape hatch).
#[cfg(test)]
pub(crate) fn into_pool(self) -> HotSwapPool<Postgres> {
self.pool
}
}
impl Display for PostgresCatalog {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The DSN is deliberately omitted: it may embed credentials that must
        // never end up in logs.
        let schema_name = self.schema_name();
        write!(f, "Postgres(dsn=OMITTED, schema_name='{schema_name}')")
    }
}
/// transaction for [`PostgresCatalog`].
#[derive(Debug)]
pub struct PostgresTxn {
/// Pool-backed executor the repo methods run their queries on.
inner: PostgresTxnInner,
/// Source of wall-clock time (e.g. for soft-delete timestamps).
time_provider: Arc<dyn TimeProvider>,
}
// NOTE(review): despite the "Txn" name this wraps a pool, not a pinned
// connection — each query may run on a different connection, so this does not
// provide transactional isolation. Confirm that callers do not rely on it.
#[derive(Debug)]
struct PostgresTxnInner {
pool: HotSwapPool<Postgres>,
}
// Makes `&mut PostgresTxnInner` usable wherever sqlx expects an `Executor` by
// delegating every method to the wrapped pool. Each call acquires a
// connection from the pool independently.
impl<'c> Executor<'c> for &'c mut PostgresTxnInner {
type Database = Postgres;
#[allow(clippy::type_complexity)]
// Streams the results of `query`, yielding both row and non-row outcomes.
fn fetch_many<'e, 'q: 'e, E: 'q>(
self,
query: E,
) -> futures::stream::BoxStream<
'e,
Result<
sqlx::Either<
<Self::Database as sqlx::Database>::QueryResult,
<Self::Database as sqlx::Database>::Row,
>,
sqlx::Error,
>,
>
where
'c: 'e,
E: sqlx::Execute<'q, Self::Database>,
{
self.pool.fetch_many(query)
}
// Runs `query` and resolves to at most one row.
fn fetch_optional<'e, 'q: 'e, E: 'q>(
self,
query: E,
) -> futures::future::BoxFuture<
'e,
Result<Option<<Self::Database as sqlx::Database>::Row>, sqlx::Error>,
>
where
'c: 'e,
E: sqlx::Execute<'q, Self::Database>,
{
self.pool.fetch_optional(query)
}
// Prepares `sql` as a statement with the given parameter types.
fn prepare_with<'e, 'q: 'e>(
self,
sql: &'q str,
parameters: &'e [<Self::Database as sqlx::Database>::TypeInfo],
) -> futures::future::BoxFuture<
'e,
Result<<Self::Database as sqlx::database::HasStatement<'q>>::Statement, sqlx::Error>,
>
where
'c: 'e,
{
self.pool.prepare_with(sql, parameters)
}
// Asks the database to describe the inputs/outputs of `sql`.
fn describe<'e, 'q: 'e>(
self,
sql: &'q str,
) -> futures::future::BoxFuture<'e, Result<sqlx::Describe<Self::Database>, sqlx::Error>>
where
'c: 'e,
{
self.pool.describe(sql)
}
}
#[async_trait]
impl Catalog for PostgresCatalog {
// Creates the schema, runs migrations, and seeds the rows (topic, transition
// shard, query pool) that the rest of the catalog assumes exist. Idempotent:
// every insert is an `ON CONFLICT ... DO NOTHING`.
async fn setup(&self) -> Result<(), Error> {
// We need to create the schema if we're going to set it as the first item of the
// search_path otherwise when we run the sqlx migration scripts for the first time, sqlx
// will create the `_sqlx_migrations` table in the public namespace (the only namespace
// that exists), but the second time it will create it in the `<schema_name>` namespace and
// re-run all the migrations without skipping the ones already applied (see #3893).
//
// This makes the migrations/20210217134322_create_schema.sql step unnecessary; we need to
// keep that file because migration files are immutable.
let create_schema_query = format!("CREATE SCHEMA IF NOT EXISTS {};", self.schema_name());
self.pool
.execute(sqlx::query(&create_schema_query))
.await
.map_err(|e| Error::Setup { source: e })?;
MIGRATOR
.run(&self.pool)
.await
.map_err(|e| Error::Setup { source: e.into() })?;
// We need to manually insert the topic here so that we can create the transition shard
// below.
sqlx::query(
r#"
INSERT INTO topic (name)
VALUES ($1)
ON CONFLICT ON CONSTRAINT topic_name_unique
DO NOTHING;
"#,
)
.bind(SHARED_TOPIC_NAME)
.execute(&self.pool)
.await
.map_err(|e| Error::Setup { source: e })?;
// The transition shard must exist and must have magic ID and INDEX.
sqlx::query(
r#"
INSERT INTO shard (id, topic_id, shard_index, min_unpersisted_sequence_number)
OVERRIDING SYSTEM VALUE
VALUES ($1, $2, $3, 0)
ON CONFLICT ON CONSTRAINT shard_unique
DO NOTHING;
"#,
)
.bind(TRANSITION_SHARD_ID)
.bind(SHARED_TOPIC_ID)
.bind(TRANSITION_SHARD_INDEX)
.execute(&self.pool)
.await
.map_err(|e| Error::Setup { source: e })?;
// We need to manually insert the query pool here so that we can create namespaces that
// reference it.
sqlx::query(
r#"
INSERT INTO query_pool (name)
VALUES ($1)
ON CONFLICT ON CONSTRAINT query_pool_name_unique
DO NOTHING;
"#,
)
.bind(SHARED_QUERY_POOL)
.execute(&self.pool)
.await
.map_err(|e| Error::Setup { source: e })?;
Ok(())
}
// Hands out a fresh repo collection backed by the shared pool, wrapped in
// the metric decorator so every repo call is instrumented.
async fn repositories(&self) -> Box<dyn RepoCollection> {
Box::new(MetricDecorator::new(
PostgresTxn {
inner: PostgresTxnInner {
pool: self.pool.clone(),
},
time_provider: Arc::clone(&self.time_provider),
},
Arc::clone(&self.metrics),
))
}
#[cfg(test)]
fn metrics(&self) -> Arc<metric::Registry> {
Arc::clone(&self.metrics)
}
fn time_provider(&self) -> Arc<dyn TimeProvider> {
Arc::clone(&self.time_provider)
}
}
/// Adapter to connect sqlx pools with our metrics system.
///
/// Cheap to clone: all clones share the same [`PoolMetricsInner`] state.
#[derive(Debug, Clone, Default)]
struct PoolMetrics {
/// Actual shared state.
state: Arc<PoolMetricsInner>,
}
/// Inner state of [`PoolMetrics`] that is wrapped into an [`Arc`].
#[derive(Debug, Default)]
struct PoolMetricsInner {
/// Next pool ID.
///
/// Monotonically increasing so every registered pool gets a unique label.
pool_id_gen: AtomicU64,
/// Set of known pools and their ID labels.
///
/// Note: The pool is internally ref-counted via an [`Arc`]. Holding a reference does NOT prevent it from being closed.
pools: RwLock<Vec<(Arc<str>, sqlx::Pool<Postgres>)>>,
}
impl PoolMetrics {
    /// Create new pool metrics, registered as an instrument with `metrics`.
    fn new(metrics: Arc<metric::Registry>) -> Self {
        metrics.register_instrument("iox_catalog_postgres", Self::default)
    }

    /// Register a new pool under the next free numeric ID label.
    fn register_pool(&self, pool: sqlx::Pool<Postgres>) {
        let id: Arc<str> = self
            .state
            .pool_id_gen
            .fetch_add(1, Ordering::SeqCst)
            .to_string()
            .into();
        self.state.pools.write().push((id, pool));
    }

    /// Remove closed pools from given list.
    fn clean_pools(pools: &mut Vec<(Arc<str>, sqlx::Pool<Postgres>)>) {
        pools.retain(|(_id, pool)| !pool.is_closed());
    }
}
impl Instrument for PoolMetrics {
    /// Report one gauge for the number of live pools, then four gauges
    /// (active/idle/max/min connections) per live pool.
    fn report(&self, reporter: &mut dyn metric::Reporter) {
        // Prune closed pools under the write lock, then downgrade so other
        // readers are not blocked while we emit observations.
        let mut pools = self.state.pools.write();
        Self::clean_pools(&mut pools);
        let pools = RwLockWriteGuard::downgrade(pools);

        reporter.start_metric(
            "sqlx_postgres_pools",
            "Number of pools that sqlx uses",
            MetricKind::U64Gauge,
        );
        reporter.report_observation(
            &Attributes::from([]),
            metric::Observation::U64Gauge(pools.len() as u64),
        );
        reporter.finish_metric();

        reporter.start_metric(
            "sqlx_postgres_connections",
            "Number of connections within the postgres connection pool that sqlx uses",
            MetricKind::U64Gauge,
        );
        for (id, p) in pools.iter() {
            // Emit each connection statistic with the same attribute shape,
            // varying only the "state" label.
            let observations = [
                ("active", p.size() as u64),
                ("idle", p.num_idle() as u64),
                ("max", p.options().get_max_connections() as u64),
                ("min", p.options().get_min_connections() as u64),
            ];
            for (state, value) in observations {
                reporter.report_observation(
                    &Attributes::from([
                        ("pool_id", Cow::Owned(id.as_ref().to_owned())),
                        ("state", Cow::Borrowed(state)),
                    ]),
                    metric::Observation::U64Gauge(value),
                );
            }
        }
        reporter.finish_metric();
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
}
/// Creates a new [`sqlx::Pool`] from a database config and an explicit DSN.
///
/// This function doesn't support the IDPE specific `dsn-file://` uri scheme.
async fn new_raw_pool(
options: &PostgresConnectionOptions,
parsed_dsn: &str,
metrics: PoolMetrics,
) -> Result<sqlx::Pool<Postgres>, sqlx::Error> {
// sqlx exposes some options as pool options, while other options are available as connection options.
let connect_options = PgConnectOptions::from_str(parsed_dsn)?
// the default is INFO, which is frankly surprising.
.log_statements(log::LevelFilter::Trace);
let app_name = options.app_name.clone();
let app_name2 = options.app_name.clone(); // just to log below
let schema_name = options.schema_name.clone();
let pool = PgPoolOptions::new()
.min_connections(1)
.max_connections(options.max_conns)
.acquire_timeout(options.connect_timeout)
.idle_timeout(options.idle_timeout)
.test_before_acquire(true)
.after_connect(move |c, _meta| {
let app_name = app_name.to_owned();
let schema_name = schema_name.to_owned();
Box::pin(async move {
// Tag the connection with the provided application name, while allowing it to
// be overridden from the connection string (aka DSN).
// If current_application_name is empty here it means the application name wasn't
// set as part of the DSN, and we can set it explicitly.
// Recall that this block is running on connection, not when creating the pool!
let current_application_name: String =
sqlx::query_scalar("SELECT current_setting('application_name');")
.fetch_one(&mut *c)
.await?;
if current_application_name.is_empty() {
sqlx::query("SELECT set_config('application_name', $1, false);")
.bind(&*app_name)
.execute(&mut *c)
.await?;
}
// Put the catalog schema first on the search path so unqualified
// table names resolve there (see the comment in `setup`).
let search_path_query = format!("SET search_path TO {schema_name},public;");
c.execute(sqlx::query(&search_path_query)).await?;
// Ensure explicit timezone selection, instead of deferring to
// the server value.
c.execute("SET timezone = 'UTC';").await?;
Ok(())
})
})
.connect_with(connect_options)
.await?;
// Log a connection was successfully established and include the application
// name for cross-correlation between Conductor logs & database connections.
info!(application_name=%app_name2, "connected to config store");
metrics.register_pool(pool.clone());
Ok(pool)
}
/// Parse a postgres catalog dsn, handling the special `dsn-file://`
/// syntax (see [`new_pool`] for more details).
///
/// Returns an error if the dsn-file could not be read correctly.
pub fn parse_dsn(dsn: &str) -> Result<String, sqlx::Error> {
    match get_dsn_file_path(dsn) {
        // Indirected DSN: the real DSN is the file's contents.
        Some(filename) => Ok(std::fs::read_to_string(filename)?),
        // Plain DSN: use it verbatim.
        None => Ok(dsn.to_string()),
    }
}
/// Creates a new HotSwapPool
///
/// This function understands the IDPE specific `dsn-file://` dsn uri scheme
/// and hot swaps the pool with a new sqlx::Pool when the file changes.
/// This is useful because the credentials can be rotated by infrastructure
/// agents while the service is running.
///
/// The file is polled for changes every `polling_interval`.
///
/// The pool is replaced only once the new pool is successfully created.
/// The [`new_raw_pool`] function will return a new pool only if the connection
/// is successful (see [`sqlx::pool::PoolOptions::test_before_acquire`]).
async fn new_pool(
options: &PostgresConnectionOptions,
metrics: Arc<metric::Registry>,
) -> Result<HotSwapPool<Postgres>, sqlx::Error> {
let parsed_dsn = parse_dsn(&options.dsn)?;
let metrics = PoolMetrics::new(metrics);
let pool = HotSwapPool::new(new_raw_pool(options, &parsed_dsn, metrics.clone()).await?);
let polling_interval = options.hotswap_poll_interval;
// Only a `dsn-file://` DSN gets the polling/hot-swap background task.
if let Some(dsn_file) = get_dsn_file_path(&options.dsn) {
let pool = pool.clone();
let options = options.clone();
// TODO(mkm): return a guard that stops this background worker.
// We create only one pool per process, but it would be cleaner to be
// able to properly destroy the pool. If we don't kill this worker we
// effectively keep the pool alive (since it holds a reference to the
// Pool) and we also potentially pollute the logs with spurious warnings
// if the dsn file disappears (this may be annoying if they show up in the test
// logs).
tokio::spawn(async move {
let mut current_dsn = parsed_dsn.clone();
loop {
tokio::time::sleep(polling_interval).await;
// Re-reads the dsn file; when its contents changed, builds a
// replacement pool, swaps it in, and closes the old one.
// Returns Ok(Some(new_dsn)) on swap, Ok(None) when unchanged.
async fn try_update(
options: &PostgresConnectionOptions,
current_dsn: &str,
dsn_file: &str,
pool: &HotSwapPool<Postgres>,
metrics: PoolMetrics,
) -> Result<Option<String>, sqlx::Error> {
let new_dsn = std::fs::read_to_string(dsn_file)?;
if new_dsn == current_dsn {
Ok(None)
} else {
let new_pool = new_raw_pool(options, &new_dsn, metrics).await?;
let old_pool = pool.replace(new_pool);
info!("replaced hotswap pool");
info!(?old_pool, "closing old DB connection pool");
// The pool is not closed on drop. We need to call `close`.
// It will close all idle connections, and wait until acquired connections
// are returned to the pool or closed.
old_pool.close().await;
info!(?old_pool, "closed old DB connection pool");
Ok(Some(new_dsn))
}
}
match try_update(&options, &current_dsn, &dsn_file, &pool, metrics.clone()).await {
Ok(None) => {}
Ok(Some(new_dsn)) => {
current_dsn = new_dsn;
}
Err(e) => {
// Keep serving from the existing pool; we'll retry on
// the next polling tick.
warn!(
error=%e,
filename=%dsn_file,
"not replacing hotswap pool because of an error \
connecting to the new DSN"
);
}
}
}
});
}
Ok(pool)
}
// Parses a `dsn-file://` scheme, according to the rules of the IDPE kit/sql package.
//
// If the dsn matches the `dsn-file://` prefix, the prefix is removed and the rest is interpreted
// as a file name, in which case this function will return `Some(filename)`.
// Otherwise it will return None. No URI decoding is performed on the filename.
fn get_dsn_file_path(dsn: &str) -> Option<String> {
    const DSN_SCHEME: &str = "dsn-file://";
    // `strip_prefix` replaces the previous `starts_with` + panic-prone slice
    // (clippy::manual_strip); behavior is identical.
    dsn.strip_prefix(DSN_SCHEME).map(str::to_owned)
}
// `PostgresTxn` implements every repo trait itself, so each accessor simply
// returns `self` as the requested trait object.
#[async_trait]
impl RepoCollection for PostgresTxn {
fn namespaces(&mut self) -> &mut dyn NamespaceRepo {
self
}
fn tables(&mut self) -> &mut dyn TableRepo {
self
}
fn columns(&mut self) -> &mut dyn ColumnRepo {
self
}
fn partitions(&mut self) -> &mut dyn PartitionRepo {
self
}
fn parquet_files(&mut self) -> &mut dyn ParquetFileRepo {
self
}
}
// Inserts a column into `column_name`, enforcing the namespace's
// `max_columns_per_table` limit inside the query itself, and returns the
// (new or pre-existing) column row.
//
// Errors:
// - `ColumnCreateLimitError` when the column count is already at the limit
//   (the INSERT's SELECT yields no row, so sqlx reports `RowNotFound`);
// - `ColumnTypeMismatch` when the column exists with a different type;
// - FK violations / other sqlx errors otherwise.
async fn insert_column_with_connection<'q, E>(
executor: E,
name: &str,
table_id: TableId,
column_type: ColumnType,
) -> Result<Column>
where
E: Executor<'q, Database = Postgres>,
{
let rec = sqlx::query_as::<_, Column>(
// The inner SELECT counts existing columns for the table; the outer
// INSERT only fires while the count is below the limit. The no-op
// `DO UPDATE SET name = column_name.name` makes the conflicting
// (already existing) row come back via RETURNING instead of nothing.
r#"
INSERT INTO column_name ( name, table_id, column_type )
SELECT $1, table_id, $3 FROM (
SELECT max_columns_per_table, namespace.id, table_name.id as table_id, COUNT(column_name.*) AS count
FROM namespace LEFT JOIN table_name ON namespace.id = table_name.namespace_id
LEFT JOIN column_name ON table_name.id = column_name.table_id
WHERE table_name.id = $2
GROUP BY namespace.max_columns_per_table, namespace.id, table_name.id
) AS get_count WHERE count < max_columns_per_table
ON CONFLICT ON CONSTRAINT column_name_unique
DO UPDATE SET name = column_name.name
RETURNING *;
"#,
)
.bind(name) // $1
.bind(table_id) // $2
.bind(column_type) // $3
.fetch_one(executor)
.await
.map_err(|e| match e {
sqlx::Error::RowNotFound => Error::ColumnCreateLimitError {
column_name: name.to_string(),
table_id,
},
_ => {
if is_fk_violation(&e) {
Error::ForeignKeyViolation { source: e }
} else {
Error::SqlxError { source: e }
}
}})?;
// An existing column with a different type is a schema conflict.
ensure!(
rec.column_type == column_type,
ColumnTypeMismatchSnafu {
name,
existing: rec.column_type,
new: column_type,
}
);
Ok(rec)
}
#[async_trait]
impl NamespaceRepo for PostgresTxn {
async fn create(
&mut self,
name: &NamespaceName<'_>,
partition_template: Option<NamespacePartitionTemplateOverride>,
retention_period_ns: Option<i64>,
service_protection_limits: Option<NamespaceServiceProtectionLimitsOverride>,
) -> Result<Namespace> {
let max_tables = service_protection_limits.and_then(|l| l.max_tables);
let max_columns_per_table = service_protection_limits.and_then(|l| l.max_columns_per_table);
let rec = sqlx::query_as::<_, Namespace>(
r#"
INSERT INTO namespace (
name, topic_id, query_pool_id, retention_period_ns, max_tables, max_columns_per_table, partition_template
)
VALUES ( $1, $2, $3, $4, $5, $6, $7 )
RETURNING id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
partition_template;
"#,
)
.bind(name.as_str()) // $1
.bind(SHARED_TOPIC_ID) // $2
.bind(SHARED_QUERY_POOL_ID) // $3
.bind(retention_period_ns) // $4
.bind(max_tables.unwrap_or(DEFAULT_MAX_TABLES)) // $5
.bind(max_columns_per_table.unwrap_or(DEFAULT_MAX_COLUMNS_PER_TABLE)) // $6
.bind(partition_template); // $7
let rec = rec.fetch_one(&mut self.inner).await.map_err(|e| {
if is_unique_violation(&e) {
Error::NameExists {
name: name.to_string(),
}
} else if is_fk_violation(&e) {
Error::ForeignKeyViolation { source: e }
} else {
Error::SqlxError { source: e }
}
})?;
Ok(rec)
}
async fn list(&mut self, deleted: SoftDeletedRows) -> Result<Vec<Namespace>> {
let rec = sqlx::query_as::<_, Namespace>(
format!(
r#"
SELECT id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
partition_template
FROM namespace
WHERE {v};
"#,
v = deleted.as_sql_predicate()
)
.as_str(),
)
.fetch_all(&mut self.inner)
.await
.map_err(|e| Error::SqlxError { source: e })?;
Ok(rec)
}
async fn get_by_id(
&mut self,
id: NamespaceId,
deleted: SoftDeletedRows,
) -> Result<Option<Namespace>> {
let rec = sqlx::query_as::<_, Namespace>(
format!(
r#"
SELECT id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
partition_template
FROM namespace
WHERE id=$1 AND {v};
"#,
v = deleted.as_sql_predicate()
)
.as_str(),
)
.bind(id) // $1
.fetch_one(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(None);
}
let namespace = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(Some(namespace))
}
async fn get_by_name(
&mut self,
name: &str,
deleted: SoftDeletedRows,
) -> Result<Option<Namespace>> {
let rec = sqlx::query_as::<_, Namespace>(
format!(
r#"
SELECT id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
partition_template
FROM namespace
WHERE name=$1 AND {v};
"#,
v = deleted.as_sql_predicate()
)
.as_str(),
)
.bind(name) // $1
.fetch_one(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(None);
}
let namespace = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(Some(namespace))
}
async fn soft_delete(&mut self, name: &str) -> Result<()> {
    // Soft delete: stamp `deleted_at` instead of removing the row.
    let deleted_at = Timestamp::from(self.time_provider.now());

    // note that there is a uniqueness constraint on the name column in the DB
    sqlx::query(r#"UPDATE namespace SET deleted_at=$1 WHERE name = $2;"#)
        .bind(deleted_at) // $1
        .bind(name) // $2
        .execute(&mut self.inner)
        .await
        .map(|_| ())
        .context(interface::CouldNotDeleteNamespaceSnafu)
}
async fn update_table_limit(&mut self, name: &str, new_max: i32) -> Result<Namespace> {
    // Set the per-namespace table limit, returning the updated record.
    sqlx::query_as::<_, Namespace>(
        r#"
UPDATE namespace
SET max_tables = $1
WHERE name = $2
RETURNING id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
          partition_template;
        "#,
    )
    .bind(new_max) // $1
    .bind(name) // $2
    .fetch_one(&mut self.inner)
    .await
    .map_err(|e| match e {
        // Zero rows updated means no namespace with this name exists.
        sqlx::Error::RowNotFound => Error::NamespaceNotFoundByName {
            name: name.to_string(),
        },
        _ => Error::SqlxError { source: e },
    })
}
async fn update_column_limit(&mut self, name: &str, new_max: i32) -> Result<Namespace> {
    // Set the per-table column limit for this namespace, returning the
    // updated record.
    sqlx::query_as::<_, Namespace>(
        r#"
UPDATE namespace
SET max_columns_per_table = $1
WHERE name = $2
RETURNING id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
          partition_template;
        "#,
    )
    .bind(new_max) // $1
    .bind(name) // $2
    .fetch_one(&mut self.inner)
    .await
    .map_err(|e| match e {
        // Zero rows updated means no namespace with this name exists.
        sqlx::Error::RowNotFound => Error::NamespaceNotFoundByName {
            name: name.to_string(),
        },
        _ => Error::SqlxError { source: e },
    })
}
async fn update_retention_period(
    &mut self,
    name: &str,
    retention_period_ns: Option<i64>,
) -> Result<Namespace> {
    // Set (or clear, via `None` -> NULL) the retention period, returning
    // the updated record.
    sqlx::query_as::<_, Namespace>(
        r#"
UPDATE namespace
SET retention_period_ns = $1
WHERE name = $2
RETURNING id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
          partition_template;
        "#,
    )
    .bind(retention_period_ns) // $1
    .bind(name) // $2
    .fetch_one(&mut self.inner)
    .await
    .map_err(|e| match e {
        // Zero rows updated means no namespace with this name exists.
        sqlx::Error::RowNotFound => Error::NamespaceNotFoundByName {
            name: name.to_string(),
        },
        _ => Error::SqlxError { source: e },
    })
}
}
#[async_trait]
impl TableRepo for PostgresTxn {
/// Create a table in `namespace_id`, enforcing the namespace's table limit
/// atomically, and create tag columns for any `TagValue` parts of its
/// partition template within the same transaction.
async fn create(
    &mut self,
    name: &str,
    partition_template: TablePartitionTemplateOverride,
    namespace_id: NamespaceId,
) -> Result<Table> {
    let mut tx = self
        .inner
        .pool
        .begin()
        .await
        .map_err(|e| Error::StartTransaction { source: e })?;

    // A simple insert statement becomes quite complicated in order to avoid checking the table
    // limits in a select and then conditionally inserting (which would be racey).
    //
    // from https://www.postgresql.org/docs/current/sql-insert.html
    // "INSERT inserts new rows into a table. One can insert one or more rows specified by
    // value expressions, or zero or more rows resulting from a query."
    // By using SELECT rather than VALUES it will insert zero rows if it finds a null in the
    // subquery, i.e. if count >= max_tables. fetch_one() will return a RowNotFound error if
    // nothing was inserted. Not pretty!
    let table = sqlx::query_as::<_, Table>(
        r#"
INSERT INTO table_name ( name, namespace_id, partition_template )
SELECT $1, id, $2 FROM (
    SELECT namespace.id AS id, max_tables, COUNT(table_name.*) AS count
    FROM namespace LEFT JOIN table_name ON namespace.id = table_name.namespace_id
    WHERE namespace.id = $3
    GROUP BY namespace.max_tables, table_name.namespace_id, namespace.id
) AS get_count WHERE count < max_tables
RETURNING *;
        "#,
    )
    .bind(name) // $1
    .bind(partition_template) // $2
    .bind(namespace_id) // $3
    .fetch_one(&mut *tx)
    .await
    .map_err(|e| match e {
        // Zero rows inserted: the namespace is at its table limit (see the
        // explanation above).
        sqlx::Error::RowNotFound => Error::TableCreateLimitError {
            table_name: name.to_string(),
            namespace_id,
        },
        _ => {
            if is_unique_violation(&e) {
                // A table with this name already exists in the namespace.
                Error::TableNameExists {
                    name: name.to_string(),
                    namespace_id,
                }
            } else if is_fk_violation(&e) {
                Error::ForeignKeyViolation { source: e }
            } else {
                Error::SqlxError { source: e }
            }
        }
    })?;

    // Partitioning is only supported for tags, so create tag columns for all `TagValue`
    // partition template parts. It's important this happens within the table creation
    // transaction so that there isn't a possibility of a concurrent write creating these
    // columns with an unsupported type.
    for template_part in table.partition_template.parts() {
        if let TemplatePart::TagValue(tag_name) = template_part {
            insert_column_with_connection(&mut *tx, tag_name, table.id, ColumnType::Tag)
                .await?;
        }
    }

    tx.commit()
        .await
        .map_err(|source| Error::FailedToCommit { source })?;

    Ok(table)
}
async fn get_by_id(&mut self, table_id: TableId) -> Result<Option<Table>> {
let rec = sqlx::query_as::<_, Table>(
r#"
SELECT *
FROM table_name
WHERE id = $1;
"#,
)
.bind(table_id) // $1
.fetch_one(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(None);
}
let table = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(Some(table))
}
async fn get_by_namespace_and_name(
&mut self,
namespace_id: NamespaceId,
name: &str,
) -> Result<Option<Table>> {
let rec = sqlx::query_as::<_, Table>(
r#"
SELECT *
FROM table_name
WHERE namespace_id = $1 AND name = $2;
"#,
)
.bind(namespace_id) // $1
.bind(name) // $2
.fetch_one(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(None);
}
let table = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(Some(table))
}
async fn list_by_namespace_id(&mut self, namespace_id: NamespaceId) -> Result<Vec<Table>> {
let rec = sqlx::query_as::<_, Table>(
r#"
SELECT *
FROM table_name
WHERE namespace_id = $1;
"#,
)
.bind(namespace_id)
.fetch_all(&mut self.inner)
.await
.map_err(|e| Error::SqlxError { source: e })?;
Ok(rec)
}
async fn list(&mut self) -> Result<Vec<Table>> {
let rec = sqlx::query_as::<_, Table>("SELECT * FROM table_name;")
.fetch_all(&mut self.inner)
.await
.map_err(|e| Error::SqlxError { source: e })?;
Ok(rec)
}
}
#[async_trait]
impl ColumnRepo for PostgresTxn {
/// Insert-or-fetch a single column.
///
/// Thin wrapper: the actual logic lives in `insert_column_with_connection`,
/// which is shared with table creation so tag columns for partition
/// templates go through the same code path.
async fn create_or_get(
    &mut self,
    name: &str,
    table_id: TableId,
    column_type: ColumnType,
) -> Result<Column> {
    insert_column_with_connection(&mut self.inner, name, table_id, column_type).await
}
async fn list_by_namespace_id(&mut self, namespace_id: NamespaceId) -> Result<Vec<Column>> {
let rec = sqlx::query_as::<_, Column>(
r#"
SELECT column_name.* FROM table_name
INNER JOIN column_name on column_name.table_id = table_name.id
WHERE table_name.namespace_id = $1;
"#,
)
.bind(namespace_id)
.fetch_all(&mut self.inner)
.await
.map_err(|e| Error::SqlxError { source: e })?;
Ok(rec)
}
async fn list_by_table_id(&mut self, table_id: TableId) -> Result<Vec<Column>> {
let rec = sqlx::query_as::<_, Column>(
r#"
SELECT * FROM column_name
WHERE table_id = $1;
"#,
)
.bind(table_id)
.fetch_all(&mut self.inner)
.await
.map_err(|e| Error::SqlxError { source: e })?;
Ok(rec)
}
async fn list(&mut self) -> Result<Vec<Column>> {
let rec = sqlx::query_as::<_, Column>("SELECT * FROM column_name;")
.fetch_all(&mut self.inner)
.await
.map_err(|e| Error::SqlxError { source: e })?;
Ok(rec)
}
/// Bulk insert-or-get columns for `table_id` WITHOUT enforcing the
/// namespace column limit (the "unchecked" in the name). Existing columns
/// are returned as-is; a type mismatch against an existing column is an
/// error.
async fn create_or_get_many_unchecked(
    &mut self,
    table_id: TableId,
    columns: HashMap<&str, ColumnType>,
) -> Result<Vec<Column>> {
    let num_columns = columns.len();
    // Split the map into parallel name/type arrays for the UNNEST below.
    let (v_name, v_column_type): (Vec<&str>, Vec<i16>) = columns
        .iter()
        .map(|(&name, &column_type)| (name, column_type as i16))
        .unzip();

    // The `ORDER BY` in this statement is important to avoid deadlocks during concurrent
    // writes to the same IOx table that each add many new columns. See:
    //
    // - <https://rcoh.svbtle.com/postgres-unique-constraints-can-cause-deadlock>
    // - <https://dba.stackexchange.com/a/195220/27897>
    // - <https://github.com/influxdata/idpe/issues/16298>
    let out = sqlx::query_as::<_, Column>(
        r#"
INSERT INTO column_name ( name, table_id, column_type )
SELECT name, $1, column_type
FROM UNNEST($2, $3) as a(name, column_type)
ORDER BY name
ON CONFLICT ON CONSTRAINT column_name_unique
DO UPDATE SET name = column_name.name
RETURNING *;
        "#,
    )
    .bind(table_id) // $1
    .bind(&v_name) // $2
    .bind(&v_column_type) // $3
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| {
        if is_fk_violation(&e) {
            Error::ForeignKeyViolation { source: e }
        } else {
            Error::SqlxError { source: e }
        }
    })?;

    // The no-op `DO UPDATE` makes `RETURNING` also yield pre-existing rows,
    // so every requested column must come back.
    assert_eq!(num_columns, out.len());

    // Verify each returned (possibly pre-existing) column has the type the
    // caller asked for.
    for existing in &out {
        let want = columns.get(existing.name.as_str()).unwrap();
        ensure!(
            existing.column_type == *want,
            ColumnTypeMismatchSnafu {
                name: &existing.name,
                existing: existing.column_type,
                new: *want,
            }
        );
    }

    Ok(out)
}
}
#[async_trait]
impl PartitionRepo for PostgresTxn {
/// Idempotently create the partition for `(key, table_id)`; if it already
/// exists, the no-op `DO UPDATE` makes `RETURNING` yield the existing row.
async fn create_or_get(&mut self, key: PartitionKey, table_id: TableId) -> Result<Partition> {
    // Note: since sort_key is now an array, we must explicitly insert '{}' which is an empty
    // array rather than NULL which sqlx will throw `UnexpectedNullError` while is is doing
    // `ColumnDecode`
    let hash_id = PartitionHashId::new(table_id, &key);

    let v = sqlx::query_as::<_, Partition>(
        r#"
INSERT INTO partition
    (partition_key, shard_id, table_id, hash_id, sort_key)
VALUES
    ( $1, $2, $3, $4, '{}')
ON CONFLICT ON CONSTRAINT partition_key_unique
DO UPDATE SET partition_key = partition.partition_key
RETURNING id, hash_id, table_id, partition_key, sort_key, new_file_at;
        "#,
    )
    .bind(key) // $1
    .bind(TRANSITION_SHARD_ID) // $2
    .bind(table_id) // $3
    .bind(&hash_id) // $4
    .fetch_one(&mut self.inner)
    .await
    .map_err(|e| {
        // e.g. the referenced table does not exist.
        if is_fk_violation(&e) {
            Error::ForeignKeyViolation { source: e }
        } else {
            Error::SqlxError { source: e }
        }
    })?;

    Ok(v)
}
async fn get_by_id(&mut self, partition_id: PartitionId) -> Result<Option<Partition>> {
let rec = sqlx::query_as::<_, Partition>(
r#"
SELECT id, hash_id, table_id, partition_key, sort_key, new_file_at
FROM partition
WHERE id = $1;
"#,
)
.bind(partition_id) // $1
.fetch_one(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(None);
}
let partition = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(Some(partition))
}
async fn get_by_id_batch(&mut self, partition_ids: Vec<PartitionId>) -> Result<Vec<Partition>> {
    // Bind the raw id values as an array for `= ANY($1)`.
    let raw_ids = partition_ids.iter().map(|p| p.get()).collect::<Vec<_>>();

    sqlx::query_as::<_, Partition>(
        r#"
SELECT id, hash_id, table_id, partition_key, sort_key, new_file_at
FROM partition
WHERE id = ANY($1);
        "#,
    )
    .bind(&raw_ids[..]) // $1
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
async fn get_by_hash_id(
&mut self,
partition_hash_id: &PartitionHashId,
) -> Result<Option<Partition>> {
let rec = sqlx::query_as::<_, Partition>(
r#"
SELECT id, hash_id, table_id, partition_key, sort_key, new_file_at
FROM partition
WHERE hash_id = $1;
"#,
)
.bind(partition_hash_id) // $1
.fetch_one(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(None);
}
let partition = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(Some(partition))
}
async fn get_by_hash_id_batch(
    &mut self,
    partition_ids: &[&PartitionHashId],
) -> Result<Vec<Partition>> {
    // Bind the raw hash-id bytes as an array for `= ANY($1)`.
    let byte_ids: Vec<_> = partition_ids.iter().map(|p| p.as_bytes()).collect();

    sqlx::query_as::<_, Partition>(
        r#"
SELECT id, hash_id, table_id, partition_key, sort_key, new_file_at
FROM partition
WHERE hash_id = ANY($1);
        "#,
    )
    .bind(&byte_ids[..]) // $1
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
async fn list_by_table_id(&mut self, table_id: TableId) -> Result<Vec<Partition>> {
    // All partitions belonging to one table.
    sqlx::query_as::<_, Partition>(
        r#"
SELECT id, hash_id, table_id, partition_key, sort_key, new_file_at
FROM partition
WHERE table_id = $1;
        "#,
    )
    .bind(table_id) // $1
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
async fn list_ids(&mut self) -> Result<Vec<PartitionId>> {
    // Ids of every partition in the catalog.
    sqlx::query_as(
        r#"
SELECT p.id as partition_id
FROM partition p
        "#,
    )
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
/// Update the sort key for `partition_id` if and only if `old_sort_key`
/// matches the current value in the database.
///
/// This compare-and-swap operation is allowed to spuriously return
/// [`CasFailure::ValueMismatch`] for performance reasons (avoiding multiple
/// round trips to service a transaction in the happy path).
async fn cas_sort_key(
    &mut self,
    partition_id: &TransitionPartitionId,
    old_sort_key: Option<Vec<String>>,
    new_sort_key: &[&str],
) -> Result<Partition, CasFailure<Vec<String>>> {
    // A `None` old key is compared as the empty list.
    let old_sort_key = old_sort_key.unwrap_or_default();

    // This `match` will go away when all partitions have hash IDs in the database.
    let query = match partition_id {
        TransitionPartitionId::Deterministic(hash_id) => sqlx::query_as::<_, Partition>(
            r#"
UPDATE partition
SET sort_key = $1
WHERE hash_id = $2 AND sort_key = $3
RETURNING id, hash_id, table_id, partition_key, sort_key, new_file_at;
            "#,
        )
        .bind(new_sort_key) // $1
        .bind(hash_id) // $2
        .bind(&old_sort_key), // $3
        TransitionPartitionId::Deprecated(id) => sqlx::query_as::<_, Partition>(
            r#"
UPDATE partition
SET sort_key = $1
WHERE id = $2 AND sort_key = $3
RETURNING id, hash_id, table_id, partition_key, sort_key, new_file_at;
            "#,
        )
        .bind(new_sort_key) // $1
        .bind(id) // $2
        .bind(&old_sort_key), // $3
    };

    let res = query.fetch_one(&mut self.inner).await;

    let partition = match res {
        Ok(v) => v,
        Err(sqlx::Error::RowNotFound) => {
            // This update may have failed either because:
            //
            // * A row with the specified ID did not exist at query time
            //   (but may exist now!)
            // * The sort key does not match.
            //
            // To differentiate, we submit a get partition query, returning
            // the actual sort key if successful.
            //
            // NOTE: this is racy, but documented - this might return "Sort
            // key differs! Old key: <old sort key you provided>"
            return Err(CasFailure::ValueMismatch(
                crate::partition_lookup(self, partition_id)
                    .await
                    .map_err(CasFailure::QueryError)?
                    .ok_or(CasFailure::QueryError(Error::PartitionNotFound {
                        id: partition_id.clone(),
                    }))?
                    .sort_key,
            ));
        }
        Err(e) => return Err(CasFailure::QueryError(Error::SqlxError { source: e })),
    };

    debug!(
        ?partition_id,
        ?old_sort_key,
        ?new_sort_key,
        "partition sort key cas successful"
    );

    Ok(partition)
}
/// Upsert a record of why compaction was skipped for `partition_id`; a
/// later skip for the same partition overwrites the previous record.
async fn record_skipped_compaction(
    &mut self,
    partition_id: PartitionId,
    reason: &str,
    num_files: usize,
    limit_num_files: usize,
    limit_num_files_first_in_partition: usize,
    estimated_bytes: u64,
    limit_bytes: u64,
) -> Result<()> {
    sqlx::query(
        r#"
INSERT INTO skipped_compactions
    ( partition_id, reason, num_files, limit_num_files, limit_num_files_first_in_partition, estimated_bytes, limit_bytes, skipped_at )
VALUES
    ( $1, $2, $3, $4, $5, $6, $7, extract(epoch from NOW()) )
ON CONFLICT ( partition_id )
DO UPDATE
SET
reason = EXCLUDED.reason,
num_files = EXCLUDED.num_files,
limit_num_files = EXCLUDED.limit_num_files,
limit_num_files_first_in_partition = EXCLUDED.limit_num_files_first_in_partition,
estimated_bytes = EXCLUDED.estimated_bytes,
limit_bytes = EXCLUDED.limit_bytes,
skipped_at = EXCLUDED.skipped_at;
        "#,
    )
    .bind(partition_id) // $1
    .bind(reason) // $2
    .bind(num_files as i64) // $3
    .bind(limit_num_files as i64) // $4
    .bind(limit_num_files_first_in_partition as i64) // $5
    // NOTE(review): `u64 as i64` wraps for values above `i64::MAX`;
    // presumably these byte counts never get that large — confirm.
    .bind(estimated_bytes as i64) // $6
    .bind(limit_bytes as i64) // $7
    .execute(&mut self.inner)
    .await
    .context(interface::CouldNotRecordSkippedCompactionSnafu { partition_id })?;
    Ok(())
}
async fn get_in_skipped_compactions(
&mut self,
partition_ids: &[PartitionId],
) -> Result<Vec<SkippedCompaction>> {
let rec = sqlx::query_as::<_, SkippedCompaction>(
r#"SELECT * FROM skipped_compactions WHERE partition_id = ANY($1);"#,
)
.bind(partition_ids) // $1
.fetch_all(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(Vec::new());
}
let skipped_partition_records = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(skipped_partition_records)
}
async fn list_skipped_compactions(&mut self) -> Result<Vec<SkippedCompaction>> {
    // Full dump of the skipped-compactions table.
    sqlx::query_as::<_, SkippedCompaction>(
        r#"
SELECT * FROM skipped_compactions
        "#,
    )
    .fetch_all(&mut self.inner)
    .await
    .context(interface::CouldNotListSkippedCompactionsSnafu)
}
async fn delete_skipped_compactions(
    &mut self,
    partition_id: PartitionId,
) -> Result<Option<SkippedCompaction>> {
    // Remove the record for this partition, returning it if one existed.
    sqlx::query_as::<_, SkippedCompaction>(
        r#"
DELETE FROM skipped_compactions
WHERE partition_id = $1
RETURNING *
        "#,
    )
    .bind(partition_id) // $1
    .fetch_optional(&mut self.inner)
    .await
    .context(interface::CouldNotDeleteSkippedCompactionsSnafu)
}
async fn most_recent_n(&mut self, n: usize) -> Result<Vec<Partition>> {
    // The `n` partitions with the highest ids.
    //
    // The column list now matches every other partition query in this file;
    // the previous version additionally selected `persisted_sequence_number`,
    // which no other partition query includes and which a derived row
    // decoder would simply ignore.
    sqlx::query_as(
        r#"
SELECT id, hash_id, table_id, partition_key, sort_key, new_file_at
FROM partition
ORDER BY id DESC
LIMIT $1;"#,
    )
    .bind(n as i64) // $1
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
/// Ids of partitions that received a new file after `minimum_time` and,
/// when given, before `maximum_time`.
async fn partitions_new_file_between(
    &mut self,
    minimum_time: Timestamp,
    maximum_time: Option<Timestamp>,
) -> Result<Vec<PartitionId>> {
    // The upper-bound clause is rendered only when a maximum was supplied.
    let sql = format!(
        r#"
SELECT p.id as partition_id
FROM partition p
WHERE p.new_file_at > $1
{}
        "#,
        maximum_time
            .map(|_| "AND p.new_file_at < $2")
            .unwrap_or_default()
    );

    sqlx::query_as(&sql)
        .bind(minimum_time) // $1
        // NOTE(review): this binds `$2` even when `maximum_time` is `None`
        // and the SQL contains no `$2` placeholder; presumably the driver
        // tolerates the extra argument here — confirm before changing.
        .bind(maximum_time) // $2
        .fetch_all(&mut self.inner)
        .await
        .map_err(|e| Error::SqlxError { source: e })
}
}
#[async_trait]
impl ParquetFileRepo for PostgresTxn {
/// Insert a new parquet file record, returning the full record with its
/// newly assigned id.
///
/// Delegates to the shared `create_parquet_file` helper (also used by
/// `create_upgrade_delete`).
async fn create(&mut self, parquet_file_params: ParquetFileParams) -> Result<ParquetFile> {
    let executor = &mut self.inner;
    let id = create_parquet_file(executor, &parquet_file_params).await?;
    Ok(ParquetFile::from_params(parquet_file_params, id))
}
async fn list_all(&mut self) -> Result<Vec<ParquetFile>> {
    // Every parquet file in the catalog, including soft-deleted ones.
    sqlx::query_as::<_, ParquetFile>(
        r#"
SELECT parquet_file.id, parquet_file.namespace_id, parquet_file.table_id,
       parquet_file.partition_id, parquet_file.partition_hash_id, parquet_file.object_store_id,
       parquet_file.min_time, parquet_file.max_time, parquet_file.to_delete,
       parquet_file.file_size_bytes, parquet_file.row_count, parquet_file.compaction_level,
       parquet_file.created_at, parquet_file.column_set, parquet_file.max_l0_created_at
FROM parquet_file;
        "#,
    )
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
/// Flag (soft-delete) parquet files whose data has aged out of their
/// namespace's retention period, at most
/// `MAX_PARQUET_FILES_SELECTED_ONCE_FOR_RETENTION` per call. Returns the
/// ids of the files flagged by this call.
async fn flag_for_delete_by_retention(&mut self) -> Result<Vec<ParquetFileId>> {
    let flagged_at = Timestamp::from(self.time_provider.now());
    // TODO - include check of table retention period once implemented
    //
    // The CTE bounds the batch size, since UPDATE has no LIMIT of its own.
    let flagged = sqlx::query(
        r#"
WITH parquet_file_ids as (
    SELECT parquet_file.id
    FROM namespace, parquet_file
    WHERE namespace.retention_period_ns IS NOT NULL
    AND parquet_file.to_delete IS NULL
    AND parquet_file.max_time < $1 - namespace.retention_period_ns
    AND namespace.id = parquet_file.namespace_id
    LIMIT $2
)
UPDATE parquet_file
SET to_delete = $1
WHERE id IN (SELECT id FROM parquet_file_ids)
RETURNING id;
        "#,
    )
    .bind(flagged_at) // $1
    .bind(MAX_PARQUET_FILES_SELECTED_ONCE_FOR_RETENTION) // $2
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })?;

    let flagged = flagged.into_iter().map(|row| row.get("id")).collect();
    Ok(flagged)
}
async fn list_by_namespace_not_to_delete(
    &mut self,
    namespace_id: NamespaceId,
) -> Result<Vec<ParquetFile>> {
    // Non-deleted parquet files in the namespace, joined through
    // table_name since parquet_file rows are keyed by table.
    sqlx::query_as::<_, ParquetFile>(
        r#"
SELECT parquet_file.id, parquet_file.namespace_id, parquet_file.table_id,
       parquet_file.partition_id, parquet_file.partition_hash_id, parquet_file.object_store_id,
       parquet_file.min_time, parquet_file.max_time, parquet_file.to_delete,
       parquet_file.file_size_bytes, parquet_file.row_count, parquet_file.compaction_level,
       parquet_file.created_at, parquet_file.column_set, parquet_file.max_l0_created_at
FROM parquet_file
INNER JOIN table_name on table_name.id = parquet_file.table_id
WHERE table_name.namespace_id = $1
AND parquet_file.to_delete IS NULL;
        "#,
    )
    .bind(namespace_id) // $1
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
async fn list_by_table_not_to_delete(&mut self, table_id: TableId) -> Result<Vec<ParquetFile>> {
    // Non-deleted parquet files belonging to one table.
    sqlx::query_as::<_, ParquetFile>(
        r#"
SELECT id, namespace_id, table_id, partition_id, partition_hash_id, object_store_id,
       min_time, max_time, to_delete, file_size_bytes, row_count, compaction_level, created_at,
       column_set, max_l0_created_at
FROM parquet_file
WHERE table_id = $1 AND to_delete IS NULL;
        "#,
    )
    .bind(table_id) // $1
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
async fn delete_old_ids_only(&mut self, older_than: Timestamp) -> Result<Vec<ParquetFileId>> {
    // Hard-delete flagged files whose `to_delete` stamp predates
    // `older_than`, bounded per call via a CTE (Postgres has no
    // DELETE ... LIMIT).
    // see https://www.crunchydata.com/blog/simulating-update-or-delete-with-limit-in-postgres-ctes-to-the-rescue
    let rows = sqlx::query(
        r#"
WITH parquet_file_ids as (
    SELECT id
    FROM parquet_file
    WHERE to_delete < $1
    LIMIT $2
)
DELETE FROM parquet_file
WHERE id IN (SELECT id FROM parquet_file_ids)
RETURNING id;
        "#,
    )
    .bind(older_than) // $1
    .bind(MAX_PARQUET_FILES_SELECTED_ONCE_FOR_DELETE) // $2
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })?;

    Ok(rows.into_iter().map(|row| row.get("id")).collect())
}
/// Non-deleted parquet files in one partition, addressed by either
/// identifier style of the hash-id transition.
async fn list_by_partition_not_to_delete(
    &mut self,
    partition_id: &TransitionPartitionId,
) -> Result<Vec<ParquetFile>> {
    // This `match` will go away when all partitions have hash IDs in the database.
    let query = match partition_id {
        TransitionPartitionId::Deterministic(hash_id) => sqlx::query_as::<_, ParquetFile>(
            r#"
SELECT parquet_file.id, namespace_id, parquet_file.table_id, partition_id, partition_hash_id,
       object_store_id, min_time, max_time, parquet_file.to_delete, file_size_bytes, row_count,
       compaction_level, created_at, column_set, max_l0_created_at
FROM parquet_file
INNER JOIN partition
ON partition.id = parquet_file.partition_id OR partition.hash_id = parquet_file.partition_hash_id
WHERE partition.hash_id = $1
  AND parquet_file.to_delete IS NULL;
            "#,
        )
        .bind(hash_id), // $1
        TransitionPartitionId::Deprecated(id) => sqlx::query_as::<_, ParquetFile>(
            r#"
SELECT parquet_file.id, namespace_id, parquet_file.table_id, partition_id, partition_hash_id,
       object_store_id, min_time, max_time, parquet_file.to_delete, file_size_bytes, row_count,
       compaction_level, created_at, column_set, max_l0_created_at
FROM parquet_file
INNER JOIN partition
ON partition.id = parquet_file.partition_id OR partition.hash_id = parquet_file.partition_hash_id
WHERE partition.id = $1
  AND parquet_file.to_delete IS NULL;
            "#,
        )
        .bind(id), // $1
    };

    query
        .fetch_all(&mut self.inner)
        .await
        .map_err(|e| Error::SqlxError { source: e })
}
async fn get_by_object_store_id(
&mut self,
object_store_id: Uuid,
) -> Result<Option<ParquetFile>> {
let rec = sqlx::query_as::<_, ParquetFile>(
r#"
SELECT id, namespace_id, table_id, partition_id, partition_hash_id, object_store_id, min_time,
max_time, to_delete, file_size_bytes, row_count, compaction_level, created_at, column_set,
max_l0_created_at
FROM parquet_file
WHERE object_store_id = $1;
"#,
)
.bind(object_store_id) // $1
.fetch_one(&mut self.inner)
.await;
if let Err(sqlx::Error::RowNotFound) = rec {
return Ok(None);
}
let parquet_file = rec.map_err(|e| Error::SqlxError { source: e })?;
Ok(Some(parquet_file))
}
async fn exists_by_object_store_id_batch(
    &mut self,
    object_store_ids: Vec<Uuid>,
) -> Result<Vec<Uuid>> {
    // Return the subset of the given object store ids that exist.
    //
    // sqlx's readme suggests using PG's ANY operator instead of IN; see link below.
    // https://github.com/launchbadge/sqlx/blob/main/FAQ.md#how-can-i-do-a-select--where-foo-in--query
    sqlx::query(
        r#"
SELECT object_store_id
FROM parquet_file
WHERE object_store_id = ANY($1);
        "#,
    )
    .bind(object_store_ids) // $1
    .map(|row| row.get::<Uuid, _>("object_store_id"))
    .fetch_all(&mut self.inner)
    .await
    .map_err(|e| Error::SqlxError { source: e })
}
/// Atomically flag `delete` files for deletion, upgrade `upgrade` files to
/// `target_level`, and insert the `create` files, all within one
/// transaction. Returns the ids of the newly created files.
async fn create_upgrade_delete(
    &mut self,
    delete: &[ParquetFileId],
    upgrade: &[ParquetFileId],
    create: &[ParquetFileParams],
    target_level: CompactionLevel,
) -> Result<Vec<ParquetFileId>> {
    let delete_set: HashSet<_> = delete.iter().map(|d| d.get()).collect();
    let upgrade_set: HashSet<_> = upgrade.iter().map(|u| u.get()).collect();

    // A file scheduled for delete must not also be upgraded — that is a
    // caller bug, hence an assertion rather than an error.
    assert!(
        delete_set.is_disjoint(&upgrade_set),
        "attempted to upgrade a file scheduled for delete"
    );

    let mut tx = self
        .inner
        .pool
        .begin()
        .await
        .map_err(|e| Error::StartTransaction { source: e })?;

    let marked_at = Timestamp::from(self.time_provider.now());
    flag_for_delete(&mut *tx, delete, marked_at).await?;

    update_compaction_level(&mut *tx, upgrade, target_level).await?;

    let mut ids = Vec::with_capacity(create.len());
    for file in create {
        let id = create_parquet_file(&mut *tx, file).await?;
        ids.push(id);
    }

    tx.commit()
        .await
        .map_err(|source| Error::FailedToCommit { source })?;
    Ok(ids)
}
}
// The following three functions are helpers to the create_upgrade_delete method.
// They are also used by the respective create/flag_for_delete/update_compaction_level methods.
/// Insert a single parquet file row and return its newly assigned id.
///
/// Generic over the executor so the same statement can run directly on the
/// pool (repo `create`) or inside a transaction (`create_upgrade_delete`).
async fn create_parquet_file<'q, E>(
    executor: E,
    parquet_file_params: &ParquetFileParams,
) -> Result<ParquetFileId>
where
    E: Executor<'q, Database = Postgres>,
{
    // Exhaustive destructuring (no `..`) makes a newly added field a
    // compile error here, so it cannot be silently dropped.
    let ParquetFileParams {
        namespace_id,
        table_id,
        partition_id,
        object_store_id,
        min_time,
        max_time,
        file_size_bytes,
        row_count,
        compaction_level,
        created_at,
        column_set,
        max_l0_created_at,
    } = parquet_file_params;

    // Exactly one of (partition_id, partition_hash_id) is stored, depending
    // on which side of the hash-id transition this partition is on.
    let (partition_id, partition_hash_id) = match partition_id {
        TransitionPartitionId::Deterministic(hash_id) => (None, Some(hash_id)),
        TransitionPartitionId::Deprecated(id) => (Some(id), None),
    };

    let partition_hash_id_ref = &partition_hash_id.as_ref();
    let query = sqlx::query_scalar::<_, ParquetFileId>(
        r#"
INSERT INTO parquet_file (
    shard_id, table_id, partition_id, partition_hash_id, object_store_id,
    min_time, max_time, file_size_bytes,
    row_count, compaction_level, created_at, namespace_id, column_set, max_l0_created_at )
VALUES ( $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14 )
RETURNING id;
        "#,
    )
    .bind(TRANSITION_SHARD_ID) // $1
    .bind(table_id) // $2
    .bind(partition_id) // $3
    .bind(partition_hash_id_ref) // $4
    .bind(object_store_id) // $5
    .bind(min_time) // $6
    .bind(max_time) // $7
    .bind(file_size_bytes) // $8
    .bind(row_count) // $9
    .bind(compaction_level) // $10
    .bind(created_at) // $11
    .bind(namespace_id) // $12
    .bind(column_set) // $13
    .bind(max_l0_created_at); // $14

    let parquet_file_id = query.fetch_one(executor).await.map_err(|e| {
        // Map constraint violations onto domain errors.
        if is_unique_violation(&e) {
            // Same object store id already recorded.
            Error::FileExists {
                object_store_id: *object_store_id,
            }
        } else if is_fk_violation(&e) {
            Error::ForeignKeyViolation { source: e }
        } else {
            Error::SqlxError { source: e }
        }
    })?;

    Ok(parquet_file_id)
}
async fn flag_for_delete<'q, E>(
    executor: E,
    ids: &[ParquetFileId],
    marked_at: Timestamp,
) -> Result<()>
where
    E: Executor<'q, Database = Postgres>,
{
    // Soft-delete every file in `ids` by stamping `to_delete`.
    sqlx::query(r#"UPDATE parquet_file SET to_delete = $1 WHERE id = ANY($2);"#)
        .bind(marked_at) // $1
        .bind(ids) // $2
        .execute(executor)
        .await
        .map_err(|e| Error::SqlxError { source: e })?;

    Ok(())
}
async fn update_compaction_level<'q, E>(
    executor: E,
    parquet_file_ids: &[ParquetFileId],
    compaction_level: CompactionLevel,
) -> Result<()>
where
    E: Executor<'q, Database = Postgres>,
{
    // Set the compaction level of every file in `parquet_file_ids`.
    sqlx::query(
        r#"
UPDATE parquet_file
SET compaction_level = $1
WHERE id = ANY($2);
        "#,
    )
    .bind(compaction_level) // $1
    .bind(parquet_file_ids) // $2
    .execute(executor)
    .await
    .map_err(|e| Error::SqlxError { source: e })?;

    Ok(())
}
/// The error code returned by Postgres for a unique constraint violation.
///
/// See <https://www.postgresql.org/docs/9.2/errcodes-appendix.html>
const PG_UNIQUE_VIOLATION: &str = "23505";
/// Returns true if `e` is a unique constraint violation error.
fn is_unique_violation(e: &sqlx::Error) -> bool {
if let sqlx::Error::Database(inner) = e {
if let Some(code) = inner.code() {
if code == PG_UNIQUE_VIOLATION {
return true;
}
}
}
false
}
/// Error code returned by Postgres for a foreign key constraint violation.
const PG_FK_VIOLATION: &str = "23503";
fn is_fk_violation(e: &sqlx::Error) -> bool {
if let sqlx::Error::Database(inner) = e {
if let Some(code) = inner.code() {
if code == PG_FK_VIOLATION {
return true;
}
}
}
false
}
/// Test helpers postgres testing.
#[cfg(test)]
pub(crate) mod test_utils {
use super::*;
use rand::Rng;
use sqlx::migrate::MigrateDatabase;
pub const TEST_DSN_ENV: &str = "TEST_INFLUXDB_IOX_CATALOG_DSN";
/// Helper macro to skip tests if TEST_INTEGRATION and TEST_INFLUXDB_IOX_CATALOG_DSN environment
/// variables are not set.
macro_rules! maybe_skip_integration {
    ($panic_msg:expr) => {{
        // Load any .env file first so the checks below see its variables.
        dotenvy::dotenv().ok();

        let required_vars = [crate::postgres::test_utils::TEST_DSN_ENV];
        // Names of required variables that are currently unset.
        let unset_vars: Vec<_> = required_vars
            .iter()
            .filter_map(|&name| match std::env::var(name) {
                Ok(_) => None,
                Err(_) => Some(name),
            })
            .collect();
        let unset_var_names = unset_vars.join(", ");

        let force = std::env::var("TEST_INTEGRATION");

        if force.is_ok() && !unset_var_names.is_empty() {
            // Integration explicitly requested but misconfigured: fail loudly.
            panic!(
                "TEST_INTEGRATION is set, \
                but variable(s) {} need to be set",
                unset_var_names
            );
        } else if force.is_err() {
            // Integration not requested: skip by returning from the test fn.
            eprintln!(
                "skipping Postgres integration test - set {}TEST_INTEGRATION to run",
                if unset_var_names.is_empty() {
                    String::new()
                } else {
                    format!("{} and ", unset_var_names)
                }
            );
            let panic_msg: &'static str = $panic_msg;
            if !panic_msg.is_empty() {
                panic!("{}", panic_msg);
            }
            return;
        }
    }};
    () => {
        maybe_skip_integration!("")
    };
}

pub(crate) use maybe_skip_integration;
pub async fn create_db(dsn: &str) {
    // Create the catalog database if it doesn't exist.
    let exists = Postgres::database_exists(dsn).await.unwrap();
    if !exists {
        // Ignore failure if another test has already created the database
        let _ = Postgres::create_database(dsn).await;
    }
}
/// Connect a `PostgresCatalog` against a freshly created, randomly named
/// schema, WITHOUT running migrations (see `setup_db` for the migrated
/// variant).
pub async fn setup_db_no_migration() -> PostgresCatalog {
    // create a random schema for this particular pool
    let schema_name = {
        // use scope to make it clear to clippy / rust that `rng` is
        // not carried past await points
        let mut rng = rand::thread_rng();
        (&mut rng)
            .sample_iter(rand::distributions::Alphanumeric)
            .filter(|c| c.is_ascii_alphabetic())
            .take(20)
            .map(char::from)
            .collect::<String>()
    };
    let metrics = Arc::new(metric::Registry::default());
    let dsn = std::env::var("TEST_INFLUXDB_IOX_CATALOG_DSN").unwrap();

    create_db(&dsn).await;

    let options = PostgresConnectionOptions {
        app_name: String::from("test"),
        schema_name: schema_name.clone(),
        dsn,
        max_conns: 3,
        ..Default::default()
    };
    let pg = PostgresCatalog::connect(options, metrics)
        .await
        .expect("failed to connect catalog");

    // Create the test schema
    pg.pool
        .execute(format!("CREATE SCHEMA {schema_name};").as_str())
        .await
        .expect("failed to create test schema");

    // Ensure the test user has permission to interact with the test schema.
    pg.pool
        .execute(
            format!(
                "GRANT USAGE ON SCHEMA {schema_name} TO public; GRANT CREATE ON SCHEMA {schema_name} TO public;"
            )
            .as_str(),
        )
        .await
        .expect("failed to grant privileges to schema");

    pg
}
/// Create a fully migrated test catalog (random schema + migrations run).
pub async fn setup_db() -> PostgresCatalog {
    let pg = setup_db_no_migration().await;
    // Run the migrations against this random schema.
    pg.setup().await.expect("failed to initialise database");
    pg
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
postgres::test_utils::{
create_db, maybe_skip_integration, setup_db, setup_db_no_migration,
},
test_helpers::{arbitrary_namespace, arbitrary_parquet_file_params, arbitrary_table},
};
use assert_matches::assert_matches;
use data_types::partition_template::TemplatePart;
use generated_types::influxdata::iox::partition_template::v1 as proto;
use metric::{Attributes, DurationHistogram, Metric, Observation, RawReporter};
use std::{io::Write, ops::Deref, sync::Arc, time::Instant};
use tempfile::NamedTempFile;
use test_helpers::maybe_start_logging;
fn assert_metric_hit(metrics: &metric::Registry, name: &'static str) {
    // Verify the named catalog operation recorded at least one successful
    // duration observation.
    let observation = metrics
        .get_instrument::<Metric<DurationHistogram>>("catalog_op_duration")
        .expect("failed to read metric")
        .get_observer(&Attributes::from(&[("op", name), ("result", "success")]))
        .expect("failed to get observer")
        .fetch();

    assert!(
        observation.sample_count() > 0,
        "metric did not record any calls"
    );
}
/// Small no-op test just to print out the migrations.
///
/// This is helpful to look up migration checksums and debug parsing of the migration files.
#[test]
fn print_migrations() {
    // Debug-print the lazily initialised migrator's contents.
    println!("{:#?}", MIGRATOR.deref());
}
// Running `setup()` twice verifies the migrations are re-runnable against
// an already-migrated database.
#[tokio::test]
async fn test_migration() {
    maybe_skip_integration!();
    maybe_start_logging();

    let postgres = setup_db_no_migration().await;

    // 1st setup
    postgres.setup().await.unwrap();

    // 2nd setup
    postgres.setup().await.unwrap();
}
// Run the shared (backend-generic) migration test harness against the
// Postgres MIGRATOR, using a fresh un-migrated schema per invocation.
#[tokio::test]
async fn test_migration_generic() {
    use crate::migrate::test_utils::test_migration;

    maybe_skip_integration!();
    maybe_start_logging();

    test_migration(&MIGRATOR, || async {
        setup_db_no_migration().await.into_pool()
    })
    .await
    .unwrap();
}
#[tokio::test]
// Drive the generic catalog conformance suite against Postgres. The closure
// resets the random test schema to a pristine, fully-migrated state before
// each test case so cases do not observe each other's data.
async fn test_catalog() {
maybe_skip_integration!();
let postgres = setup_db().await;
// Validate the connection time zone is the expected UTC value.
let tz: String = sqlx::query_scalar("SHOW TIME ZONE;")
.fetch_one(&postgres.pool)
.await
.expect("read time zone");
assert_eq!(tz, "UTC");
// Keep a raw pool + schema name for the reset closure below; the catalog
// itself is moved behind `Arc<dyn Catalog>`.
let pool = postgres.pool.clone();
let schema_name = postgres.schema_name().to_string();
let postgres: Arc<dyn Catalog> = Arc::new(postgres);
crate::interface::test_helpers::test_catalog(|| async {
// Clean the schema.
pool
.execute(format!("DROP SCHEMA {schema_name} CASCADE").as_str())
.await
.expect("failed to clean schema between tests");
// Recreate the test schema
pool
.execute(format!("CREATE SCHEMA {schema_name};").as_str())
.await
.expect("failed to create test schema");
// Ensure the test user has permission to interact with the test schema.
pool
.execute(
format!(
"GRANT USAGE ON SCHEMA {schema_name} TO public; GRANT CREATE ON SCHEMA {schema_name} TO public;"
)
.as_str(),
)
.await
.expect("failed to grant privileges to schema");
// Run the migrations against this random schema.
postgres.setup().await.expect("failed to initialise database");
Arc::clone(&postgres)
})
.await;
}
#[tokio::test]
// `create_or_get` for the same (key, table_id) must be idempotent and the
// deterministic partition hash id must be persisted and round-trip through
// the database.
async fn test_partition_create_or_get_idempotent() {
maybe_skip_integration!();
let postgres = setup_db().await;
let postgres: Arc<dyn Catalog> = Arc::new(postgres);
let mut repos = postgres.repositories().await;
let namespace = arbitrary_namespace(&mut *repos, "ns4").await;
let table_id = arbitrary_table(&mut *repos, "table", &namespace).await.id;
let key = PartitionKey::from("bananas");
// Hash derived purely from (table_id, key); both inserts must persist
// and return this exact value.
let hash_id = PartitionHashId::new(table_id, &key);
let a = repos
.partitions()
.create_or_get(key.clone(), table_id)
.await
.expect("should create OK");
assert_eq!(a.hash_id().unwrap(), &hash_id);
// Call create_or_get for the same (key, table_id) pair, to ensure the write is idempotent.
let b = repos
.partitions()
.create_or_get(key.clone(), table_id)
.await
.expect("idempotent write should succeed");
assert_eq!(a, b);
// Check that the hash_id is saved in the database and is returned when queried.
let table_partitions = postgres
.repositories()
.await
.partitions()
.list_by_table_id(table_id)
.await
.unwrap();
assert_eq!(table_partitions.len(), 1);
assert_eq!(table_partitions[0].hash_id().unwrap(), &hash_id);
}
#[tokio::test]
// Partitions created before the `hash_id` column existed have NULL there;
// verify that reads, idempotent re-creation, and new parquet file ingest all
// keep working for such legacy rows.
async fn existing_partitions_without_hash_id() {
maybe_skip_integration!();
let postgres = setup_db().await;
let pool = postgres.pool.clone();
let postgres: Arc<dyn Catalog> = Arc::new(postgres);
let mut repos = postgres.repositories().await;
let namespace = arbitrary_namespace(&mut *repos, "ns4").await;
let table = arbitrary_table(&mut *repos, "table", &namespace).await;
let table_id = table.id;
let key = PartitionKey::from("francis-scott-key-key");
// Create a partition record in the database that has `NULL` for its `hash_id`
// value, which is what records existing before the migration adding that column will have.
// Raw SQL is used on purpose: the catalog API would populate `hash_id`.
sqlx::query(
r#"
INSERT INTO partition
(partition_key, shard_id, table_id, sort_key)
VALUES
( $1, $2, $3, '{}')
ON CONFLICT ON CONSTRAINT partition_key_unique
DO UPDATE SET partition_key = partition.partition_key
RETURNING id, hash_id, table_id, partition_key, sort_key, new_file_at;
"#,
)
.bind(&key) // $1
.bind(TRANSITION_SHARD_ID) // $2
.bind(table_id) // $3
.fetch_one(&pool)
.await
.unwrap();
// Check that the hash_id being null in the database doesn't break querying for partitions.
let table_partitions = repos.partitions().list_by_table_id(table_id).await.unwrap();
assert_eq!(table_partitions.len(), 1);
let partition = &table_partitions[0];
assert!(partition.hash_id().is_none());
// Call create_or_get for the same (key, table_id) pair, to ensure the write is idempotent
// and that the hash_id still doesn't get set.
let inserted_again = repos
.partitions()
.create_or_get(key, table_id)
.await
.expect("idempotent write should succeed");
assert_eq!(partition, &inserted_again);
// Create a Parquet file record in this partition to ensure we don't break new data
// ingestion for old-style partitions
let parquet_file_params = arbitrary_parquet_file_params(&namespace, &table, partition);
let parquet_file = repos
.parquet_files()
.create(parquet_file_params)
.await
.unwrap();
// Legacy partitions (no hash id) must be addressed via the deprecated
// numeric-id partition identifier.
assert_matches!(
parquet_file.partition_id,
TransitionPartitionId::Deprecated(_)
);
}
/// `dsn-file://<path>` DSNs resolve to the file path; everything else
/// yields `None`.
#[test]
fn test_parse_dsn_file() {
assert_eq!(
get_dsn_file_path("dsn-file:///tmp/my foo.txt"),
Some("/tmp/my foo.txt".to_owned())
);
// Missing the `//` authority separator: not a dsn-file URL.
assert_eq!(get_dsn_file_path("dsn-file:blah"), None);
// A regular postgres DSN has no file component.
assert_eq!(get_dsn_file_path("postgres://user:pw@host/db"), None);
}
#[tokio::test]
// End-to-end test of the hot-swap pool: overwrite the DSN file on disk and
// verify that the pool transparently reconnects using the new DSN (observed
// via Postgres' `application_name` connection setting).
async fn test_reload() {
maybe_skip_integration!();
const POLLING_INTERVAL: Duration = Duration::from_millis(10);
// fetch dsn from envvar
let test_dsn = std::env::var("TEST_INFLUXDB_IOX_CATALOG_DSN").unwrap();
create_db(&test_dsn).await;
eprintln!("TEST_DSN={test_dsn}");
// create a temp file to store the initial dsn
let mut dsn_file = NamedTempFile::new().expect("create temp file");
dsn_file
.write_all(test_dsn.as_bytes())
.expect("write temp file");
const TEST_APPLICATION_NAME: &str = "test_application_name";
let dsn_good = format!("dsn-file://{}", dsn_file.path().display());
eprintln!("dsn_good={dsn_good}");
// create a hot swap pool with test application name and dsn file pointing to tmp file.
// we will later update this file and the pool should be replaced.
let options = PostgresConnectionOptions {
app_name: TEST_APPLICATION_NAME.to_owned(),
schema_name: String::from("test"),
dsn: dsn_good,
max_conns: 3,
hotswap_poll_interval: POLLING_INTERVAL,
..Default::default()
};
let metrics = Arc::new(metric::Registry::new());
let pool = new_pool(&options, metrics).await.expect("connect");
eprintln!("got a pool");
// ensure the application name is set as expected
let application_name: String =
sqlx::query_scalar("SELECT current_setting('application_name') as application_name;")
.fetch_one(&pool)
.await
.expect("read application_name");
assert_eq!(application_name, TEST_APPLICATION_NAME);
// create a new temp file object with updated dsn and overwrite the previous tmp file
const TEST_APPLICATION_NAME_NEW: &str = "changed_application_name";
let mut new_dsn_file = NamedTempFile::new().expect("create temp file");
new_dsn_file
.write_all(test_dsn.as_bytes())
.expect("write temp file");
// Append a query parameter so new connections identify themselves with
// the changed application name.
new_dsn_file
.write_all(format!("?application_name={TEST_APPLICATION_NAME_NEW}").as_bytes())
.expect("write temp file");
// Atomically replace the old DSN file (persist renames over it).
new_dsn_file
.persist(dsn_file.path())
.expect("overwrite new dsn file");
// wait until the hotswap machinery has reloaded the updated DSN file and
// successfully performed a new connection with the new DSN.
let mut application_name = "".to_string();
let start = Instant::now();
// Poll with a 5 s deadline; each iteration sleeps one POLLING_INTERVAL.
while start.elapsed() < Duration::from_secs(5)
&& application_name != TEST_APPLICATION_NAME_NEW
{
tokio::time::sleep(POLLING_INTERVAL).await;
application_name = sqlx::query_scalar(
"SELECT current_setting('application_name') as application_name;",
)
.fetch_one(&pool)
.await
.expect("read application_name");
}
assert_eq!(application_name, TEST_APPLICATION_NAME_NEW);
}
// Generates a `#[tokio::test]` named `test_column_create_or_get_many_unchecked_$name`
// that issues one `create_or_get_many_unchecked` call per `[...]` group in
// `calls` (each group is a set of `"name" => ColumnType` pairs), then matches
// the *last* call's result against the `want` pattern.
macro_rules! test_column_create_or_get_many_unchecked {
(
$name:ident,
calls = {$([$($col_name:literal => $col_type:expr),+ $(,)?]),+},
want = $($want:tt)+
) => {
paste::paste! {
#[tokio::test]
async fn [<test_column_create_or_get_many_unchecked_ $name>]() {
maybe_skip_integration!();
let postgres = setup_db().await;
let metrics = Arc::clone(&postgres.metrics);
let postgres: Arc<dyn Catalog> = Arc::new(postgres);
let mut repos = postgres.repositories().await;
let namespace = arbitrary_namespace(&mut *repos, "ns4")
.await;
let table_id = arbitrary_table(&mut *repos, "table", &namespace)
.await
.id;
$(
// One upsert call per `[...]` group.
let mut insert = HashMap::new();
$(
insert.insert($col_name, $col_type);
)+
let got = repos
.columns()
.create_or_get_many_unchecked(table_id, insert.clone())
.await;
// The returned columns MUST always match the requested
// column values if successful.
if let Ok(got) = &got {
assert_eq!(insert.len(), got.len());
for got in got {
assert_eq!(table_id, got.table_id);
let requested_column_type = insert
.get(got.name.as_str())
.expect("Should have gotten back a column that was inserted");
assert_eq!(
*requested_column_type,
ColumnType::try_from(got.column_type)
.expect("invalid column type")
);
}
assert_metric_hit(&metrics, "column_create_or_get_many_unchecked");
}
)+
// Only the outcome of the final call is matched against `want`.
assert_matches!(got, $($want)+);
}
}
}
}
// Issue a few calls to create_or_get_many that contain distinct columns and
// covers the full set of column types.
test_column_create_or_get_many_unchecked!(
insert,
calls = {
[
"test1" => ColumnType::I64,
"test2" => ColumnType::U64,
"test3" => ColumnType::F64,
"test4" => ColumnType::Bool,
"test5" => ColumnType::String,
"test6" => ColumnType::Time,
"test7" => ColumnType::Tag,
],
[
"test8" => ColumnType::String,
"test9" => ColumnType::Bool,
]
},
want = Ok(_)
);
// Issue two calls with overlapping columns - request should succeed (upsert
// semantics).
test_column_create_or_get_many_unchecked!(
partial_upsert,
calls = {
[
"test1" => ColumnType::I64,
"test2" => ColumnType::U64,
"test3" => ColumnType::F64,
"test4" => ColumnType::Bool,
],
[
"test1" => ColumnType::I64,
"test2" => ColumnType::U64,
"test3" => ColumnType::F64,
"test4" => ColumnType::Bool,
"test5" => ColumnType::String,
"test6" => ColumnType::Time,
"test7" => ColumnType::Tag,
"test8" => ColumnType::String,
]
},
want = Ok(_)
);
// Issue two calls with the same columns and types - a full no-op upsert that
// must also succeed.
test_column_create_or_get_many_unchecked!(
full_upsert,
calls = {
[
"test1" => ColumnType::I64,
"test2" => ColumnType::U64,
"test3" => ColumnType::F64,
"test4" => ColumnType::Bool,
],
[
"test1" => ColumnType::I64,
"test2" => ColumnType::U64,
"test3" => ColumnType::F64,
"test4" => ColumnType::Bool,
]
},
want = Ok(_)
);
// Issue two calls with overlapping columns with conflicting types and
// observe a correctly populated ColumnTypeMismatch error.
test_column_create_or_get_many_unchecked!(
partial_type_conflict,
calls = {
[
"test1" => ColumnType::String,
"test2" => ColumnType::String,
"test3" => ColumnType::String,
"test4" => ColumnType::String,
],
[
"test1" => ColumnType::String,
"test2" => ColumnType::Bool, // This one differs
"test3" => ColumnType::String,
// 4 is missing.
"test5" => ColumnType::String,
"test6" => ColumnType::Time,
"test7" => ColumnType::Tag,
"test8" => ColumnType::String,
]
},
// The error must name the conflicting column and both types.
want = Err(e) => {
assert_matches!(e, Error::ColumnTypeMismatch { name, existing, new } => {
assert_eq!(name, "test2");
assert_eq!(existing, ColumnType::String);
assert_eq!(new, ColumnType::Bool);
})
}
);
#[tokio::test]
// NOTE(review): "parqet" in the test name is a typo for "parquet". Renaming
// is left out of this documentation pass.
// Verify the `billing_summary` table tracks the total size of *live* parquet
// files: creations add, soft-deletes subtract, hard-deletes of already
// soft-deleted files change nothing.
async fn test_billing_summary_on_parqet_file_creation() {
maybe_skip_integration!();
let postgres = setup_db().await;
let pool = postgres.pool.clone();
let postgres: Arc<dyn Catalog> = Arc::new(postgres);
let mut repos = postgres.repositories().await;
let namespace = arbitrary_namespace(&mut *repos, "ns4").await;
let table = arbitrary_table(&mut *repos, "table", &namespace).await;
let key = "bananas";
let partition = repos
.partitions()
.create_or_get(key.into(), table.id)
.await
.unwrap();
// parquet file to create- all we care about here is the size
let mut p1 = arbitrary_parquet_file_params(&namespace, &table, &partition);
p1.file_size_bytes = 1337;
let f1 = repos.parquet_files().create(p1.clone()).await.unwrap();
// insert the same again with a different size; we should then have 3x1337 as total file
// size
p1.object_store_id = Uuid::new_v4();
p1.file_size_bytes *= 2;
let _f2 = repos
.parquet_files()
.create(p1.clone())
.await
.expect("create parquet file should succeed");
// after adding two files we should have 3x1337 in the summary
let total_file_size_bytes: i64 =
sqlx::query_scalar("SELECT total_file_size_bytes FROM billing_summary;")
.fetch_one(&pool)
.await
.expect("fetch total file size failed");
assert_eq!(total_file_size_bytes, 1337 * 3);
// flag f1 for deletion and assert that the total file size is reduced accordingly.
repos
.parquet_files()
.create_upgrade_delete(&[f1.id], &[], &[], CompactionLevel::Initial)
.await
.expect("flag parquet file for deletion should succeed");
let total_file_size_bytes: i64 =
sqlx::query_scalar("SELECT total_file_size_bytes FROM billing_summary;")
.fetch_one(&pool)
.await
.expect("fetch total file size failed");
// we marked the first file of size 1337 for deletion leaving only the second that was 2x
// that
assert_eq!(total_file_size_bytes, 1337 * 2);
// actually deleting shouldn't change the total
let older_than = p1.created_at + 1;
repos
.parquet_files()
.delete_old_ids_only(older_than)
.await
.expect("parquet file deletion should succeed");
let total_file_size_bytes: i64 =
sqlx::query_scalar("SELECT total_file_size_bytes FROM billing_summary;")
.fetch_one(&pool)
.await
.expect("fetch total file size failed");
assert_eq!(total_file_size_bytes, 1337 * 2);
}
#[tokio::test]
// NULL `partition_template` in the namespace table must decode as the
// application default; conversely, creating without a template stores NULL,
// while an *explicit* template — even one equal to the default — is stored
// verbatim.
async fn namespace_partition_template_null_is_the_default_in_the_database() {
maybe_skip_integration!();
let postgres = setup_db().await;
let pool = postgres.pool.clone();
let postgres: Arc<dyn Catalog> = Arc::new(postgres);
let mut repos = postgres.repositories().await;
let namespace_name = "apples";
// Create a namespace record in the database that has `NULL` for its `partition_template`
// value, which is what records existing before the migration adding that column will have.
let insert_null_partition_template_namespace = sqlx::query(
r#"
INSERT INTO namespace (
name, topic_id, query_pool_id, retention_period_ns, max_tables, partition_template
)
VALUES ( $1, $2, $3, $4, $5, NULL )
RETURNING id, name, retention_period_ns, max_tables, max_columns_per_table, deleted_at,
partition_template;
"#,
)
.bind(namespace_name) // $1
.bind(SHARED_TOPIC_ID) // $2
.bind(SHARED_QUERY_POOL_ID) // $3
.bind(None::<Option<i64>>) // $4
.bind(DEFAULT_MAX_TABLES); // $5
insert_null_partition_template_namespace
.fetch_one(&pool)
.await
.unwrap();
let lookup_namespace = repos
.namespaces()
.get_by_name(namespace_name, SoftDeletedRows::ExcludeDeleted)
.await
.unwrap()
.unwrap();
// When fetching this namespace from the database, the `FromRow` impl should set its
// `partition_template` to the default.
assert_eq!(
lookup_namespace.partition_template,
NamespacePartitionTemplateOverride::default()
);
// When creating a namespace through the catalog functions without specifying a custom
// partition template,
let created_without_custom_template = repos
.namespaces()
.create(
&"lemons".try_into().unwrap(),
None, // no partition template
None,
None,
)
.await
.unwrap();
// it should have the default template in the application,
assert_eq!(
created_without_custom_template.partition_template,
NamespacePartitionTemplateOverride::default()
);
// and store NULL in the database record.
let record = sqlx::query("SELECT name, partition_template FROM namespace WHERE id = $1;")
.bind(created_without_custom_template.id)
.fetch_one(&pool)
.await
.unwrap();
let name: String = record.try_get("name").unwrap();
assert_eq!(created_without_custom_template.name, name);
let partition_template: Option<NamespacePartitionTemplateOverride> =
record.try_get("partition_template").unwrap();
assert!(partition_template.is_none());
// When explicitly setting a template that happens to be equal to the application default,
// assume it's important that it's being specially requested and store it rather than NULL.
let namespace_custom_template_name = "kumquats";
let custom_partition_template_equal_to_default =
NamespacePartitionTemplateOverride::try_from(proto::PartitionTemplate {
parts: vec![proto::TemplatePart {
part: Some(proto::template_part::Part::TimeFormat(
"%Y-%m-%d".to_owned(),
)),
}],
})
.unwrap();
let namespace_custom_template = repos
.namespaces()
.create(
&namespace_custom_template_name.try_into().unwrap(),
Some(custom_partition_template_equal_to_default.clone()),
None,
None,
)
.await
.unwrap();
assert_eq!(
namespace_custom_template.partition_template,
custom_partition_template_equal_to_default
);
// The explicitly-requested template must round-trip through the database
// as a non-NULL value.
let record = sqlx::query("SELECT name, partition_template FROM namespace WHERE id = $1;")
.bind(namespace_custom_template.id)
.fetch_one(&pool)
.await
.unwrap();
let name: String = record.try_get("name").unwrap();
assert_eq!(namespace_custom_template.name, name);
let partition_template: Option<NamespacePartitionTemplateOverride> =
record.try_get("partition_template").unwrap();
assert_eq!(
partition_template.unwrap(),
custom_partition_template_equal_to_default
);
}
#[tokio::test]
// Exhaustively covers the 2x2 matrix of (table template present?, namespace
// template present?) for table creation, plus legacy rows with NULL
// `partition_template` in the database.
async fn table_partition_template_null_is_the_default_in_the_database() {
maybe_skip_integration!();
let postgres = setup_db().await;
let pool = postgres.pool.clone();
let postgres: Arc<dyn Catalog> = Arc::new(postgres);
let mut repos = postgres.repositories().await;
// Namespace with the default (NULL) partition template.
let namespace_default_template_name = "oranges";
let namespace_default_template = repos
.namespaces()
.create(
&namespace_default_template_name.try_into().unwrap(),
None, // no partition template
None,
None,
)
.await
.unwrap();
// Namespace with an explicit custom partition template.
let namespace_custom_template_name = "limes";
let namespace_custom_template = repos
.namespaces()
.create(
&namespace_custom_template_name.try_into().unwrap(),
Some(
NamespacePartitionTemplateOverride::try_from(proto::PartitionTemplate {
parts: vec![proto::TemplatePart {
part: Some(proto::template_part::Part::TimeFormat("year-%Y".into())),
}],
})
.unwrap(),
),
None,
None,
)
.await
.unwrap();
// In a namespace that also has a NULL template, create a table record in the database that
// has `NULL` for its `partition_template` value, which is what records existing before the
// migration adding that column will have.
let table_name = "null_template";
let insert_null_partition_template_table = sqlx::query(
r#"
INSERT INTO table_name ( name, namespace_id, partition_template )
VALUES ( $1, $2, NULL )
RETURNING *;
"#,
)
.bind(table_name) // $1
.bind(namespace_default_template.id); // $2
insert_null_partition_template_table
.fetch_one(&pool)
.await
.unwrap();
let lookup_table = repos
.tables()
.get_by_namespace_and_name(namespace_default_template.id, table_name)
.await
.unwrap()
.unwrap();
// When fetching this table from the database, the `FromRow` impl should set its
// `partition_template` to the system default (because the namespace didn't have a template
// either).
assert_eq!(
lookup_table.partition_template,
TablePartitionTemplateOverride::default()
);
// In a namespace that has a custom template, create a table record in the database that
// has `NULL` for its `partition_template` value.
//
// THIS ACTUALLY SHOULD BE IMPOSSIBLE because:
//
// * Namespaces have to exist before tables
// * `partition_tables` are immutable on both namespaces and tables
// * When the migration adding the `partition_table` column is deployed, namespaces can
// begin to be created with `partition_templates`
// * *Then* tables can be created with `partition_templates` or not
// * When tables don't get a custom table partition template but their namespace has one,
// their database record will get the namespace partition template.
//
// In other words, table `partition_template` values in the database is allowed to possibly
// be `NULL` IFF their namespace's `partition_template` is `NULL`.
//
// That said, this test creates this hopefully-impossible scenario to ensure that the
// defined, expected behavior if a table record somehow exists in the database with a `NULL`
// `partition_template` value is that it will have the application default partition
// template *even if the namespace `partition_template` is not null*.
let table_name = "null_template";
let insert_null_partition_template_table = sqlx::query(
r#"
INSERT INTO table_name ( name, namespace_id, partition_template )
VALUES ( $1, $2, NULL )
RETURNING *;
"#,
)
.bind(table_name) // $1
.bind(namespace_custom_template.id); // $2
insert_null_partition_template_table
.fetch_one(&pool)
.await
.unwrap();
let lookup_table = repos
.tables()
.get_by_namespace_and_name(namespace_custom_template.id, table_name)
.await
.unwrap()
.unwrap();
// When fetching this table from the database, the `FromRow` impl should set its
// `partition_template` to the system default *even though the namespace has a
// template*, because this should be impossible as detailed above.
assert_eq!(
lookup_table.partition_template,
TablePartitionTemplateOverride::default()
);
// # Table template false, namespace template true
//
// When creating a table through the catalog functions *without* a custom table template in
// a namespace *with* a custom partition template,
let table_no_template_with_namespace_template = repos
.tables()
.create(
"pomelo",
TablePartitionTemplateOverride::try_new(
None, // no custom partition template
&namespace_custom_template.partition_template,
)
.unwrap(),
namespace_custom_template.id,
)
.await
.unwrap();
// it should have the namespace's template
assert_eq!(
table_no_template_with_namespace_template.partition_template,
TablePartitionTemplateOverride::try_new(
None,
&namespace_custom_template.partition_template
)
.unwrap()
);
// and store that value in the database record.
let record = sqlx::query("SELECT name, partition_template FROM table_name WHERE id = $1;")
.bind(table_no_template_with_namespace_template.id)
.fetch_one(&pool)
.await
.unwrap();
let name: String = record.try_get("name").unwrap();
assert_eq!(table_no_template_with_namespace_template.name, name);
let partition_template: Option<TablePartitionTemplateOverride> =
record.try_get("partition_template").unwrap();
assert_eq!(
partition_template.unwrap(),
TablePartitionTemplateOverride::try_new(
None,
&namespace_custom_template.partition_template
)
.unwrap()
);
// # Table template true, namespace template false
//
// When creating a table through the catalog functions *with* a custom table template in
// a namespace *without* a custom partition template,
let custom_table_template = proto::PartitionTemplate {
parts: vec![proto::TemplatePart {
part: Some(proto::template_part::Part::TagValue("chemical".into())),
}],
};
let table_with_template_no_namespace_template = repos
.tables()
.create(
"tangerine",
TablePartitionTemplateOverride::try_new(
Some(custom_table_template), // with custom partition template
&namespace_default_template.partition_template,
)
.unwrap(),
namespace_default_template.id,
)
.await
.unwrap();
// it should have the custom table template
let table_template_parts: Vec<_> = table_with_template_no_namespace_template
.partition_template
.parts()
.collect();
assert_eq!(table_template_parts.len(), 1);
assert_matches!(
table_template_parts[0],
TemplatePart::TagValue(tag) if tag == "chemical"
);
// and store that value in the database record.
let record = sqlx::query("SELECT name, partition_template FROM table_name WHERE id = $1;")
.bind(table_with_template_no_namespace_template.id)
.fetch_one(&pool)
.await
.unwrap();
let name: String = record.try_get("name").unwrap();
assert_eq!(table_with_template_no_namespace_template.name, name);
let partition_template = record
.try_get::<Option<TablePartitionTemplateOverride>, _>("partition_template")
.unwrap()
.unwrap();
let table_template_parts: Vec<_> = partition_template.parts().collect();
assert_eq!(table_template_parts.len(), 1);
assert_matches!(
table_template_parts[0],
TemplatePart::TagValue(tag) if tag == "chemical"
);
// # Table template true, namespace template true
//
// When creating a table through the catalog functions *with* a custom table template in
// a namespace *with* a custom partition template,
let custom_table_template = proto::PartitionTemplate {
parts: vec![proto::TemplatePart {
part: Some(proto::template_part::Part::TagValue("vegetable".into())),
}],
};
let table_with_template_with_namespace_template = repos
.tables()
.create(
"nectarine",
TablePartitionTemplateOverride::try_new(
Some(custom_table_template), // with custom partition template
&namespace_custom_template.partition_template,
)
.unwrap(),
namespace_custom_template.id,
)
.await
.unwrap();
// it should have the custom table template (table template wins over the
// namespace template),
let table_template_parts: Vec<_> = table_with_template_with_namespace_template
.partition_template
.parts()
.collect();
assert_eq!(table_template_parts.len(), 1);
assert_matches!(
table_template_parts[0],
TemplatePart::TagValue(tag) if tag == "vegetable"
);
// and store that value in the database record.
let record = sqlx::query("SELECT name, partition_template FROM table_name WHERE id = $1;")
.bind(table_with_template_with_namespace_template.id)
.fetch_one(&pool)
.await
.unwrap();
let name: String = record.try_get("name").unwrap();
assert_eq!(table_with_template_with_namespace_template.name, name);
let partition_template = record
.try_get::<Option<TablePartitionTemplateOverride>, _>("partition_template")
.unwrap()
.unwrap();
let table_template_parts: Vec<_> = partition_template.parts().collect();
assert_eq!(table_template_parts.len(), 1);
assert_matches!(
table_template_parts[0],
TemplatePart::TagValue(tag) if tag == "vegetable"
);
// # Table template false, namespace template false
//
// When creating a table through the catalog functions *without* a custom table template in
// a namespace *without* a custom partition template,
let table_no_template_no_namespace_template = repos
.tables()
.create(
"grapefruit",
TablePartitionTemplateOverride::try_new(
None, // no custom partition template
&namespace_default_template.partition_template,
)
.unwrap(),
namespace_default_template.id,
)
.await
.unwrap();
// it should have the default template in the application,
assert_eq!(
table_no_template_no_namespace_template.partition_template,
TablePartitionTemplateOverride::default()
);
// and store NULL in the database record.
let record = sqlx::query("SELECT name, partition_template FROM table_name WHERE id = $1;")
.bind(table_no_template_no_namespace_template.id)
.fetch_one(&pool)
.await
.unwrap();
let name: String = record.try_get("name").unwrap();
assert_eq!(table_no_template_no_namespace_template.name, name);
let partition_template: Option<TablePartitionTemplateOverride> =
record.try_get("partition_template").unwrap();
assert!(partition_template.is_none());
}
#[tokio::test]
// The connection pool must export min/max connection gauges via the metric
// registry; `max` reflects PostgresConnectionOptions defaults (no migration
// needed since we only read pool metrics).
async fn test_metrics() {
maybe_skip_integration!();
let postgres = setup_db_no_migration().await;
let mut reporter = RawReporter::default();
postgres.metrics.report(&mut reporter);
// Minimum pool size gauge.
assert_eq!(
reporter
.metric("sqlx_postgres_connections")
.unwrap()
.observation(&[("pool_id", "0"), ("state", "min")])
.unwrap(),
&Observation::U64Gauge(1),
);
// Maximum pool size gauge.
assert_eq!(
reporter
.metric("sqlx_postgres_connections")
.unwrap()
.observation(&[("pool_id", "0"), ("state", "max")])
.unwrap(),
&Observation::U64Gauge(3),
);
}
}
|
//! Error window.
//!
//! Displays a window with an error message.
use terminal::Terminal;
/// The width of the error window in cells.
const WIDTH: u32 = 51;
/// The height of the error window in cells.
const HEIGHT: u32 = 19;
/// The error window.
pub struct ErrorWindow {
/// Terminal backing the window; all drawing and event polling goes through it.
pub term: Terminal,
}
impl ErrorWindow {
/// Creates a new error window with the given message lines.
///
/// Each entry in `messages` is drawn on its own row starting at row 6,
/// horizontally centred within the window.
pub fn new(messages: &[&str]) -> ErrorWindow {
// Create a terminal with the fixed error-window dimensions.
let mut term = Terminal::new("Error", WIDTH, HEIGHT, 4, 4);
term.cursor_visibility(false);
// Write the centred title ("Error" is 5 cells wide).
term.write("Error", (WIDTH - 5) / 2, 3);
// Write the message lines, centring each. `enumerate` replaces the
// previous manual counter; `saturating_sub` guards against messages
// wider than the window, which previously underflowed the u32
// subtraction (panic in debug builds, wrap-around in release).
for (i, message) in messages.iter().enumerate() {
let x = WIDTH.saturating_sub(message.len() as u32) / 2;
term.write(*message, x, 6 + i as u32);
}
// Field-init shorthand instead of `term: term`.
ErrorWindow { term }
}
/// Updates events so that the UI doesn't freeze.
pub fn update(&mut self) {
self.term.events();
}
}
|
/*
Gordon Adam
1107425
Struct to represent a Resource Record
*/
use std::default;
use std::io::BufReader;
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use data;
#[deriving(Default,Clone)]
/// A single DNS resource record (RFC 1035 section 4.1.3 layout).
pub struct Resource {
pub rname: data::Data, // This is the name the resource record pertains to
pub rtype: u16, // This is the type of the resource record (e.g. 1 = A, 5 = CNAME)
pub rclass: u16, // This is the class of the resource record (1 = IN)
pub ttl: u32, // This is the time to live of the resource record, in seconds
pub rdlength: u16, // This is the length of the data held in rdata, in bytes
pub rdata: data::Data, // this is the data of the resource record
pub cache_timeout: i64, // absolute expiry time used by the resolver cache; 0 = not cached
}
impl Resource {
// Creates an instance of the struct with default values
pub fn new() -> Resource {
return Resource {..default::Default::default()};
}
// This reads in from a buffered reader the values into the struct
pub fn read_in(&mut self, reader: &mut BufReader, msg_copy: &mut Vec<u8>) -> Result<(), String> {
self.rname = data::Data::new();
self.rname.read_hostname(reader, msg_copy);
self.rtype = reader.read_be_u16().unwrap();
self.rclass = reader.read_be_u16().unwrap();
self.ttl = reader.read_be_u32().unwrap();
self.rdlength = reader.read_be_u16().unwrap();
match self.rtype {
0x0001 => {self.rdata.read_ipv4_addr(reader)},
0x0002 => {self.rdata.read_hostname(reader, msg_copy)},
0x0005 => {self.rdata.read_hostname(reader, msg_copy)},
0x0006 => {self.rdata.read_soa(reader, msg_copy)},
0x000c => {return Err("Error: Resource Type (12) not supported".to_string());},
0x000f => {return Err("Error: Resource Type (15) not supported".to_string());},
0x0010 => {return Err("Error: Resource Type (16) not supported".to_string());},
0x0011 => {return Err("Error: Resource Type (17) not supported".to_string());},
0x0012 => {return Err("Error: Resource Type (18) not supported".to_string());},
0x0018 => {return Err("Error: Resource Type (24) not supported".to_string());},
0x0019 => {return Err("Error: Resource Type (25) not supported".to_string());},
0x001c => {self.rdata.read_ipv6_addr(reader)},
_ => {return Err("Error: Resource Type (?) not supported".to_string());},
}
if (self.rtype == 2) || (self.rtype == 5) {
self.rdlength = 0;
for i in range(0u, self.rdata.length.len()) {
self.rdlength = self.rdlength + (self.rdata.length[i] as u16 + 1);
}
self.rdlength = self.rdlength + 1;
}
self.cache_timeout = 0;
return Ok(());
}
// converts the struct to a vector of u8 characters and returns it
pub fn write(&mut self) -> Vec<u8>{
let mut resource_buffer: Vec<u8> = vec![];
resource_buffer.push_all(self.rname.write().as_slice());
split_u16(self.rtype, &mut resource_buffer);
split_u16(self.rclass, &mut resource_buffer);
let mut temp_buffer: u16;
temp_buffer = ((self.ttl & 0xFFFF0000) >> 16) as u16;
split_u16(temp_buffer, &mut resource_buffer);
temp_buffer = ((self.ttl & 0x0000FFFF) >> 0) as u16;
split_u16(temp_buffer, &mut resource_buffer);
split_u16(self.rdlength, &mut resource_buffer);
if self.rtype == 1 {
resource_buffer.push_all(self.rdata.write_ip_addr().as_slice());
} else if self.rtype == 5 {
resource_buffer.push_all(self.rdata.write_cname().as_slice());
} else {
resource_buffer.push_all(self.rdata.write().as_slice());
}
return resource_buffer;
}
// Returns the IPv4 address in rdata as a SocketAddr (fixed to port 53, the
// DNS port, so it can be used directly as a nameserver target), or None
// when this record is not an A record.
pub fn ip_addr(&mut self) -> Option<SocketAddr> {
if self.rtype == 0x0001 {
let ip_addr = self.rdata.get_ipv4_addr();
let sock = SocketAddr {ip: Ipv4Addr(ip_addr[0], ip_addr[1], ip_addr[2], ip_addr[3]), port: 53};
return Some(sock);
} else {
return None
}
}
// Pretty-prints the record to stdout, one tab-indented field per line:
// name, type, class, TTL, data length, then the decoded RDATA.
pub fn print(&mut self) {
print!("\t");
self.rname.print_as_hostname();
print!("\t");
// Mask to 16 bits defensively before matching the record type.
match self.rtype & 0xFFFF {
0x0001 => {println!("Type: A (Address record) (1)")},
0x0002 => {println!("Type: NS (Name server record) (2)")},
0x0005 => {println!("Type: CNAME (Canonical name record) (5)")},
0x0006 => {println!("Type: SOA (Start of [a zone of] authority record) (6)")},
0x000c => {println!("Type: PTR (Pointer record) (12)")},
0x000f => {println!("Type: MX (Mail exchange record) (15)")},
0x0010 => {println!("Type: TXT (Text record) (16)")},
0x0011 => {println!("Type: RP (Responsible person) (17)")},
0x0012 => {println!("Type: AFSDB (AFS database record) (18)")},
0x0018 => {println!("Type: SIG (Signature) (24)")},
0x0019 => {println!("Type: KEY (Key record) (25)")},
0x001c => {println!("Type: AAAA (IPv6 address record) (28)")},
_ => {println!("somethings wrong")}
}
print!("\t");
match self.rclass & 0xFFFF {
0x0001 => {println!("Class: IN (0x0001)")},
0x0002 => {println!("Class: CS (0x0002)")},
0x0003 => {println!("Class: CH (0x0003)")},
0x0004 => {println!("Class: HS (0x0004)")},
_ => {println!("somethings wrong")}
}
print!("\t");
println!("Time to live: {}", self.ttl);
print!("\t");
println!("Data Length: {}", self.rdlength);
print!("\t");
// RDATA is decoded per record type; unsupported types return early and
// therefore skip the trailing blank line.
match self.rtype {
0x0001 => {self.rdata.print_as_ipv4()},
0x0002 => {self.rdata.print_as_hostname()},
0x0005 => {self.rdata.print_as_hostname()},
0x0006 => {self.rdata.print_as_soa()},
0x000c => {return;},
0x000f => {return;},
0x0010 => {return;},
0x0011 => {return;},
0x0012 => {return;},
0x0018 => {return;},
0x0019 => {return;},
0x001c => {self.rdata.print_as_ipv6()},
_ => {panic!("This has not been covered")},
}
println!("");
}
}
// Appends `u` to `message_buffer` in big-endian (network) byte order.
pub fn split_u16(u: u16, message_buffer: &mut Vec<u8>) {
    let high_byte = (u >> 8) as u8;
    let low_byte = u as u8; // truncating cast keeps the low 8 bits
    message_buffer.push(high_byte);
    message_buffer.push(low_byte);
}
pub use VkSamplerAddressMode::*;
/// Mirror of Vulkan's `VkSamplerAddressMode`: how texture coordinates
/// outside the [0, 1) range are handled when sampling. The discriminant
/// values match the C enum and must not be changed.
#[repr(u32)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum VkSamplerAddressMode {
VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
}
|
use std::cmp::Ordering;
use types::Weight;
/// A completion candidate: suggested text plus a ranking weight.
#[derive(Clone)]
pub struct Suggestion {
// The suggested text.
value: String,
// Ranking weight; equality of suggestions is defined by this field alone.
weight: Weight
}
// Equality considers only the weight, not the text.
impl PartialEq for Suggestion {
fn eq(&self, other: &Self) -> bool {
self.weight == other.weight
}
}
impl Eq for Suggestion {}
impl PartialOrd for Suggestion {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
// Ordering compares the *values*, reversed (other vs self).
// NOTE(review): this is inconsistent with PartialEq above, which compares
// weights — two suggestions can be `==` while `cmp()` is not `Equal`.
// Ord's contract requires consistency with Eq; confirm whether this split
// is intentional (e.g. to get a specific collection ordering) or a bug.
impl Ord for Suggestion {
fn cmp(&self, other: &Self) -> Ordering {
other.value.cmp(&self.value)
}
}
use rand::prelude::*;
use crate::bvh::*;
use crate::material::*;
use crate::ray::Ray;
use crate::texture::Texture;
use crate::vec3::*;
/// Data recorded at a ray/object intersection.
#[derive(Default)]
pub struct HitRecord<'a> {
    /// Ray parameter at the hit point.
    pub t: f64,
    /// Surface texture coordinate.
    pub u: f64,
    /// Surface texture coordinate.
    pub v: f64,
    /// World-space hit point.
    pub p: Vec3,
    /// Surface normal at the hit point.
    pub normal: Vec3,
    /// Material of the surface that was hit, borrowed from the object.
    pub material: Option<&'a Material>,
}
/// Anything a ray can intersect. `Send + Sync` because the scene is
/// traversed from multiple rendering threads.
pub trait Hitable: std::fmt::Debug + Send + Sync {
    /// Closest intersection with ray parameter in (t_min, t_max), if any.
    fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord>;
    /// Axis-aligned bounding box over the time interval [t0, t1];
    /// None for unbounded objects.
    fn bounding_box(&self, t0: f64, t1: f64) -> Option<Aabb>;
    /// Density of sampling direction `v` from origin `o` toward this
    /// object; defaults to 0 for objects that cannot be importance-sampled.
    fn pdf_value(&self, o: Vec3, v: Vec3) -> f64 {
        0.0
    }
    /// A direction from `o` toward the object; the default is an arbitrary
    /// fixed direction for objects without a sampling implementation.
    fn random(&self, o: Vec3) -> Vec3 {
        vec3(1.0, 0.0, 0.0)
    }
}
/// A sphere defined by center, radius, and surface material.
#[derive(Debug)]
pub struct Sphere {
    pub center: Vec3,
    pub radius: f64,
    pub material: Material,
}
impl Sphere {
    /// Constructs a sphere from its defining parameters.
    pub fn new(center: Vec3, radius: f64, material: Material) -> Self {
        Self {
            center,
            radius,
            material,
        }
    }
}
impl Hitable for Sphere {
fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
let oc = r.origin() - self.center;
let a = r.direction().dot(r.direction());
let b = oc.dot(r.direction());
let c = oc.dot(oc) - self.radius * self.radius;
let discriminant = b * b - a * c;
if discriminant > 0.0 {
let mut rec = HitRecord::default();
let mut temp = (-b - discriminant.sqrt()) / a;
if temp < t_max && temp > t_min {
rec.t = temp;
rec.p = r.point_at_parameter(rec.t);
let (u, v) = get_sphere_uv(rec.p);
rec.u = u;
rec.v = v;
rec.normal = (rec.p - self.center) / scalar(self.radius);
rec.material = Some(&self.material);
return Some(rec);
}
temp = (-b + discriminant.sqrt()) / a;
if temp < t_max && temp > t_min {
rec.t = temp;
rec.p = r.point_at_parameter(rec.t);
let (u, v) = get_sphere_uv(rec.p);
rec.u = u;
rec.v = v;
rec.normal = (rec.p - self.center) / scalar(self.radius);
rec.material = Some(&self.material);
return Some(rec);
}
}
None
}
fn bounding_box(&self, _t0: f64, _t1: f64) -> Option<Aabb> {
return Some(Aabb::new(
self.center - scalar(self.radius),
self.center + scalar(self.radius),
));
}
}
/// A flat collection of hitables searched linearly.
#[derive(Debug)]
pub struct HitableList {
    pub list: Vec<Box<dyn Hitable>>,
}
impl HitableList {
    /// Wraps an existing list of boxed hitables.
    pub fn new(list: Vec<Box<dyn Hitable>>) -> Self {
        Self { list }
    }
}
impl Hitable for HitableList {
    /// Returns the closest hit among all contained objects, if any.
    /// The search window shrinks to the best `t` found so far.
    fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let mut best: Option<HitRecord> = None;
        let mut nearest = t_max;
        for object in self.list.iter() {
            if let Some(record) = object.hit(r, t_min, nearest) {
                nearest = record.t;
                best = Some(record);
            }
        }
        best
    }
    /// Union of all child boxes; None when the list is empty or any child
    /// is unbounded.
    fn bounding_box(&self, t0: f64, t1: f64) -> Option<Aabb> {
        let mut children = self.list.iter();
        let first = children.next()?.bounding_box(t0, t1)?;
        children.try_fold(first, |acc, child| {
            Some(surrounding_box(acc, child.bounding_box(t0, t1)?))
        })
    }
}
/// Maps a point on the unit sphere to (u, v) texture coordinates via its
/// spherical angles (phi around y, theta from the equator).
fn get_sphere_uv(p: Vec3) -> (f64, f64) {
    use std::f64::consts::PI;
    let phi = p.z.atan2(p.x);
    let theta = p.y.asin();
    let u = 1.0 - (phi + PI) / (2.0 * PI);
    let v = (theta + PI / 2.0) / PI;
    (u, v)
}
/// Axis-aligned rectangle in the plane z = k, spanning [x0,x1] x [y0,y1].
#[derive(Debug, Clone)]
pub struct XYRect {
    pub material: Material,
    pub x0: f64,
    pub x1: f64,
    pub y0: f64,
    pub y1: f64,
    // Plane offset along the z axis.
    pub k: f64,
}
impl XYRect {
    /// Rectangle spanning [x0, x1] x [y0, y1] in the plane z = k.
    pub fn new(x0: f64, x1: f64, y0: f64, y1: f64, k: f64, material: Material) -> Self {
        Self {
            x0,
            x1,
            y0,
            y1,
            k,
            material,
        }
    }
}
impl Hitable for XYRect {
    /// Ray/plane intersection against z = k, clipped to the rectangle.
    fn hit(&self, r: &Ray, t0: f64, t1: f64) -> Option<HitRecord> {
        // Solve origin.z + t * direction.z = k for t.
        let t = (self.k - r.origin().z) / r.direction().z;
        if t < t0 || t > t1 {
            return None;
        }
        let x = r.origin().x + t * r.direction().x;
        let y = r.origin().y + t * r.direction().y;
        if x < self.x0 || x > self.x1 || y < self.y0 || y > self.y1 {
            return None;
        }
        Some(HitRecord {
            // u/v: hit position normalized to [0, 1] within the rect.
            u: (x - self.x0) / (self.x1 - self.x0),
            v: (y - self.y0) / (self.y1 - self.y0),
            t: t,
            material: Some(&self.material),
            p: r.point_at_parameter(t),
            // Fixed +z normal; wrap in FlipNormals for -z.
            normal: vec3(0.0, 0.0, 1.0),
        })
    }
    /// Thin box padded by 1e-4 along z so the AABB is never degenerate.
    fn bounding_box(&self, _t0: f64, _t1: f64) -> Option<Aabb> {
        Some(Aabb::new(
            vec3(self.x0, self.y0, self.k - 0.0001),
            vec3(self.x1, self.y1, self.k + 0.0001),
        ))
    }
}
/// Axis-aligned rectangle in the plane y = k, spanning [x0,x1] x [z0,z1].
#[derive(Debug, Clone)]
pub struct XZRect {
    pub material: Material,
    pub x0: f64,
    pub x1: f64,
    pub z0: f64,
    pub z1: f64,
    // Plane offset along the y axis.
    pub k: f64,
}
impl XZRect {
    /// Rectangle spanning [x0, x1] x [z0, z1] in the plane y = k.
    pub fn new(x0: f64, x1: f64, z0: f64, z1: f64, k: f64, material: Material) -> Self {
        Self {
            x0,
            x1,
            z0,
            z1,
            k,
            material,
        }
    }
}
impl Hitable for XZRect {
    /// Ray/plane intersection against y = k, clipped to the rectangle.
    fn hit(&self, r: &Ray, t0: f64, t1: f64) -> Option<HitRecord> {
        let t = (self.k - r.origin().y) / r.direction().y;
        if t < t0 || t > t1 {
            return None;
        }
        let x = r.origin().x + t * r.direction().x;
        let z = r.origin().z + t * r.direction().z;
        if x < self.x0 || x > self.x1 || z < self.z0 || z > self.z1 {
            return None;
        }
        Some(HitRecord {
            // u/v: hit position normalized to [0, 1] within the rect.
            u: (x - self.x0) / (self.x1 - self.x0),
            v: (z - self.z0) / (self.z1 - self.z0),
            t: t,
            material: Some(&self.material),
            p: r.point_at_parameter(t),
            // Fixed +y normal; wrap in FlipNormals for -y.
            normal: vec3(0.0, 1.0, 0.0),
        })
    }
    /// Thin box padded by 1e-4 along y so the AABB is never degenerate.
    fn bounding_box(&self, _t0: f64, _t1: f64) -> Option<Aabb> {
        Some(Aabb::new(
            vec3(self.x0, self.k - 0.0001, self.z0),
            vec3(self.x1, self.k + 0.0001, self.z1),
        ))
    }
    /// Density for sampling this rect from `o` along `v`:
    /// distance^2 / (|cos(theta)| * area); 0 when the ray misses.
    fn pdf_value(&self, o: Vec3, v: Vec3) -> f64 {
        if let Some(rec) = self.hit(&Ray::new(o, v), 0.001, std::f64::MAX) {
            let area = (self.x1 - self.x0) * (self.z1 - self.z0);
            let distance_squared = rec.t * rec.t * v.squared_length();
            let cosine = (v.dot(rec.normal) / v.length()).abs();
            distance_squared / (cosine * area)
        } else {
            0.0
        }
    }
    /// Vector from `o` to a uniformly random point on the rect
    /// (used for light sampling).
    fn random(&self, o: Vec3) -> Vec3 {
        let mut rng = rand::thread_rng();
        let random_point = vec3(
            self.x0 + rng.gen::<f64>() * (self.x1 - self.x0),
            self.k,
            self.z0 + rng.gen::<f64>() * (self.z1 - self.z0),
        );
        random_point - o
    }
}
/// Axis-aligned rectangle in the plane x = k, spanning [y0,y1] x [z0,z1].
#[derive(Debug, Clone)]
pub struct YZRect {
    pub material: Material,
    pub y0: f64,
    pub y1: f64,
    pub z0: f64,
    pub z1: f64,
    // Plane offset along the x axis.
    pub k: f64,
}
impl YZRect {
    /// Rectangle spanning [y0, y1] x [z0, z1] in the plane x = k.
    pub fn new(y0: f64, y1: f64, z0: f64, z1: f64, k: f64, material: Material) -> Self {
        Self {
            y0,
            y1,
            z0,
            z1,
            k,
            material,
        }
    }
}
impl Hitable for YZRect {
    /// Ray/plane intersection against x = k, clipped to the rectangle.
    fn hit(&self, r: &Ray, t0: f64, t1: f64) -> Option<HitRecord> {
        let origin = r.origin();
        let direction = r.direction();
        let t = (self.k - origin.x) / direction.x;
        if t < t0 || t > t1 {
            return None;
        }
        let y = origin.y + t * direction.y;
        let z = origin.z + t * direction.z;
        let outside = y < self.y0 || y > self.y1 || z < self.z0 || z > self.z1;
        if outside {
            return None;
        }
        Some(HitRecord {
            t,
            p: r.point_at_parameter(t),
            // Hit position normalized to [0, 1] within the rect.
            u: (y - self.y0) / (self.y1 - self.y0),
            v: (z - self.z0) / (self.z1 - self.z0),
            // Fixed +x normal; wrap in FlipNormals for -x.
            normal: vec3(1.0, 0.0, 0.0),
            material: Some(&self.material),
        })
    }
    /// Thin box padded by 1e-4 along x so the AABB is never degenerate.
    fn bounding_box(&self, _t0: f64, _t1: f64) -> Option<Aabb> {
        Some(Aabb::new(
            vec3(self.k - 0.0001, self.y0, self.z0),
            vec3(self.k + 0.0001, self.y1, self.z1),
        ))
    }
}
/// Wrapper that negates the reported surface normal of an axis-aligned
/// rect. One variant per rect type because the rects are concrete structs.
#[derive(Debug, Clone)]
pub enum FlipNormals {
    OkayXY(XYRect),
    OkayXZ(XZRect),
    OkayYZ(YZRect),
}
use FlipNormals::*;
impl FlipNormals {
    /// Wraps an XY rect with its normal flipped.
    pub fn new_xy(hitable: XYRect) -> Self {
        OkayXY(hitable)
    }
    /// Wraps an XZ rect with its normal flipped.
    pub fn new_xz(hitable: XZRect) -> Self {
        OkayXZ(hitable)
    }
    /// Wraps a YZ rect with its normal flipped.
    pub fn new_yz(hitable: YZRect) -> Self {
        OkayYZ(hitable)
    }
}
impl Hitable for FlipNormals {
    /// Delegates to the wrapped rect and negates the resulting normal.
    fn hit(&self, r: &Ray, t0: f64, t1: f64) -> Option<HitRecord> {
        let inner = match self {
            OkayXY(hitable) => hitable.hit(r, t0, t1),
            OkayXZ(hitable) => hitable.hit(r, t0, t1),
            OkayYZ(hitable) => hitable.hit(r, t0, t1),
        };
        inner.map(|mut rec| {
            rec.normal = -rec.normal;
            rec
        })
    }
    /// Flipping normals does not change the spatial extent.
    fn bounding_box(&self, t0: f64, t1: f64) -> Option<Aabb> {
        match self {
            OkayXY(hitable) => hitable.bounding_box(t0, t1),
            OkayXZ(hitable) => hitable.bounding_box(t0, t1),
            OkayYZ(hitable) => hitable.bounding_box(t0, t1),
        }
    }
}
/// An axis-aligned box built from six rects (stored in `list`).
#[derive(Debug)]
pub struct Cuboid {
    // Minimum corner.
    pmin: Vec3,
    // Maximum corner.
    pmax: Vec3,
    // The six face rects.
    list: HitableList,
}
impl Hitable for Cuboid {
    /// Delegates to the six face rects and returns the closest face hit.
    fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        self.list.hit(r, t_min, t_max)
    }
    /// Exact box from the stored corners.
    fn bounding_box(&self, _t0: f64, _t1: f64) -> Option<Aabb> {
        Some(Aabb::new(self.pmin, self.pmax))
    }
}
impl Cuboid {
    /// Builds the box from min corner `p0` and max corner `p1` as six
    /// axis-aligned rects; the faces at the min coordinates are wrapped in
    /// FlipNormals so all normals point outward.
    pub fn new(p0: Vec3, p1: Vec3, material: Material) -> Self {
        let list: Vec<Box<dyn Hitable>> = vec![
            // Front (+z) and back (-z) faces.
            Box::new(XYRect::new(p0.x, p1.x, p0.y, p1.y, p1.z, material.clone())),
            Box::new(FlipNormals::new_xy(XYRect::new(
                p0.x,
                p1.x,
                p0.y,
                p1.y,
                p0.z,
                material.clone(),
            ))),
            // Top (+y) face.
            // NOTE(review): this face sits at p1.y + 0.01 rather than p1.y,
            // unlike every other face — looks like a workaround; confirm
            // whether the offset is intentional.
            Box::new(XZRect::new(
                p0.x,
                p1.x,
                p0.z,
                p1.z,
                p1.y + 0.01,
                material.clone(),
            )),
            // Bottom (-y) face.
            Box::new(FlipNormals::new_xz(XZRect::new(
                p0.x,
                p1.x,
                p0.z,
                p1.z,
                p0.y,
                material.clone(),
            ))),
            // Right (+x) and left (-x) faces.
            Box::new(YZRect::new(p0.y, p1.y, p0.z, p1.z, p1.x, material.clone())),
            Box::new(FlipNormals::new_yz(YZRect::new(
                p0.y,
                p1.y,
                p0.z,
                p1.z,
                p0.x,
                material.clone(),
            ))),
        ];
        Self {
            pmin: p0,
            pmax: p1,
            list: HitableList::new(list),
        }
    }
}
/// Moves a wrapped hitable by a fixed offset without mutating it.
#[derive(Debug)]
pub struct Translate {
    hitable: Box<dyn Hitable>,
    offset: Vec3,
}
impl Translate {
    /// Wraps `hitable`, translated by `offset`.
    pub fn new(hitable: Box<dyn Hitable>, offset: Vec3) -> Self {
        Self { hitable, offset }
    }
}
impl Hitable for Translate {
    /// Hit test in the object's local frame: shift the ray back by
    /// `offset`, then shift the resulting hit point forward again.
    fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let moved_r = Ray::new(r.origin() - self.offset, r.direction());
        self.hitable.hit(&moved_r, t_min, t_max).map(|mut rec| {
            rec.p += self.offset;
            rec
        })
    }
    /// The wrapped object's box translated by `offset`.
    fn bounding_box(&self, t0: f64, t1: f64) -> Option<Aabb> {
        self.hitable
            .bounding_box(t0, t1)
            .map(|boxy| Aabb::new(boxy.min + self.offset, boxy.max + self.offset))
    }
}
/// A wrapped hitable rotated about the Y axis by a fixed angle.
#[derive(Debug)]
pub struct RotateY {
    hitable: Box<dyn Hitable>,
    // Precomputed sin/cos of the rotation angle.
    sin_theta: f64,
    cos_theta: f64,
    // Whether the wrapped object reported a bounding box at construction.
    hasbox: bool,
    // Bounding box of the rotated object.
    boxy: Aabb,
}
impl RotateY {
    /// Wraps `hitable` rotated by `angle` degrees about the Y axis and
    /// precomputes the rotated bounding box.
    pub fn new(hitable: Box<dyn Hitable>, angle: f64) -> Self {
        let pi = std::f64::consts::PI;
        let radians = (pi / 180.0) * angle;
        let sin_theta = radians.sin();
        let cos_theta = radians.cos();
        let boxy = hitable.bounding_box(0.0, 1.0);
        let hasbox = boxy.is_some();
        let mut min = scalar(std::f64::MAX);
        let mut max = scalar(std::f64::MIN);
        // NOTE(review): the `expect` below panics for unbounded hitables,
        // which makes `hasbox` always true whenever construction succeeds —
        // confirm whether unbounded objects should be supported here.
        let bbox = boxy.expect("DOUBLE CHECK WHAT HAPPENS HERE"); //unwrap_or(Aabb::new(scalar(0.0),scalar(0.0)));
        // Rotate all 8 corners of the original box and take the
        // axis-aligned extremes as the new bounding box.
        for i in 0..2 {
            for j in 0..2 {
                for k in 0..2 {
                    let x = i as f64 * bbox.max.x + (1.0 - i as f64) * bbox.min.x;
                    let y = j as f64 * bbox.max.y + (1.0 - j as f64) * bbox.min.y;
                    let z = k as f64 * bbox.max.z + (1.0 - k as f64) * bbox.min.z;
                    let newx = cos_theta * x + sin_theta * z;
                    let newz = -sin_theta * x + cos_theta * z;
                    let tester = vec3(newx, y, newz);
                    for c in 0..3 {
                        if tester[c] > max[c] {
                            max[c] = tester[c];
                        }
                        if tester[c] < min[c] {
                            min[c] = tester[c];
                        }
                    }
                }
            }
        }
        Self {
            hitable,
            sin_theta,
            cos_theta,
            hasbox,
            boxy: Aabb::new(min, max),
        }
    }
}
impl Hitable for RotateY {
    /// Rotates the ray into object space (inverse rotation), intersects the
    /// wrapped object, then rotates the hit point and normal back into
    /// world space.
    fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let mut origin = r.origin();
        let mut direction = r.direction();
        let cos_theta = self.cos_theta;
        let sin_theta = self.sin_theta;
        // Inverse rotation applied to the ray (note the sign pattern is the
        // mirror of the one used on the hit results below).
        origin.x = cos_theta * r.origin().x - sin_theta * r.origin().z;
        origin.z = sin_theta * r.origin().x + cos_theta * r.origin().z;
        direction.x = cos_theta * r.direction().x - sin_theta * r.direction().z;
        direction.z = sin_theta * r.direction().x + cos_theta * r.direction().z;
        let rotated_r = Ray::new(origin, direction);
        if let Some(mut rec) = self.hitable.hit(&rotated_r, t_min, t_max) {
            let mut p = rec.p;
            let mut normal = rec.normal;
            // Forward rotation back into world space.
            p.x = cos_theta * rec.p.x + sin_theta * rec.p.z;
            p.z = -sin_theta * rec.p.x + cos_theta * rec.p.z;
            normal.x = cos_theta * rec.normal.x + sin_theta * rec.normal.z;
            normal.z = -sin_theta * rec.normal.x + cos_theta * rec.normal.z;
            rec.p = p;
            rec.normal = normal;
            return Some(rec);
        }
        None
    }
    /// Precomputed rotated box; the None branch is unreachable today
    /// because the constructor panics on unbounded objects.
    fn bounding_box(&self, _t0: f64, _t1: f64) -> Option<Aabb> {
        if self.hasbox {
            Some(self.boxy)
        } else {
            None
        }
    }
}
/*
#[derive(Debug)]
pub struct ConstantMedium {
pub boundary: Box<dyn Hitable>,
pub density: f64,
pub phase_function: Material,
}
impl ConstantMedium {
pub fn new(boundary: Box<dyn Hitable>, density: f64, a: Texture) -> Self {
Self {
boundary,
density,
phase_function: Isotropic::new(a),
}
}
}
impl Hitable for ConstantMedium {
fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
let mut rng = rand::thread_rng();
//let mut db = rng.gen::<f64>() < 0.00001;
//db = false;
if let Some(mut rec1) = self.boundary.hit(r, std::f64::MIN, std::f64::MAX) {
if let Some(mut rec2) = self.boundary.hit(r, rec1.t + 0.0001, std::f64::MAX) {
if rec1.t < t_min {
rec1.t = t_min;
}
if rec2.t > t_max {
rec2.t = t_max;
}
if rec1.t >= rec2.t {
return None;
}
if rec1.t < 0.0 {
rec1.t = 0.0;
}
let distance_inside_boundary = (rec2.t - rec1.t) * r.direction().length();
// TODO: CHECK C++ LOG FUNCTION BASE
let hit_distance = -(1.0 / self.density) * rng.gen::<f64>().ln();
if hit_distance < distance_inside_boundary {
let t = rec1.t + hit_distance / r.direction().length();
return Some(HitRecord {
t: t,
u: 0.0,
v: 0.0,
p: r.point_at_parameter(t),
normal: vec3(1.0, 0.0, 0.0), // arbitrary
material: Some(&self.phase_function),
});
}
}
}
None
}
fn bounding_box(&self, t0: f64, t1: f64) -> Option<Aabb> {
self.boundary.bounding_box(t0, t1)
}
}
*/
|
/*!
This is merged into a default manifest in order to form the full package manifest:
```cargo
[dependencies]
boolinator = "=0.1.0"
```
*/
extern crate boolinator;
use boolinator::Boolinator;
// Demo entry point: prints `Some(1)`, produced by boolinator's `as_some`
// extension method on `bool`.
fn main() {
    println!("--output--");
    println!("{:?}", true.as_some(1));
}
|
use crate::prelude::*;
/// One edge of a grid; used when picking random border cells.
#[derive(Debug)]
pub enum Side {
    Left,
    Right,
    Bottom,
    Top,
}
/// One cell of a Grid: its rectangle plus its (column, row) position.
pub struct Cell {
    pub rect: Rect,
    pub column_index: usize,
    pub row_index: usize,
}
impl Cell {
    /// True when `point` lies inside this cell's rect, borders included.
    pub fn contains(&self, point: &Point2) -> bool {
        let within_x = self.rect.left() <= point.x && point.x <= self.rect.right();
        let within_y = self.rect.bottom() <= point.y && point.y <= self.rect.top();
        within_x && within_y
    }
}
/// A rectangular container subdivided into equally sized cells,
/// stored column-by-column.
pub struct Grid {
    pub cells: Vec<Cell>,
    pub num_columns: usize,
    pub num_rows: usize,
    pub container: Rect,
}
impl Grid {
    /// Grid over the unit rect.
    pub fn unit(num_columns: usize, num_rows: usize) -> Grid {
        Grid::new(Rect::unit(), num_columns, num_rows)
    }
    /// Subdivides `container` into `num_columns` x `num_rows` cells.
    ///
    /// Cells are pushed column-by-column (column is the outer loop), which
    /// must stay consistent with the index math in `index_for`.
    pub fn new(container: Rect, num_columns: usize, num_rows: usize) -> Grid {
        let num_cells_in_grid = num_columns * num_rows;
        let mut cells = Vec::with_capacity(num_cells_in_grid);
        let column_width = container.w() as f32 / num_columns as f32;
        let row_height = container.h() as f32 / num_rows as f32;
        let cell_wh = Vector2::new(column_width, row_height);
        for column_index in 0..num_columns {
            for row_index in 0..num_rows {
                let normalized_x = column_index as f32 / num_columns as f32;
                let normalized_y = row_index as f32 / num_rows as f32;
                let top_left = container.denormalize_x_y(normalized_x, normalized_y);
                let bottom_right = top_left + cell_wh;
                let rect = Rect::from_corners(top_left, bottom_right);
                let cell = Cell {
                    rect,
                    column_index,
                    row_index,
                };
                cells.push(cell);
            }
        }
        Grid {
            num_columns,
            num_rows,
            cells,
            container,
        }
    }
    /// Maps (column, row) to an index into `cells`, panicking with a
    /// descriptive message when the coordinates are outside the grid.
    ///
    /// Fix: the bound check used `>` before, which let the one-past-the-end
    /// index (== cells.len()) slip through and panic later without the
    /// helpful message; `>=` closes that off-by-one.
    pub fn index_for(&self, column: usize, row: usize) -> usize {
        let index = (self.num_rows * column) + row;
        if index >= self.cells.len() {
            let message = format!(
                "Tried to get an index for coordinates ({}, {}), but the grid is only {}x{}.",
                column, row, self.num_columns, self.num_rows
            );
            panic!("{}", message);
        }
        index
    }
    /// Index of a random cell on one of the given edges.
    /// (Signature generalized from `&Vec<Side>` to `&[Side]`; existing
    /// callers passing `&Vec<Side>` still work via deref coercion.)
    pub fn random_edge_index(&self, sides: &[Side], rand: &mut Rand) -> usize {
        let random_side = rand.element(sides);
        // NOTE(review): assumes `rand.range(lo, hi)` can return `hi` (or
        // that excluding one far corner per side is acceptable) — confirm
        // against the Rand implementation.
        let (x, y) = match random_side {
            Side::Left => (0, rand.range(0, self.max_y())),
            Side::Right => (self.max_x(), rand.range(0, self.max_y())),
            Side::Bottom => (rand.range(0, self.max_x()), 0),
            Side::Top => (rand.range(0, self.max_x()), self.max_y()),
        };
        self.index_for(x, y)
    }
    /// Index of a random cell anywhere on the grid's border.
    pub fn random_edge_index_from_any_side(&self, rand: &mut Rand) -> usize {
        let sides = vec![Side::Left, Side::Right, Side::Bottom, Side::Top];
        self.random_edge_index(&sides, rand)
    }
    /// Largest valid column index.
    pub fn max_x(&self) -> usize {
        self.num_columns - 1
    }
    /// Largest valid row index.
    pub fn max_y(&self) -> usize {
        self.num_rows - 1
    }
    /// First cell containing `point`, scanning in storage order.
    pub fn find_xy(&self, point: &Point2) -> Option<&Cell> {
        self.cells.iter().find(|cell| cell.contains(point))
    }
    /// Index of the first cell containing `point`.
    pub fn find_xy_cell_index(&self, point: &Point2) -> Option<usize> {
        self.cells.iter().position(|cell| cell.contains(point))
    }
}
|
/*
* Copyright (C) 2020 Zixiao Han
*/
// Fixed per-move overhead (milliseconds) reserved for I/O and bookkeeping.
static OVERHEAD_TIME: u128 = 50;
/// Time budget for the next move: a primary slice plus an emergency reserve.
pub struct TimeCapacity {
    pub main_time_millis: u128,
    pub extra_time_millis: u128,
}
/// Splits the remaining clock into a main budget and an extra reserve.
///
/// The main budget is the remaining time divided by a softened move count
/// (1 + moves_to_go/2), plus half of the per-move increment. Whatever is
/// left over is spread evenly across the remaining moves as reserve. The
/// overhead is subtracted from the main budget; when the budget is too
/// small for that, it is halved instead and the reserve dropped.
pub fn calculate_time_capacity(total_time_millis: u128, moves_to_go: u128, increment: u128) -> TimeCapacity {
    let budget = total_time_millis / (1 + moves_to_go * 5 / 10) + increment / 2;
    let reserve = if total_time_millis > budget {
        (total_time_millis - budget) / moves_to_go
    } else {
        0
    };
    if budget > OVERHEAD_TIME {
        TimeCapacity {
            main_time_millis: budget - OVERHEAD_TIME,
            extra_time_millis: reserve,
        }
    } else {
        TimeCapacity {
            main_time_millis: budget / 2,
            extra_time_millis: 0,
        }
    }
}
|
use actix::prelude::*;
use crate::executor::Executor;
use crate::blender::Runner;
use crate::messages::{
WorkerWsConnect,
GetStatus,
StartRender,
JobStatusUpdate,
StatusUpdate,
JobStatus,
Status
};
/// Actor tracking the render worker: whether it has started, the status of
/// the current job, and the websocket listeners to notify about progress.
pub struct WorkerState {
    pub started: bool,
    // None while no render job is running.
    job_status: Option<JobStatus>,
    // Recipients that receive JobStatusUpdate pushes.
    listeners: Vec<Recipient<JobStatusUpdate>>
}
impl WorkerState {
    /// Fresh, idle worker with no job and no listeners.
    pub fn new() -> Self {
        Self {
            started: false,
            job_status: None,
            listeners: Vec::new()
        }
    }
    /// Pushes the current job status to every registered listener.
    /// No-op while no job is running.
    fn update_clients(&self) {
        println!("[WorkerState] notify_clients()");
        let job_status = match self.job_status.as_ref() {
            Some(status) => status,
            None => return,
        };
        // TODO: too much cloning
        let status_to_send = JobStatusUpdate {
            status: Status::Working {
                job_status: job_status.clone()
            }
        };
        for listener in self.listeners.iter() {
            println!("[WorkerState] Sending to recipient");
            // Fix: this used unwrap(), so a single listener whose mailbox
            // had closed (e.g. a dropped websocket) crashed the whole
            // actor. Log and keep notifying the remaining listeners.
            if listener.do_send(status_to_send.clone()).is_err() {
                eprintln!("[WorkerState] Failed to notify a listener (mailbox closed?)");
            }
        }
    }
}
impl Actor for WorkerState {
    type Context = Context<Self>;
    /// Lifecycle hook: just logs that the actor is up.
    fn started(&mut self, _ctx: &mut Self::Context) {
        println!("[WorkerState] Started");
    }
}
impl Handler<WorkerWsConnect> for WorkerState {
    type Result = ();
    /// Registers a newly connected websocket and immediately pushes it the
    /// current job status (if a job is running).
    fn handle(&mut self, msg: WorkerWsConnect, _ctx: &mut Context<Self>) {
        println!("[WorkerState] Someone joined");
        self.listeners.push(msg.addr);
        self.update_clients();
    }
}
impl Handler<GetStatus> for WorkerState {
    type Result = Status;
    /// Reports `Working` with a snapshot of the job when one exists,
    /// otherwise `Ready`.
    fn handle(&mut self, _msg: GetStatus, _ctx: &mut Context<Self>) -> Self::Result {
        self.job_status
            .as_ref()
            .map(|job_status| Status::Working {
                job_status: job_status.clone(),
            })
            .unwrap_or(Status::Ready)
    }
}
impl Handler<StartRender> for WorkerState {
    type Result = ();
    /// Records the new job status and spins up a dedicated arbiter running
    /// an Executor that streams render progress back to this actor.
    fn handle(&mut self, msg: StartRender, ctx: &mut Context<Self>) {
        println!("[WorkerState] Starting render, from frame {} to frame {}", msg.frame_start, msg.frame_end);
        let frame_start = msg.frame_start;
        let frame_end = msg.frame_end;
        // The frame range is inclusive, hence the +1.
        let frames_to_render = frame_end - frame_start + 1;
        self.job_status = Some(JobStatus {
            frame_start,
            frame_end,
            frames_to_render,
            render_times: Vec::new()
        });
        let worker_state_address = ctx.address();
        // Run the render on its own arbiter (thread) so it does not block
        // this actor's event loop.
        let executor_arbiter = Arbiter::new();
        Executor::start_in_arbiter(&executor_arbiter, move |ctx: &mut Context<Executor>| {
            let mut runner = Runner::new(frame_start, frame_end);
            let execution = runner.execute();
            // Items from the execution stream are delivered to the Executor
            // actor as messages.
            Executor::add_stream(execution, ctx);
            Executor::new(worker_state_address)
        });
    }
}
impl Handler<StatusUpdate> for WorkerState {
    type Result = ();
    /// Applies progress updates coming from the Executor stream.
    fn handle(&mut self, msg: StatusUpdate, _ctx: &mut Context<Self>) -> Self::Result {
        match msg {
            StatusUpdate::Started => {
                println!("[WorkerState] Render started!");
            },
            StatusUpdate::RenderedFrame { frame_number, render_time } => {
                println!("[WorkerState] Rendered frame {} in {} ms", frame_number, render_time);
                // Panics if no job was started; an update without a job is
                // a protocol violation.
                let job_status = self.job_status.as_mut().unwrap();
                job_status.render_times.push(render_time);
                self.update_clients();
            },
            StatusUpdate::Finished => {
                println!("[WorkerState] Finished!");
                println!("[WorkerState] {:#?}", self.job_status.as_ref().unwrap());
            },
            // NOTE(review): if StatusUpdate has exactly the three variants
            // above, this arm is dead; if more exist they panic here —
            // confirm against messages::StatusUpdate.
            _ => unreachable!()
        }
    }
}
|
#![allow(unused_imports)]
mod logic;
mod arithmetic;
mod sequential;
mod architecture;
use logic::*;
use logic::bit::{O, I};
use architecture::*;
use arithmetic::*;
use sequential::*;
use sequential::ClockState::{Tick, Tock};
// Entry point: builds the simulated computer and advances it two cycles.
fn main() {
    let mut computer = Computer::new();
    computer.run();
    computer.run();
    // Scratch code kept for manual experiments with the PC/Memory/Clock
    // components:
    // let mut pc = PC::new();
    // let mut clock = Clock::new();
    // let mut memory = Memory::new();
    // // clock.next();
    // for _ in 0..80 {
    // memory.keyboard_input(&clock);
    // pc.input(&clock, Word::new([I; 16]), I, O, O);
    // let out = pc.output(&clock);
    // memory.input(&clock, Word::new([I; 16]), [O, O, O, O, O, O, O, O, O, O, O, O, O, O, I], I);
    // let memory_out = memory.output(&clock, [O, O, O, O, O, O, O, O, O, O, O, O, O, O, I]);
    // // println!("");
    // // println!("times: {}, PC: {:?}", i, pc);
    // if clock.state() == Tock {
    // println!("");
    // println!("{}", out);
    // println!("memory");
    // println!("{}", memory_out);
    // }
    // clock.next();
    // }
    // let mut clock = Clock::new();
    // loop {
    // clock.next();
    // println!("{:?}", clock.state());
    // }
}
|
use super::*;
/// Class-file version: minor first, then major. The field order mirrors
/// the on-disk layout and must not be changed.
#[derive(Debug)]
pub struct Version {
    pub minor: U2,
    pub major: U2,
}
|
//! Internal proof format for the Varisat SAT solver.
use varisat_formula::{Lit, Var};
pub mod binary_format;
mod vli_enc;
/// Integer type used to store a hash of a clause.
pub type ClauseHash = u64;
/// Hash a single literal.
///
/// Multiple literals can be combined with xor, as done in [`clause_hash`].
pub fn lit_hash(lit: Lit) -> ClauseHash {
    lit_code_hash(lit.code())
}
/// Hash a single literal from a code.
///
/// This doesn't require the code to correspond to a valid literal.
pub fn lit_code_hash(lit_code: usize) -> ClauseHash {
    // Constant based on the golden ratio provides good mixing for the resulting upper bits
    (!(lit_code as u64)).wrapping_mul(0x61c8864680b583ebu64)
}
/// A fast hash function for clauses (or other *sets* of literals).
///
/// Because the per-literal hashes are combined with xor, the result is
/// invariant under permutation of the input slice, so it behaves like a
/// set hash. Duplicated literals cancel out and are not handled.
pub fn clause_hash(lits: &[Lit]) -> ClauseHash {
    lits.iter().fold(0, |acc, &lit| acc ^ lit_hash(lit))
}
/// Justifications for a simple clause deletion.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum DeleteClauseProof {
    /// The clause is known to be redundant.
    Redundant,
    /// The clause is irredundant but subsumed by the clause added in the previous step.
    Simplified,
    /// The clause contains a true literal.
    ///
    /// Also used to justify deletion of tautological clauses.
    Satisfied,
}
/// A single proof step.
///
/// Represents a mutation of the current formula and a justification for the mutation's validity.
#[derive(Copy, Clone, Debug)]
pub enum ProofStep<'a> {
    /// Update the global to solver var mapping.
    ///
    /// For proof checking, the solver variable names are only used for hash computations.
    SolverVarName { global: Var, solver: Option<Var> },
    /// Update the global to user var mapping.
    ///
    /// A variable without user mapping is considered hidden by the checker. When a variable without
    /// user mapping gets a user mapping, the sampling mode is initialized to witness.
    ///
    /// It's not allowed to change a variable from one user name to another when the variable is in
    /// use.
    ///
    /// Clause additions and assumptions are only allowed to use variables with user mappings (and a
    /// non-witness sampling mode).
    UserVarName { global: Var, user: Option<Var> },
    /// Delete a variable.
    ///
    /// This is only allowed for variables that are isolated and hidden.
    DeleteVar { var: Var },
    /// Changes the sampling mode of a variable.
    ///
    /// This is only used to change between Sample and Witness. Hidden is managed by adding or
    /// removing a user var name.
    ChangeSamplingMode { var: Var, sample: bool },
    /// Add a new input clause.
    ///
    /// This is only emitted for clauses added incrementally after an initial solve call.
    AddClause { clause: &'a [Lit] },
    /// Add a clause that is an asymmetric tautology (AT).
    ///
    /// Assuming the negation of the clause's literals leads to a unit propagation conflict.
    ///
    /// The second slice contains the hashes of all clauses involved in the resulting conflict. The
    /// order of hashes is the order in which the clauses propagate when all literals of the clause
    /// are set false.
    ///
    /// When generating DRAT proofs the second slice is ignored and may be empty.
    AtClause {
        redundant: bool,
        clause: &'a [Lit],
        propagation_hashes: &'a [ClauseHash],
    },
    /// Unit clauses found by top-level unit-propagation.
    ///
    /// Pairs of unit clauses and the original clause that became unit. Clauses are in chronological
    /// order. This is equivalent to multiple `AtClause` steps where the clause is unit and the
    /// propagation_hashes field contains just one hash, with the difference that this is not output
    /// for DRAT proofs.
    ///
    /// Ignored when generating DRAT proofs.
    UnitClauses { units: &'a [(Lit, ClauseHash)] },
    /// Delete a clause consisting of the given literals.
    DeleteClause {
        clause: &'a [Lit],
        proof: DeleteClauseProof,
    },
    /// Change the number of clause hash bits used.
    ChangeHashBits { bits: u32 },
    /// A (partial) assignment that satisfies all clauses and assumptions.
    Model { assignment: &'a [Lit] },
    /// Change the active set of assumptions.
    ///
    /// This is checked against future model or failed assumptions steps.
    Assumptions { assumptions: &'a [Lit] },
    /// A subset of the assumptions that make the formula unsat.
    FailedAssumptions {
        failed_core: &'a [Lit],
        propagation_hashes: &'a [ClauseHash],
    },
    /// Signals the end of a proof.
    ///
    /// A varisat proof must end with this command or else the checker will complain about an
    /// incomplete proof.
    End,
}
impl<'a> ProofStep<'a> {
    /// Does this proof step use clause hashes?
    ///
    /// The match is deliberately exhaustive (no `_` arm) so that adding a
    /// new step variant forces this function to be revisited.
    pub fn contains_hashes(&self) -> bool {
        match self {
            ProofStep::AtClause { .. }
            | ProofStep::UnitClauses { .. }
            | ProofStep::FailedAssumptions { .. } => true,
            ProofStep::SolverVarName { .. }
            | ProofStep::UserVarName { .. }
            | ProofStep::DeleteVar { .. }
            | ProofStep::ChangeSamplingMode { .. }
            | ProofStep::AddClause { .. }
            | ProofStep::DeleteClause { .. }
            | ProofStep::ChangeHashBits { .. }
            | ProofStep::Model { .. }
            | ProofStep::Assumptions { .. }
            | ProofStep::End => false,
        }
    }
}
|
#![feature(async_await, await_macro, futures_api)]
use romio::tcp::{TcpListener, TcpStream};
use futures::prelude::*;
/// Writes a fixed greeting to the client.
// NOTE(review): the Result of write_all is discarded — a failed write is
// silently ignored; consider logging or propagating it.
async fn say_hello(mut stream: TcpStream) {
    await!(stream.write_all(b"Shall I hear more, or shall I speak at this?"));
}
/// Accepts TCP connections on 127.0.0.1:8080 forever, greeting each client.
/// Connections are handled serially, one at a time.
async fn listen() -> Result<(), Box<dyn std::error::Error + 'static>> {
    let socket_addr = "127.0.0.1:8080".parse()?;
    let listener = TcpListener::bind(&socket_addr)?;
    let mut incoming = listener.incoming();
    // accept connections and process them serially
    while let Some(stream) = await!(incoming.next()) {
        await!(say_hello(stream?));
    }
    Ok(())
}
|
use chrono::{DateTime, TimeZone, Utc};
use csv::Reader;
use reqwest::{get, Url};
use serde::Deserialize;
use serde::de::{self, Deserializer};
use std::error::Error;
static APIBASEURL: &str = "https://www.alphavantage.co/query";
static YMD_HMS: &str = "%Y-%m-%d %H:%M:%S";
// TODO: Move this into a time utils module if needed
/// Serde helper: parses a "%Y-%m-%d %H:%M:%S" string into a UTC timestamp.
// NOTE(review): the string is interpreted as UTC with no offset — confirm
// the API actually reports timestamps in UTC for the chosen endpoint.
pub fn parse_utcdatetime<'de, D: Deserializer<'de>>(deserializer: D) -> Result<DateTime<Utc>, D::Error> {
    let s = String::deserialize(deserializer)?;
    Utc.datetime_from_str(&s, YMD_HMS).map_err(de::Error::custom)
}
/// One row (OHLCV bar) of an Alpha Vantage time-series CSV response.
#[derive(Debug, Deserialize)]
pub struct TimeSeriesRecord {
    /// Bar timestamp, parsed from "YYYY-MM-DD HH:MM:SS".
    #[serde(deserialize_with = "parse_utcdatetime")]
    timestamp: DateTime<Utc>,
    open: f64,
    high: f64,
    low: f64,
    close: f64,
    volume: f64,
}
/// Supported intraday bar sizes for the time-series API.
pub enum Interval {
    OneMin,
    FiveMin,
    FifteenMin,
    ThirtyMin,
    SixtyMin,
}
impl AsRef<str> for Interval {
    /// The query-string value the API expects for this interval.
    fn as_ref(&self) -> &'static str {
        match *self {
            Interval::OneMin => "1min",
            Interval::FiveMin => "5min",
            Interval::FifteenMin => "15min",
            Interval::ThirtyMin => "30min",
            Interval::SixtyMin => "60min",
        }
    }
}
/// Time-series API endpoints (only intraday is implemented here).
pub enum TimeSeries {
    Intraday,
}
impl AsRef<str> for TimeSeries {
    /// The `function` query-parameter value for this endpoint.
    fn as_ref(&self) -> &'static str {
        match *self {
            TimeSeries::Intraday => "TIME_SERIES_INTRADAY",
        }
    }
}
impl TimeSeries {
    /// Queries the endpoint for `symbol` at `interval` and parses the CSV
    /// response into records.
    ///
    /// `compact` selects the provider's truncated result set ("compact")
    /// instead of the full history ("full"). Performs a blocking HTTP GET;
    /// any network, URL, or row-deserialization failure is returned as an
    /// error.
    pub fn query(&self, symbol: &str, interval: Interval, api_key: &str, compact: bool) -> Result<Vec<TimeSeriesRecord>, Box<Error>> {
        let output_size = if compact {
            "compact"
        } else {
            "full"
        };
        let params = vec![
            ("function", self.as_ref()),
            ("symbol", symbol),
            ("interval", interval.as_ref()),
            ("apikey", api_key),
            ("outputsize", output_size),
            ("datatype", "csv"),
        ];
        // Fix: the argument was the mojibake token `¶ms` (a mangled
        // `&params`), which does not compile.
        let url = Url::parse_with_params(APIBASEURL, &params)?;
        let csv = get(url)?.text()?;
        let mut reader = Reader::from_reader(csv.as_bytes());
        // Collecting into Result short-circuits on the first bad row.
        let records: Result<Vec<TimeSeriesRecord>, _> = reader.deserialize().collect();
        Ok(records?)
    }
}
|
mod warp;
mod random;
// Maybe I shouldn't name it warp...
pub use crate::utils::warp::*;
pub use random::*;
|
// Local Warcraft III CASC storage used by the tests below.
// NOTE(review): "STROAGE" is a typo for "STORAGE"; renaming would touch
// every test in this file, so it is only flagged here.
const STROAGE_PATH: &'static str = r#"C:\Program Files (x86)\Warcraft III\Data"#;
const TEST_FILE: &'static str = r#"war3.w3mod:scripts\blizzard.j"#;
#[test]
fn test_all() {
    // Requires a local Warcraft III install at STROAGE_PATH.
    let storage = casclib::open(STROAGE_PATH).unwrap();
    let count = storage.file_count();
    assert!(count > 0);
    // Walk the map files and check every entry is readable.
    // Cleanup: the old `walked` counter duplicated `map_file_count` exactly
    // and was never asserted, so it has been removed.
    let mut map_file_count = 0;
    for r in storage.files_with_mask("war3.w3mod:maps") {
        let entry = r.expect("file entry");
        let _name = entry.get_name();
        map_file_count += 1;
    }
    assert!(map_file_count > 0);
    // Smoke-test opening a known file.
    let _bj = storage.entry(TEST_FILE).open().unwrap();
}
#[cfg(target_os = "windows")]
#[test]
fn test_read_unicode() {
    use widestring::U16CString;
    use std::os::windows::ffi::OsStringExt;
    use std::ffi::OsString;
    // Round-trips a non-ASCII install path through UTF-16 to verify that
    // casclib::open accepts wide/unicode paths on Windows.
    let storage = casclib::open(OsString::from_wide(&U16CString::from_str(r#"C:\Program Files (x86)\Warcraft III中文\Data"#).unwrap().into_vec())).unwrap();
    let count = storage.file_count();
    assert!(count > 0);
    // NOTE(review): `walked` mirrors `map_file_count` and is never
    // asserted — it looks removable.
    let mut walked = 0;
    let mut map_file_count = 0;
    for r in storage.files_with_mask("war3.w3mod:maps") {
        walked = walked + 1;
        let entry = r.expect("file entry");
        let _name = entry.get_name();
        map_file_count = map_file_count + 1;
    }
    assert!(map_file_count > 0);
    let _bj = storage.entry(TEST_FILE).open().unwrap();
}
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Module-level assembly support.
//!
//! The macro defined here allows you to specify "top-level",
//! "file-scoped", or "module-level" assembly. These synonyms
//! all correspond to LLVM's module-level inline assembly instruction.
//!
//! For example, `global_asm!("some assembly here")` codegens to
//! LLVM's `module asm "some assembly here"`. All of LLVM's caveats
//! therefore apply.
use syntax::ast;
use syntax::codemap::respan;
use syntax::ext::base;
use syntax::ext::base::*;
use syntax::feature_gate;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span;
use syntax::tokenstream;
use syntax::util::small_vector::SmallVector;
/// Name under which this syntax extension is registered.
pub const MACRO: &'static str = "global_asm";

/// Expands `global_asm!("...")` into a module-level inline-assembly item.
///
/// Emits a feature-gate error (and a dummy expansion) unless the
/// `global_asm` language feature is enabled; otherwise parses the single
/// string-literal argument and wraps it in an anonymous
/// `ItemKind::GlobalAsm` item.
pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt,
                              sp: Span,
                              tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult + 'cx> {
    // Gate the macro behind the `global_asm` language feature.
    if !cx.ecfg.enable_global_asm() {
        feature_gate::emit_feature_err(&cx.parse_sess,
                                       MACRO,
                                       sp,
                                       feature_gate::GateIssue::Language,
                                       feature_gate::EXPLAIN_GLOBAL_ASM);
        return DummyResult::any(sp);
    }
    let mut p = cx.new_parser_from_tts(tts);
    // The argument must be a single string literal; otherwise an error is
    // reported and a dummy result lets compilation continue.
    let (asm, _) = match expr_to_string(cx,
                                        panictry!(p.parse_expr()),
                                        "inline assembly must be a string literal") {
        Some((s, st)) => (s, st),
        None => return DummyResult::any(sp),
    };
    // Build an unnamed AST item carrying the assembly string; it inherits
    // the expansion span and an inherited (private) visibility.
    MacEager::items(SmallVector::one(P(ast::Item {
        ident: ast::Ident::with_empty_ctxt(Symbol::intern("")),
        attrs: Vec::new(),
        id: ast::DUMMY_NODE_ID,
        node: ast::ItemKind::GlobalAsm(P(ast::GlobalAsm {
            asm,
            ctxt: cx.backtrace(),
        })),
        vis: respan(sp.shrink_to_lo(), ast::VisibilityKind::Inherited),
        span: sp,
        tokens: None,
    })))
}
|
//! Utilities used for testing and benchmarking.
pub mod ffo;
pub mod perft;
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use failure::{format_err, Error};
use std::ops::Deref;
use std::time::{Duration, SystemTime};
use token_cache::{CacheKey, CacheToken, KeyFor};
/// Representation of a single OAuth token including its expiry time.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct OAuthToken {
    /// Absolute wall-clock time after which the token is no longer valid.
    expiry_time: SystemTime,
    /// The raw OAuth token string.
    token: String,
}
// Exposes the expiry time so the token cache can evict stale entries.
impl CacheToken for OAuthToken {
    fn expiry_time(&self) -> &SystemTime {
        &self.expiry_time
    }
}
impl Deref for OAuthToken {
    type Target = str;

    /// Lets an `OAuthToken` be used anywhere a `&str` is expected.
    fn deref(&self) -> &str {
        self.token.as_str()
    }
}
impl From<fidl_fuchsia_auth::AuthToken> for OAuthToken {
    fn from(auth_token: fidl_fuchsia_auth::AuthToken) -> OAuthToken {
        OAuthToken {
            // Convert the FIDL relative expiry (seconds from now) into an
            // absolute timestamp so the cache can compare against wall time.
            expiry_time: SystemTime::now() + Duration::from_secs(auth_token.expires_in),
            token: auth_token.token,
        }
    }
}
/// Representation of a single Firebase token including its expiry time.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct FirebaseAuthToken {
    /// The raw Firebase ID token string.
    id_token: String,
    /// Firebase-local user id, when the provider supplied one.
    local_id: Option<String>,
    /// User email address, when the provider supplied one.
    email: Option<String>,
    /// Absolute wall-clock time after which the token is no longer valid.
    expiry_time: SystemTime,
}
// Exposes the expiry time so the token cache can evict stale entries.
impl CacheToken for FirebaseAuthToken {
    fn expiry_time(&self) -> &SystemTime {
        &self.expiry_time
    }
}
impl From<fidl_fuchsia_auth::FirebaseToken> for FirebaseAuthToken {
    fn from(firebase_token: fidl_fuchsia_auth::FirebaseToken) -> FirebaseAuthToken {
        FirebaseAuthToken {
            id_token: firebase_token.id_token,
            local_id: firebase_token.local_id,
            email: firebase_token.email,
            // Convert the FIDL relative expiry (seconds from now) into an
            // absolute timestamp so the cache can compare against wall time.
            expiry_time: SystemTime::now() + Duration::from_secs(firebase_token.expires_in),
        }
    }
}
impl FirebaseAuthToken {
    /// Returns a new FIDL `FirebaseToken` using data cloned from our
    /// internal representation.
    pub fn to_fidl(&self) -> fidl_fuchsia_auth::FirebaseToken {
        // Remaining lifetime in whole seconds; a token already past its
        // expiry reports 0 rather than erroring.
        let expires_in = self
            .expiry_time
            .duration_since(SystemTime::now())
            .map(|remaining| remaining.as_secs())
            .unwrap_or(0);
        fidl_fuchsia_auth::FirebaseToken {
            id_token: self.id_token.clone(),
            local_id: self.local_id.clone(),
            email: self.email.clone(),
            expires_in,
        }
    }
}
/// Key for storing OAuth access tokens in the token cache.
#[derive(Debug, PartialEq, Eq)]
pub struct AccessTokenKey {
    /// Identifier of the auth provider that issued the token.
    auth_provider_type: String,
    /// Identifier of the user the token belongs to.
    user_profile_id: String,
    /// Newline-joined scope strings (see `combine_scopes`).
    scopes: String,
}
// Maps the key's fields onto the generic cache-key interface; the scope
// string acts as the per-(provider, user) subkey.
impl CacheKey for AccessTokenKey {
    fn auth_provider_type(&self) -> &str {
        &self.auth_provider_type
    }
    fn user_profile_id(&self) -> &str {
        &self.user_profile_id
    }
    fn subkey(&self) -> &str {
        &self.scopes
    }
}
// Associates this key type with the token type it stores in the cache.
impl KeyFor for AccessTokenKey {
    type TokenType = OAuthToken;
}
impl AccessTokenKey {
    /// Create a new access token key.
    ///
    /// # Errors
    /// Fails when `auth_provider_type` or `user_profile_id` is empty.
    pub fn new<T: Deref<Target = str>>(
        auth_provider_type: String,
        user_profile_id: String,
        scopes: &[T],
    ) -> Result<AccessTokenKey, Error> {
        validate_provider_and_id(&auth_provider_type, &user_profile_id)?;
        // Field-init shorthand replaces the redundant `field: field` form.
        Ok(AccessTokenKey {
            auth_provider_type,
            user_profile_id,
            scopes: Self::combine_scopes(scopes),
        })
    }

    /// Folds the scope list into a single cache-key string.
    fn combine_scopes<T: Deref<Target = str>>(scopes: &[T]) -> String {
        // Use the scope strings concatenated with a newline as the key. Note that this
        // is order dependent; a client that requested the same scopes with two
        // different orders would create two cache entries. We argue that the
        // harm of this is limited compared to the cost of sorting scopes to
        // create a canonical ordering on every access. Most clients are likely
        // to use a consistent order anyway and we request this behaviour in the
        // interface. TODO(satsukiu): Consider a zero-copy solution for the
        // simple case of a single scope.
        match scopes.len() {
            0 => String::new(),
            1 => scopes.first().unwrap().to_string(),
            // Dropped the redundant `String::from(...)` wrapper — the fold
            // already produces a `String`.
            _ => scopes.iter().fold(String::new(), |acc, el| {
                let sep = if acc.is_empty() { "" } else { "\n" };
                acc + sep + el
            }),
        }
    }
}
/// Key for storing OpenID tokens in the token cache.
#[derive(Debug, PartialEq, Eq)]
pub struct IdTokenKey {
    /// Identifier of the auth provider that issued the token.
    auth_provider_type: String,
    /// Identifier of the user the token belongs to.
    user_profile_id: String,
    /// Audience the ID token was minted for; used as the cache subkey.
    audience: String,
}
// Maps the key's fields onto the generic cache-key interface.
impl CacheKey for IdTokenKey {
    fn auth_provider_type(&self) -> &str {
        &self.auth_provider_type
    }
    fn user_profile_id(&self) -> &str {
        &self.user_profile_id
    }
    fn subkey(&self) -> &str {
        &self.audience
    }
}
// Associates this key type with the token type it stores in the cache.
impl KeyFor for IdTokenKey {
    type TokenType = OAuthToken;
}
impl IdTokenKey {
    /// Create a new ID token key.
    ///
    /// # Errors
    /// Fails when `auth_provider_type` or `user_profile_id` is empty.
    pub fn new(
        auth_provider_type: String,
        user_profile_id: String,
        audience: String,
    ) -> Result<IdTokenKey, Error> {
        validate_provider_and_id(&auth_provider_type, &user_profile_id)?;
        // Field-init shorthand replaces the redundant `field: field` form.
        Ok(IdTokenKey {
            auth_provider_type,
            user_profile_id,
            audience,
        })
    }
}
/// Key for storing Firebase tokens in the token cache.
#[derive(Debug, PartialEq, Eq)]
pub struct FirebaseTokenKey {
    /// Identifier of the auth provider that issued the token.
    auth_provider_type: String,
    /// Identifier of the user the token belongs to.
    user_profile_id: String,
    /// Firebase API key the token was minted for; used as the cache subkey.
    api_key: String,
}
// Maps the key's fields onto the generic cache-key interface.
impl CacheKey for FirebaseTokenKey {
    fn auth_provider_type(&self) -> &str {
        &self.auth_provider_type
    }
    fn user_profile_id(&self) -> &str {
        &self.user_profile_id
    }
    fn subkey(&self) -> &str {
        &self.api_key
    }
}
// Associates this key type with the token type it stores in the cache.
impl KeyFor for FirebaseTokenKey {
    type TokenType = FirebaseAuthToken;
}
impl FirebaseTokenKey {
    /// Creates a new Firebase token key.
    ///
    /// # Errors
    /// Fails when `auth_provider_type` or `user_profile_id` is empty.
    pub fn new(
        auth_provider_type: String,
        user_profile_id: String,
        api_key: String,
    ) -> Result<FirebaseTokenKey, Error> {
        validate_provider_and_id(&auth_provider_type, &user_profile_id)?;
        // Field-init shorthand replaces the redundant `field: field` form.
        Ok(FirebaseTokenKey {
            auth_provider_type,
            user_profile_id,
            api_key,
        })
    }
}
/// Validates that the given auth_provider_type and user_profile_id are
/// nonempty.
/// Validates that the given auth_provider_type and user_profile_id are
/// nonempty.
fn validate_provider_and_id(auth_provider_type: &str, user_profile_id: &str) -> Result<(), Error> {
    // Report the first empty field, checking the provider type first.
    match (auth_provider_type.is_empty(), user_profile_id.is_empty()) {
        (true, _) => Err(format_err!("auth_provider_type cannot be empty")),
        (false, true) => Err(format_err!("user_profile_id cannot be empty")),
        (false, false) => Ok(()),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use fidl_fuchsia_auth::TokenType;

    // Fixed inputs shared by the test cases below.
    const LONG_EXPIRY: Duration = Duration::from_secs(3000);
    const TEST_ACCESS_TOKEN: &str = "access token";
    const TEST_FIREBASE_ID_TOKEN: &str = "firebase token";
    const TEST_FIREBASE_LOCAL_ID: &str = "firebase local id";
    const TEST_EMAIL: &str = "user@test.com";
    const TEST_AUTH_PROVIDER_TYPE: &str = "test-provider";
    const TEST_USER_PROFILE_ID: &str = "test-user-123";
    const TEST_SCOPE_1: &str = "scope-1";
    const TEST_SCOPE_2: &str = "scope-2";
    const TEST_AUDIENCE: &str = "audience";
    const TEST_FIREBASE_API: &str = "firebase-api";

    // Converting a FIDL AuthToken must preserve the token string and turn the
    // relative expiry into an absolute time bracketed by the timestamps taken
    // immediately before and after the conversion.
    #[test]
    fn test_oauth_from_fidl() {
        let fidl_type = fidl_fuchsia_auth::AuthToken {
            token_type: TokenType::AccessToken,
            expires_in: LONG_EXPIRY.as_secs(),
            token: TEST_ACCESS_TOKEN.to_string(),
        };
        let time_before_conversion = SystemTime::now();
        let native_type = OAuthToken::from(fidl_type);
        let time_after_conversion = SystemTime::now();
        assert_eq!(&native_type.token, TEST_ACCESS_TOKEN);
        assert!(native_type.expiry_time >= time_before_conversion + LONG_EXPIRY);
        assert!(native_type.expiry_time <= time_after_conversion + LONG_EXPIRY);
        // Also verify our implementation of the Deref trait
        assert_eq!(&*native_type, TEST_ACCESS_TOKEN);
    }

    // Same bracketing check for FirebaseToken conversion, plus field copies.
    #[test]
    fn test_firebase_from_fidl() {
        let fidl_type = fidl_fuchsia_auth::FirebaseToken {
            id_token: TEST_FIREBASE_ID_TOKEN.to_string(),
            local_id: Some(TEST_FIREBASE_LOCAL_ID.to_string()),
            email: Some(TEST_EMAIL.to_string()),
            expires_in: LONG_EXPIRY.as_secs(),
        };
        let time_before_conversion = SystemTime::now();
        let native_type = FirebaseAuthToken::from(fidl_type);
        let time_after_conversion = SystemTime::now();
        assert_eq!(&native_type.id_token, TEST_FIREBASE_ID_TOKEN);
        assert_eq!(native_type.local_id, Some(TEST_FIREBASE_LOCAL_ID.to_string()));
        assert_eq!(native_type.email, Some(TEST_EMAIL.to_string()));
        assert!(native_type.expiry_time >= time_before_conversion + LONG_EXPIRY);
        assert!(native_type.expiry_time <= time_after_conversion + LONG_EXPIRY);
    }

    // Round trip back to FIDL: `expires_in` must be recomputed relative to
    // "now", so it is bounded above by the original duration and below by the
    // duration minus the (measured) conversion time, with 1s slack.
    #[test]
    fn test_firebase_to_fidl() {
        let time_before_conversion = SystemTime::now();
        let native_type = FirebaseAuthToken {
            id_token: TEST_FIREBASE_ID_TOKEN.to_string(),
            local_id: Some(TEST_FIREBASE_LOCAL_ID.to_string()),
            email: Some(TEST_EMAIL.to_string()),
            expiry_time: time_before_conversion + LONG_EXPIRY,
        };
        let fidl_type = native_type.to_fidl();
        let elapsed_time_during_conversion =
            SystemTime::now().duration_since(time_before_conversion).unwrap();
        assert_eq!(&fidl_type.id_token, TEST_FIREBASE_ID_TOKEN);
        assert_eq!(fidl_type.local_id, Some(TEST_FIREBASE_LOCAL_ID.to_string()));
        assert_eq!(fidl_type.email, Some(TEST_EMAIL.to_string()));
        assert!(fidl_type.expires_in <= LONG_EXPIRY.as_secs());
        assert!(
            fidl_type.expires_in
                >= (LONG_EXPIRY.as_secs() - elapsed_time_during_conversion.as_secs()) - 1
        );
    }

    // Covers multi-scope (newline-joined), single-scope, and empty-scope key
    // construction, plus the empty-field validation failures.
    #[test]
    fn test_create_access_token_key() {
        let scopes = vec![TEST_SCOPE_1, TEST_SCOPE_2];
        let auth_token_key = AccessTokenKey::new(
            TEST_AUTH_PROVIDER_TYPE.to_string(),
            TEST_USER_PROFILE_ID.to_string(),
            &scopes,
        )
        .unwrap();
        assert_eq!(
            AccessTokenKey {
                auth_provider_type: TEST_AUTH_PROVIDER_TYPE.to_string(),
                user_profile_id: TEST_USER_PROFILE_ID.to_string(),
                scopes: TEST_SCOPE_1.to_string() + "\n" + TEST_SCOPE_2,
            },
            auth_token_key
        );
        // Verify single scope creation
        let single_scope = vec![TEST_SCOPE_1];
        let auth_token_key = AccessTokenKey::new(
            TEST_AUTH_PROVIDER_TYPE.to_string(),
            TEST_USER_PROFILE_ID.to_string(),
            &single_scope,
        )
        .unwrap();
        assert_eq!(
            AccessTokenKey {
                auth_provider_type: TEST_AUTH_PROVIDER_TYPE.to_string(),
                user_profile_id: TEST_USER_PROFILE_ID.to_string(),
                scopes: TEST_SCOPE_1.to_string(),
            },
            auth_token_key
        );
        // Verify no scopes creation
        let no_scopes: Vec<&str> = vec![];
        let auth_token_key = AccessTokenKey::new(
            TEST_AUTH_PROVIDER_TYPE.to_string(),
            TEST_USER_PROFILE_ID.to_string(),
            &no_scopes,
        )
        .unwrap();
        assert_eq!(
            AccessTokenKey {
                auth_provider_type: TEST_AUTH_PROVIDER_TYPE.to_string(),
                user_profile_id: TEST_USER_PROFILE_ID.to_string(),
                scopes: "".to_string(),
            },
            auth_token_key
        );
        // Verify empty auth provider and user profile id cases fail.
        assert!(AccessTokenKey::new("".to_string(), TEST_USER_PROFILE_ID.to_string(), &no_scopes)
            .is_err());
        assert!(AccessTokenKey::new(
            TEST_AUTH_PROVIDER_TYPE.to_string(),
            "".to_string(),
            &no_scopes
        )
        .is_err());
    }

    // ID token key construction and empty-field validation failures.
    #[test]
    fn test_create_id_token_key() {
        assert_eq!(
            IdTokenKey::new(
                TEST_AUTH_PROVIDER_TYPE.to_string(),
                TEST_USER_PROFILE_ID.to_string(),
                TEST_AUDIENCE.to_string()
            )
            .unwrap(),
            IdTokenKey {
                auth_provider_type: TEST_AUTH_PROVIDER_TYPE.to_string(),
                user_profile_id: TEST_USER_PROFILE_ID.to_string(),
                audience: TEST_AUDIENCE.to_string()
            }
        );
        // Verify empty auth provider and user profile id cases fail.
        assert!(IdTokenKey::new(
            "".to_string(),
            TEST_USER_PROFILE_ID.to_string(),
            TEST_AUDIENCE.to_string()
        )
        .is_err());
        assert!(IdTokenKey::new(
            TEST_AUTH_PROVIDER_TYPE.to_string(),
            "".to_string(),
            TEST_AUDIENCE.to_string()
        )
        .is_err());
    }

    // Firebase token key construction and empty-field validation failures.
    #[test]
    fn test_create_firebase_token_key() {
        assert_eq!(
            FirebaseTokenKey::new(
                TEST_AUTH_PROVIDER_TYPE.to_string(),
                TEST_USER_PROFILE_ID.to_string(),
                TEST_FIREBASE_API.to_string()
            )
            .unwrap(),
            FirebaseTokenKey {
                auth_provider_type: TEST_AUTH_PROVIDER_TYPE.to_string(),
                user_profile_id: TEST_USER_PROFILE_ID.to_string(),
                api_key: TEST_FIREBASE_API.to_string()
            }
        );
        // Verify empty auth provider and user profile id cases fail.
        assert!(FirebaseTokenKey::new(
            "".to_string(),
            TEST_USER_PROFILE_ID.to_string(),
            TEST_FIREBASE_API.to_string()
        )
        .is_err());
        assert!(FirebaseTokenKey::new(
            TEST_AUTH_PROVIDER_TYPE.to_string(),
            "".to_string(),
            TEST_FIREBASE_API.to_string()
        )
        .is_err());
    }
}
|
//! Local storage.
//!
//! Currently this consists of a Sqlite backend implementation.
// This is intended for internal use only -- do not make public.
mod prelude;
mod connection;
pub mod fake;
mod params;
mod schema;
pub mod test_utils;
use std::num::NonZeroU32;
use std::path::{Path, PathBuf};
use std::sync::Arc;
pub use connection::*;
use pathfinder_common::{BlockHash, BlockNumber};
use rusqlite::functions::FunctionFlags;
use anyhow::Context;
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
/// Sqlite key used for the PRAGMA user version.
const VERSION_KEY: &str = "user_version";
/// Specifies the [journal mode](https://sqlite.org/pragma.html#pragma_journal_mode)
/// of the [Storage].
#[derive(Clone, Copy)]
pub enum JournalMode {
    /// Classic rollback journal (maps to `journal_mode=DELETE`).
    Rollback,
    /// Write-ahead logging.
    WAL,
}
/// Identifies a specific starknet block stored in the database.
///
/// Note that this excludes the `Pending` variant since we never store pending data
/// in the database.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BlockId {
    /// The most recent block stored in the database.
    Latest,
    /// A block identified by its number (height).
    Number(BlockNumber),
    /// A block identified by its hash.
    Hash(BlockHash),
}
impl From<BlockHash> for BlockId {
    fn from(value: BlockHash) -> Self {
        Self::Hash(value)
    }
}
impl From<BlockNumber> for BlockId {
    fn from(value: BlockNumber) -> Self {
        Self::Number(value)
    }
}
// Fallible conversion: the common BlockId includes `Pending`, which has no
// storage representation and is therefore rejected.
impl TryFrom<pathfinder_common::BlockId> for BlockId {
    type Error = &'static str;
    fn try_from(value: pathfinder_common::BlockId) -> Result<Self, Self::Error> {
        match value {
            pathfinder_common::BlockId::Number(x) => Ok(BlockId::Number(x)),
            pathfinder_common::BlockId::Hash(x) => Ok(BlockId::Hash(x)),
            pathfinder_common::BlockId::Latest => Ok(BlockId::Latest),
            pathfinder_common::BlockId::Pending => {
                Err("Pending is invalid within the storage context")
            }
        }
    }
}
/// Used to create [Connection's](Connection) to the pathfinder database.
///
/// Intended usage:
/// - Use [Storage::migrate] to create the app's database.
/// - Pass the [Storage] (or clones thereof) to components which require database access.
/// - Use [Storage::connection] to create connection's to the database, which can in turn
/// be used to interact with the various [tables](self).
#[derive(Clone)]
pub struct Storage(Inner);
#[derive(Clone)]
struct Inner {
    /// Uses [`Arc`] to allow _shallow_ [Storage] cloning
    database_path: Arc<PathBuf>,
    /// r2d2 connection pool; `Pool` is itself cheaply cloneable.
    pool: Pool<SqliteConnectionManager>,
}
/// Handle to an already-migrated database file; produces [Storage] pools.
pub struct StorageManager(PathBuf);
impl StorageManager {
    /// Builds a connection pool of at most `capacity` connections over the
    /// migrated database file and wraps it in a [Storage] handle.
    pub fn create_pool(&self, capacity: NonZeroU32) -> anyhow::Result<Storage> {
        // Every pooled connection runs `setup_connection` on creation.
        let manager = SqliteConnectionManager::file(&self.0).with_init(setup_connection);
        let pool = Pool::builder().max_size(capacity.get()).build(manager)?;
        let inner = Inner {
            database_path: Arc::new(self.0.clone()),
            pool,
        };
        Ok(Storage(inner))
    }
}
impl Storage {
    /// Performs the database schema migration and returns a [storage manager](StorageManager).
    ///
    /// This should be called __once__ at the start of the application,
    /// and passed to the various components which require access to the database.
    ///
    /// Panics if u32
    pub fn migrate(
        database_path: PathBuf,
        journal_mode: JournalMode,
    ) -> anyhow::Result<StorageManager> {
        // A dedicated (non-pooled) connection is used for setup so the
        // journal-mode change and migrations happen before any pool exists.
        let mut connection = rusqlite::Connection::open(&database_path)
            .context("Opening DB for setting journal mode")?;
        setup_connection(&mut connection).context("Setting up database connection")?;
        setup_journal_mode(&mut connection, journal_mode).context("Setting journal mode")?;
        migrate_database(&mut connection).context("Migrate database")?;
        // Close explicitly so errors surface instead of being dropped.
        connection
            .close()
            .map_err(|(_connection, error)| error)
            .context("Closing DB after setting journal mode")?;
        Ok(StorageManager(database_path))
    }
    /// Returns a new Sqlite [Connection] to the database.
    pub fn connection(&self) -> anyhow::Result<Connection> {
        let conn = self.0.pool.get()?;
        Ok(Connection::from_inner(conn))
    }
    /// Convenience function for tests to create an in-memory database.
    /// Equivalent to [Storage::migrate] with an in-memory backed database.
    // No longer cfg(test) because needed in benchmarks
    pub fn in_memory() -> anyhow::Result<Self> {
        // Create a unique database name so that they are not shared between
        // concurrent tests. i.e. Make every in-mem Storage unique.
        lazy_static::lazy_static!(
            static ref COUNT: std::sync::Mutex<u64> = Default::default();
        );
        let unique_mem_db = {
            let mut count = COUNT.lock().unwrap();
            // &cache=shared allows other threads to see and access the inmemory database
            let unique_mem_db = format!("file:memdb{count}?mode=memory&cache=shared");
            *count += 1;
            unique_mem_db
        };
        let database_path = PathBuf::from(unique_mem_db);
        // This connection must be held until a pool has been created, since an
        // in-memory database is dropped once all its connections are. This connection
        // therefore holds the database in-place until the pool is established.
        let _conn = rusqlite::Connection::open(&database_path)?;
        let storage = Self::migrate(database_path, JournalMode::Rollback)?;
        storage.create_pool(NonZeroU32::new(5).unwrap())
    }
    /// Path of the underlying database file.
    pub fn path(&self) -> &Path {
        &self.0.database_path
    }
}
/// Applies the pragmas appropriate for the requested journalling strategy.
fn setup_journal_mode(
    connection: &mut rusqlite::Connection,
    journal_mode: JournalMode,
) -> Result<(), rusqlite::Error> {
    match journal_mode {
        JournalMode::Rollback => connection.pragma_update(None, "journal_mode", "DELETE"),
        JournalMode::WAL => {
            connection.pragma_update(None, "journal_mode", "WAL")?;
            // Cap the write-ahead log at 1 GiB.
            let journal_limit_bytes = 1024usize * 1024 * 1024;
            connection.pragma_update(
                None,
                "journal_size_limit",
                journal_limit_bytes.to_string(),
            )?;
            // Per the Sqlite documentation, NORMAL is a good choice with WAL.
            connection.pragma_update(None, "synchronous", "normal")
        }
    }
}
/// Per-connection setup: enables foreign keys and registers the custom
/// SQL function used for event-key filtering.
fn setup_connection(connection: &mut rusqlite::Connection) -> Result<(), rusqlite::Error> {
    // enable foreign keys
    connection.set_db_config(
        rusqlite::config::DbConfig::SQLITE_DBCONFIG_ENABLE_FKEY,
        true,
    )?;
    // Deterministic scalar function so Sqlite may cache/optimize calls.
    connection.create_scalar_function(
        "base64_felts_to_index_prefixed_base32_felts",
        1,
        FunctionFlags::SQLITE_UTF8 | FunctionFlags::SQLITE_DETERMINISTIC,
        move |ctx| {
            assert_eq!(ctx.len(), 1, "called with unexpected number of arguments");
            let base64_felts = ctx
                .get_raw(0)
                .as_str()
                .map_err(|e| rusqlite::Error::UserFunctionError(e.into()))?;
            Ok(base64_felts_to_index_prefixed_base32_felts(base64_felts))
        },
    )?;
    Ok(())
}
/// Re-encodes a space-separated list of base64 felts as space-separated
/// base32 strings, each prefixed with its position byte.
fn base64_felts_to_index_prefixed_base32_felts(base64_felts: &str) -> String {
    base64_felts
        .split(' ')
        // Convert only the first 256 elements so that the index fits into one u8
        // we will use as a prefix byte.
        .take(connection::EVENT_KEY_FILTER_LIMIT)
        .enumerate()
        .map(|(position, encoded_key)| {
            // 1 index-prefix byte followed by 32 bytes of felt data.
            let mut bytes = [0u8; 33];
            bytes[0] = position as u8;
            base64::decode_config_slice(encoded_key, base64::STANDARD, &mut bytes[1..]).unwrap();
            data_encoding::BASE32_NOPAD.encode(&bytes)
        })
        .collect::<Vec<_>>()
        .join(" ")
}
/// Migrates the database to the latest version. This __MUST__ be called
/// at the beginning of the application.
fn migrate_database(connection: &mut rusqlite::Connection) -> anyhow::Result<()> {
    let mut current_revision = schema_version(connection)?;
    let migrations = schema::migrations();
    // The target version is the number of null migrations which have been replaced
    // by the base schema + the new migrations built on top of that.
    let latest_revision = schema::BASE_SCHEMA_REVISION + migrations.len();
    // Apply the base schema if the database is new.
    if current_revision == 0 {
        let tx = connection
            .transaction()
            .context("Create database transaction")?;
        schema::base_schema(&tx).context("Applying base schema")?;
        tx.pragma_update(None, VERSION_KEY, schema::BASE_SCHEMA_REVISION)
            .context("Failed to update the schema version number")?;
        tx.commit().context("Commit migration transaction")?;
        current_revision = schema::BASE_SCHEMA_REVISION;
    }
    // Skip migration if we already at latest.
    if current_revision == latest_revision {
        tracing::info!(%current_revision, "No database migrations required");
        return Ok(());
    }
    // Check for database version compatibility.
    if current_revision < schema::BASE_SCHEMA_REVISION {
        tracing::error!(
            version=%current_revision,
            limit=%schema::BASE_SCHEMA_REVISION,
            "Database version is too old to migrate"
        );
        anyhow::bail!("Database version {current_revision} too old to migrate");
    }
    if current_revision > latest_revision {
        tracing::error!(
            version=%current_revision,
            limit=%latest_revision,
            "Database version is from a newer than this application expected"
        );
        anyhow::bail!(
            "Database version {current_revision} is newer than this application expected {latest_revision}",
        );
    }
    let amount = latest_revision - current_revision;
    tracing::info!(%current_revision, %latest_revision, migrations=%amount, "Performing database migrations");
    // Sequentially apply each missing migration.
    // rev().take(amount).rev() selects the LAST `amount` migrations while
    // preserving their original (ascending) order.
    migrations
        .iter()
        .rev()
        .take(amount)
        .rev()
        .try_for_each(|migration| {
            let mut do_migration = || -> anyhow::Result<()> {
                current_revision += 1;
                let span = tracing::info_span!("db_migration", revision = current_revision);
                let _enter = span.enter();
                // Each migration runs in its own transaction so a failure
                // leaves the database at the previous (consistent) revision.
                let transaction = connection
                    .transaction()
                    .context("Create database transaction")?;
                migration(&transaction)?;
                transaction
                    .pragma_update(None, VERSION_KEY, current_revision)
                    .context("Failed to update the schema version number")?;
                transaction
                    .commit()
                    .context("Commit migration transaction")?;
                Ok(())
            };
            do_migration().with_context(|| format!("Migrating to {current_revision}"))
        })?;
    Ok(())
}
/// Returns the current schema version of the existing database,
/// or `0` if database does not yet exist.
fn schema_version(connection: &rusqlite::Connection) -> anyhow::Result<usize> {
    // The schema version lives in Sqlite's PRAGMA "user_version", which
    // stores an INTEGER and defaults to 0; read it via the pragma
    // table-valued function.
    let query = format!("SELECT {VERSION_KEY} FROM pragma_user_version;");
    let version: usize = connection.query_row(&query, [], |row| row.get(0))?;
    Ok(version)
}
#[cfg(test)]
mod tests {
    use pathfinder_common::felt;
    use stark_hash::Felt;

    use super::*;

    // A fresh database reports user_version 0.
    #[test]
    fn schema_version_defaults_to_zero() {
        let mut conn = rusqlite::Connection::open_in_memory().unwrap();
        let transaction = conn.transaction().unwrap();
        let version = schema_version(&transaction).unwrap();
        assert_eq!(version, 0);
    }

    // Migrating an empty database lands exactly on the latest revision.
    #[test]
    fn full_migration() {
        let mut conn = rusqlite::Connection::open_in_memory().unwrap();
        setup_connection(&mut conn).unwrap();
        migrate_database(&mut conn).unwrap();
        let version = schema_version(&conn).unwrap();
        let expected = schema::migrations().len() + schema::BASE_SCHEMA_REVISION;
        assert_eq!(version, expected);
    }

    #[test]
    fn migration_fails_if_db_is_newer() {
        let mut conn = rusqlite::Connection::open_in_memory().unwrap();
        setup_connection(&mut conn).unwrap();
        // Force the schema to a newer version
        let current_version = schema::migrations().len();
        conn.pragma_update(None, VERSION_KEY, current_version + 1)
            .unwrap();
        // Migration should fail.
        migrate_database(&mut conn).unwrap_err();
    }

    #[test]
    fn foreign_keys_are_enforced() {
        let conn = rusqlite::Connection::open_in_memory().unwrap();
        // We first disable foreign key support. Sqlite currently enables this by default,
        // but this may change in the future. So we disable to check that our enable function
        // works regardless of what Sqlite's default is.
        use rusqlite::config::DbConfig::SQLITE_DBCONFIG_ENABLE_FKEY;
        conn.set_db_config(SQLITE_DBCONFIG_ENABLE_FKEY, false)
            .unwrap();
        // Enable foreign key support.
        conn.set_db_config(SQLITE_DBCONFIG_ENABLE_FKEY, true)
            .unwrap();
        // Create tables with a parent-child foreign key requirement.
        conn.execute_batch(
            r"
                    CREATE TABLE parent(
                        id INTEGER PRIMARY KEY
                    );
                    CREATE TABLE child(
                        id INTEGER PRIMARY KEY,
                        parent_id INTEGER NOT NULL REFERENCES parent(id)
                    );
                ",
        )
        .unwrap();
        // Check that foreign keys are enforced.
        conn.execute("INSERT INTO parent (id) VALUES (2)", [])
            .unwrap();
        conn.execute("INSERT INTO child (id, parent_id) VALUES (0, 2)", [])
            .unwrap();
        conn.execute("INSERT INTO child (id, parent_id) VALUES (1, 1)", [])
            .unwrap_err();
    }

    // Golden-value check of the base64 -> index-prefixed base32 re-encoding.
    #[test]
    fn felts_to_index_prefixed_base32_strings() {
        let input: String = [felt!("0x901823"), felt!("0x901823"), felt!("0x901825")]
            .iter()
            .map(|f| base64::encode(f.as_be_bytes()))
            .collect::<Vec<_>>()
            .join(" ");
        assert_eq!(
            super::base64_felts_to_index_prefixed_base32_felts(&input),
            "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASAMCG AEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASAMCG AIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASAMCK".to_owned()
        );
    }

    // Inputs beyond the 256-element limit are silently dropped.
    #[test]
    fn felts_to_index_prefixed_base32_strings_encodes_the_first_256_felts() {
        let input = [Felt::ZERO; 257]
            .iter()
            .map(|f| base64::encode(f.as_be_bytes()))
            .collect::<Vec<_>>()
            .join(" ");
        let output = super::base64_felts_to_index_prefixed_base32_felts(&input);
        assert_eq!(output.split(' ').count(), 256);
    }

    // Guards against forgetting to migrate the checked-in RPC fixture DB.
    #[test]
    fn rpc_test_db_is_migrated() {
        let mut source_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        source_path.push("../rpc/fixtures/mainnet.sqlite");
        let db_dir = tempfile::TempDir::new().unwrap();
        let mut db_path = PathBuf::from(db_dir.path());
        db_path.push("mainnet.sqlite");
        // Copy to a temp dir so the fixture itself is never modified.
        std::fs::copy(&source_path, &db_path).unwrap();
        let database = rusqlite::Connection::open(db_path).unwrap();
        let version = schema_version(&database).unwrap();
        let expected = schema::migrations().len() + schema::BASE_SCHEMA_REVISION;
        assert_eq!(version, expected, "RPC database fixture needs migrating");
    }
}
|
#![deny(missing_docs)]
#![cfg_attr(docsrs, feature(doc_cfg))]
//! Coi provides an easy to use dependency injection framework.
//! Currently, this crate provides the following:
//! - **[`coi::Inject` (trait)]** - a marker trait that indicates a trait or struct is injectable.
//! - **[`coi::Provide` (trait)]** - a trait that indicates a struct is capable of providing a specific
//! implementation of some injectable trait. This is generated for you if you use
//! [`coi::Inject` (derive)] or [`coi::Provide` (derive)], but can also be written manually.
//! - **[`coi::Container`]** - a container to manage the lifetime of all dependencies. This is still
//! in its early stages, and currently only supports objects that are recreated with each request to
//! [`coi::Container::resolve`].
//! - **[`coi::ContainerBuilder`]** - a builder for the above container to simplify construction and
//! guarantee immutability after construction.
//!
//! [`coi::Inject` (trait)]: trait.Inject.html
//! [`coi::Inject` (derive)]: derive.Inject.html
//! [`coi::Provide` (trait)]: trait.Provide.html
//! [`coi::Provide` (derive)]: derive.Provide.html
//! [`coi::Container`]: struct.Container.html
//! [`coi::Container::resolve`]: struct.Container.html#method.resolve
//! [`coi::ContainerBuilder`]: struct.ContainerBuilder.html
//!
//! # Example
//!
//! ```rust
//! use coi::{container, Inject};
//! use std::sync::Arc;
//!
//! // Mark injectable traits by inheriting the `Inject` trait.
//! trait Trait1: Inject {
//! fn describe(&self) -> &'static str;
//! }
//!
//! // For structs that will provide the implementation of an injectable trait, derive `Inject`
//! // and specify which expr will be used to inject which trait. The method can be any path.
//! // The arguments for the method are derived from fields marked with the attribute
//! // `#[coi(inject)]` (See Impl2 below).
//! #[derive(Inject)]
//! #[coi(provides dyn Trait1 with Impl1)]
//! struct Impl1;
//!
//! // Don't forget to actually implement the trait.
//! impl Trait1 for Impl1 {
//! fn describe(&self) -> &'static str {
//! "I'm impl1!"
//! }
//! }
//!
//! // Mark injectable traits by inheriting the `Inject` trait.
//! trait Trait2: Inject {
//! fn deep_describe(&self) -> String;
//! }
//!
//! // For structs that will provide the implementation of an injectable trait, derive `Inject`
//! // and specify which method will be used to inject which trait. The arguments for the method
//! // are derived from fields marked with the attribute `#[coi(inject)]`, so the parameter name
//! // must match a field name.
//! #[derive(Inject)]
//! #[coi(provides dyn Trait2 with Impl2::new(trait1))]
//! struct Impl2 {
//! // The name of the field is important! It must match the name that's registered in the
//! // container when the container is being built! This is similar to the behavior of
//! // dependency injection libraries in other languages.
//! #[coi(inject)]
//! trait1: Arc<dyn Trait1>,
//! }
//!
//! // Implement the provider method
//! impl Impl2 {
//! // Note: The param name here doesn't actually matter.
//! fn new(trait1: Arc<dyn Trait1>) -> Self {
//! Self { trait1 }
//! }
//! }
//!
//! // Again, don't forget to actually implement the trait.
//! impl Trait2 for Impl2 {
//! fn deep_describe(&self) -> String {
//! format!("I'm impl2! and I have {}", self.trait1.describe())
//! }
//! }
//!
//! // "Provider" structs are automatically generated through the `Inject` attribute. They
//! // append `Provider` to the name of the struct that is being derived (make sure you don't
//! // have any structs with the same name or your code will fail to compile).
//! // Reminder: Make sure you use the same key here as the field names of the structs that
//! // require these impls.
//! let mut container = container! {
//! trait1 => Impl1Provider,
//! trait2 => Impl2Provider,
//! };
//!
//! // Once the container is built, you can now resolve any particular instance by its key and
//! // the trait it provides. This crate currently only supports `Arc<dyn Trait>`, but this may
//! // be expanded in a future version of the crate.
//! let trait2 = container
//! // Note: Getting the key wrong will produce an error telling you which key in the
//! // chain of dependencies caused the failure (future versions might provide the
//! // chain of keys that led to the failure). Getting the type wrong will only tell you which key
//! // had the wrong type. This is because at runtime, we do not have any type information,
//! // only unique ids (that change during each compilation).
//! .resolve::<dyn Trait2>("trait2")
//! .expect("Should exist");
//! println!("Deep description: {}", trait2.deep_describe());
//! ```
//!
//! # How this crate works in more detail
//!
//! For any trait you wish to abstract over, have it inherit the `Inject` trait. For structs, impl
//! `Inject` for that struct, e.g.
//! ```rust
//! # use coi::Inject;
//! trait Trait1: Inject {}
//!
//! struct Struct1;
//!
//! impl Inject for Struct1 {}
//! ```
//!
//! Then, in order to register the injectable item with the [`coi::ContainerBuilder`], you also
//! need a struct that impls `Provide<Output = T>` where `T` is your trait or struct. `Provide`
//! exposes a `provide` fn that takes `&self` and `&Container`. When manually implementing `Provide`
//! you must resolve all dependencies with `container`. Here's an example below:
//!
//! ```rust
//! # use coi::{Container, Inject, Provide};
//! # use std::sync::{Arc, Mutex};
//! # trait Trait1: Inject {}
//! #
//! trait Dependency: Inject {}
//!
//! struct Impl1 {
//! dependency: Arc<dyn Dependency>,
//! }
//!
//! impl Impl1 {
//! fn new(dependency: Arc<dyn Dependency>) -> Self {
//! Self { dependency }
//! }
//! }
//!
//! impl Inject for Impl1 {}
//!
//! impl Trait1 for Impl1 {}
//!
//! struct Trait1Provider;
//!
//! impl Provide for Trait1Provider {
//! type Output = dyn Trait1;
//!
//! fn provide(&self, container: &Container) -> coi::Result<Arc<Self::Output>> {
//! let dependency = container.resolve::<dyn Dependency>("dependency")?;
//! Ok(Arc::new(Impl1::new(dependency)) as Arc<dyn Trait1>)
//! }
//! }
//! ```
//!
//! The `"dependency"` above of course needs to be registered in order for the call
//! to `resolve` to not error out:
//!
//! ```rust
//! # use coi::{container, Container, Inject, Provide};
//! # use std::sync::Arc;
//! # trait Trait1: Inject {}
//! # trait Dependency: Inject {}
//! #
//! # struct Impl1 {
//! # dependency: Arc<dyn Dependency>,
//! # }
//! # impl Impl1 {
//! # fn new(dependency: Arc<dyn Dependency>) -> Self {
//! # Self { dependency }
//! # }
//! # }
//! # impl Inject for Impl1 {}
//! # impl Trait1 for Impl1 {}
//! #
//! # struct Trait1Provider;
//! #
//! # impl Provide for Trait1Provider {
//! # type Output = dyn Trait1;
//! # fn provide(&self, container: &Container) -> coi::Result<Arc<Self::Output>> {
//! # let dependency = container.resolve::<dyn Dependency>("dependency")?;
//! # Ok(Arc::new(Impl1::new(dependency)) as Arc<dyn Trait1>)
//! # }
//! # }
//! struct DepImpl;
//!
//! impl Dependency for DepImpl {}
//!
//! impl Inject for DepImpl {}
//!
//! struct DependencyProvider;
//!
//! impl Provide for DependencyProvider {
//! type Output = dyn Dependency;
//!
//! fn provide(&self, _: &Container) -> coi::Result<Arc<Self::Output>> {
//! Ok(Arc::new(DepImpl) as Arc<dyn Dependency>)
//! }
//! }
//!
//! let mut container = container! {
//! trait1 => Trait1Provider,
//! dependency => DependencyProvider,
//! };
//! let trait1 = container.resolve::<dyn Trait1>("trait1");
//! ```
//!
//! In general, you usually won't want to write all of that. You would instead want to use the
//! procedural macro (see example above).
//! The detailed docs for that are at [`coi::Inject` (derive)]
//!
//! # Debugging
//!
//! To turn on debugging features, enable the `debug` feature (see below), then you'll have access
//! to the following changes:
//!
//! * Formatting a container with `{:?}` will also list the dependencies (in A: Vec<B> style)
//! * `Container` will get an [`analyze`] fn, which will return an error if any misconfiguration is
//! detected. See the docs for [`analyze`] for more details.
//! * `Container` will get a [`dot_graph`] fn, which will return a string that can be passed to
//! [graphviz]'s dot command to generate a graph. The image below was generated with the sample
//! project that's in this crate's repository (output saved to `deps.dot` then ran
//! `dot -Tsvg deps.dot -o deps.svg `):
//!
//! <div>
//! <svg width="168pt" height="188pt"
//! viewBox="0.00 0.00 167.89 188.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
//! <g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 184)">
//! <title>%3</title>
//! <polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-184 163.8858,-184 163.8858,4 -4,4"/>
//! <!-- 0 -->
//! <g id="node1" class="node">
//! <title>0</title>
//! <ellipse fill="none" stroke="#000000" cx="79.9429" cy="-18" rx="67.6881" ry="18"/>
//! <text text-anchor="middle" x="79.9429" y="-14.3" font-family="Times,serif" font-size="14.00" fill="#000000">Singleton - pool</text>
//! </g>
//! <!-- 1 -->
//! <g id="node2" class="node">
//! <title>1</title>
//! <ellipse fill="none" stroke="#000000" cx="79.9429" cy="-90" rx="79.8859" ry="18"/>
//! <text text-anchor="middle" x="79.9429" y="-86.3" font-family="Times,serif" font-size="14.00" fill="#000000">Scoped - repository</text>
//! </g>
//! <!-- 1->0 -->
//! <g id="edge1" class="edge">
//! <title>1->0</title>
//! <path fill="none" stroke="#000000" d="M79.9429,-71.8314C79.9429,-64.131 79.9429,-54.9743 79.9429,-46.4166"/>
//! <polygon fill="#000000" stroke="#000000" points="83.443,-46.4132 79.9429,-36.4133 76.443,-46.4133 83.443,-46.4132"/>
//! </g>
//! <!-- 2 -->
//! <g id="node3" class="node">
//! <title>2</title>
//! <ellipse fill="none" stroke="#000000" cx="79.9429" cy="-162" rx="69.5877" ry="18"/>
//! <text text-anchor="middle" x="79.9429" y="-158.3" font-family="Times,serif" font-size="14.00" fill="#000000">Scoped - service</text>
//! </g>
//! <!-- 2->1 -->
//! <g id="edge2" class="edge">
//! <title>2->1</title>
//! <path fill="none" stroke="#000000" d="M79.9429,-143.8314C79.9429,-136.131 79.9429,-126.9743 79.9429,-118.4166"/>
//! <polygon fill="#000000" stroke="#000000" points="83.443,-118.4132 79.9429,-108.4133 76.443,-118.4133 83.443,-118.4132"/>
//! </g>
//! </g>
//! </svg>
//! </div>
//!
//! [`analyze`]: struct.Container.html#method.analyze
//! [`dot_graph`]: struct.Container.html#method.dot_graph
//! [graphviz]: https://www.graphviz.org/
//!
//! # Features
//!
//! Compilation taking too long? Turn off features you're not using.
//!
//! To not use the default:
//! ```toml
//! # Cargo.toml
//! [dependencies]
//! coi = { version = "...", default-features = false }
//! ```
//!
//! Why the #$*%T won't my container work!?
//!
//! To turn on debugging features:
//! ```toml
//! # Cargo.toml
//! [dependencies]
//! coi = { version = "...", default-features = false, features = ["debug"] }
//! ```
//!
//! - default: `derive` - Procedural macros are re-exported.
//! - debug: `Debug` impl
//! - None - Procedural macros are not re-exported.
//!
//! # Help
//!
//! ## External traits
//!
//! Want to inject a trait that's not marked `Inject`? There's a very simple solution!
//! It works even if the intended trait is not part of your crate.
//! ```rust
//! # use coi::Inject;
//! // other.rs
//! pub trait Trait {
//! # /*
//! ...
//! # */
//! }
//!
//! // your_lib.rs
//! # /*
//! use coi::Inject;
//! use other::Trait;
//! # */
//!
//! // Just inherit the intended trait and `Inject` on a trait in your crate,
//! // and make sure to also impl both traits for the intended provider.
//! pub trait InjectableTrait : Trait + Inject {}
//!
//! #[derive(Inject)]
//! #[coi(provides pub dyn InjectableTrait with Impl{})]
//! struct Impl {
//! # /*
//! ...
//! # */
//! }
//!
//! impl Trait for Impl {
//! # /*
//! ...
//! # */
//! }
//!
//! impl InjectableTrait for Impl {}
//! ```
//!
//! ## Where are the factory registrations!?
//!
//! If you're familiar with dependency injection in other languages, you might
//! be used to factory registration where you can provide a method/closure/lambda/etc.
//! during registration. Since the crate works off of the `Provide` trait, you would
//! have to manually implement `Provide` for your factory method. This would also
//! require you to manually retrieve your dependencies from the passed in `Container`
//! as shown in the docs above.
//!
//! ## Why can't I derive `Inject` when my struct contains a reference?
//!
//! In order to store all of the resolved types, we have to use
//! [`std::any::Any`], which, unfortunately, has the restriction `Any: 'static`.
//! This is because it's not yet known if there's a safe way to downcast to a
//! type with a reference (See the comments in this [tracking issue]).
//!
//! [`std::any::Any`]: https://doc.rust-lang.org/std/any/trait.Any.html
//! [tracking issue]: https://github.com/rust-lang/rust/issues/41875
use rustc_hash::FxHashMap as HashMap;
use std::any::Any;
use std::sync::{Arc, Mutex};
#[cfg(any(feature = "derive", feature = "debug"))]
pub use coi_derive::*;
#[cfg(feature = "debug")]
use petgraph::{
algo::toposort,
graph::{DiGraph, NodeIndex},
};
#[cfg(feature = "debug")]
use std::fmt::{self, Debug};
/// Errors produced by this crate
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// This key was not found in the container. Either the requested resource was never registered
    /// with this container, or there is a typo in the register or resolve calls.
    #[error("Key not found: {0}")]
    KeyNotFound(String),
    /// The requested key was found in the container, but its type did not match the requested type.
    #[error("Type mismatch for key: {0}")]
    TypeMismatch(String),
    /// Wrapper around errors produced by `Provider`s.
    #[error("Inner error: {0}")]
    Inner(#[from] Box<dyn std::error::Error + Send + Sync + 'static>),
}
/// Type alias to `Result<T, coi::Error>`.
pub type Result<T> = std::result::Result<T, Error>;
/// A marker trait for injectable traits and structs.
pub trait Inject: Send + Sync + 'static {}
// Blanket impl: an `Arc` around any injectable (possibly unsized) type is itself injectable.
impl<T: Inject + ?Sized> Inject for Arc<T> {}
/// Control when `Container` will call `Provide::provide`.
#[derive(Copy, Clone, Debug)]
pub enum RegistrationKind {
    /// `Container` will construct a new instance of `T` for every invocation
    /// of `Container::resolve`.
    ///
    /// # Example
    /// ```rust
    /// # use coi::{container, Inject, Result};
    /// # use std::ops::Deref;
    /// # trait Trait: Inject {}
    /// # #[derive(Inject)]
    /// # #[coi(provides dyn Trait with Impl)]
    /// # struct Impl;
    /// # impl Trait for Impl {}
    /// # fn the_test() -> Result<()> {
    /// let mut container = container! {
    ///     // same as trait => ImplProvider.transient
    ///     trait => ImplProvider
    /// };
    ///
    /// let instance_1 = container.resolve::<dyn Trait>("trait")?;
    /// let instance_2 = container.resolve::<dyn Trait>("trait")?;
    ///
    /// // Every instance resolved from the container will be a distinct instance.
    /// assert_ne!(
    ///     instance_1.deref() as &dyn Trait as *const _,
    ///     instance_2.deref() as &dyn Trait as *const _
    /// );
    /// # Ok(())
    /// # }
    /// # the_test().unwrap()
    /// ```
    Transient,
    /// `Container` will construct a new instance of `T` for each scope
    /// container created through `Container::scoped`.
    ///
    /// # Example
    /// ```rust
    /// # use coi::{container, Inject, Result};
    /// # use std::{ops::Deref, sync::{Arc, Mutex}};
    /// # trait Trait: Inject {}
    /// # #[derive(Inject)]
    /// # #[coi(provides dyn Trait with Impl)]
    /// # struct Impl;
    /// # impl Trait for Impl {}
    /// # fn the_test() -> Result<()> {
    /// let container = container! {
    ///     trait => ImplProvider; scoped
    /// };
    ///
    /// // Every instance resolved within the same scope will be the same instance.
    /// let instance_1 = container.resolve::<dyn Trait>("trait")?;
    /// let instance_2 = container.resolve::<dyn Trait>("trait")?;
    /// assert_eq!(
    ///     instance_1.deref() as &dyn Trait as *const _,
    ///     instance_2.deref() as &dyn Trait as *const _
    /// );
    /// {
    ///     let scoped = container.scoped();
    ///     let instance_3 = scoped.resolve::<dyn Trait>("trait")?;
    ///
    ///     // Since these two were resolved in different scopes, they will never be the
    ///     // same instance.
    ///     assert_ne!(
    ///         instance_1.deref() as &dyn Trait as *const _,
    ///         instance_3.deref() as &dyn Trait as *const _
    ///     );
    /// }
    /// # Ok(())
    /// # }
    /// # the_test().unwrap()
    /// ```
    Scoped,
    /// The container will construct a single instance of `T` and reuse it
    /// throughout all scopes.
    ///
    /// # Example
    /// ```rust
    /// # use coi::{container, Inject, Result};
    /// # use std::{ops::Deref, sync::{Arc, Mutex}};
    /// # trait Trait: Inject {}
    /// # #[derive(Inject)]
    /// # #[coi(provides dyn Trait with Impl)]
    /// # struct Impl;
    /// # impl Trait for Impl {}
    /// # fn the_test() -> Result<()> {
    /// let container = container! {
    ///     trait => ImplProvider; singleton
    /// };
    ///
    /// let instance_1 = container.resolve::<dyn Trait>("trait")?;
    /// let instance_2 = container.resolve::<dyn Trait>("trait")?;
    ///
    /// assert_eq!(
    ///     instance_1.deref() as &dyn Trait as *const _,
    ///     instance_2.deref() as &dyn Trait as *const _
    /// );
    /// {
    ///     let scoped = container.scoped();
    ///     let instance_3 = scoped.resolve::<dyn Trait>("trait")?;
    ///
    ///     // Regardless of what scope the instance was resolved in, it will always
    ///     // be the same instance.
    ///     assert_eq!(
    ///         instance_1.deref() as &dyn Trait as *const _,
    ///         instance_3.deref() as &dyn Trait as *const _
    ///     );
    /// }
    /// # Ok(())
    /// # }
    /// # the_test().unwrap()
    /// ```
    Singleton,
}
/// A struct used to provide a registration to a container. It wraps a registration kind and
/// a provider.
#[derive(Clone, Debug)]
pub struct Registration<T> {
    // How the container should cache instances produced by `provider`.
    kind: RegistrationKind,
    // The value that constructs instances; expected to impl `Provide`.
    provider: T,
}
impl<T> Registration<T> {
    /// Constructor for `Registration`. For it to be useful, `T` should impl `Provide`.
    pub fn new(kind: RegistrationKind, provider: T) -> Self {
        Self { kind, provider }
    }
}
// The shared, lock-protected state behind a `Container` handle.
#[derive(Clone, Debug)]
struct InnerContainer {
    // Registered providers, keyed by registration key. Each provider is stored doubly
    // type-erased: an `Arc<dyn Provide<Output = T>>` hidden behind `Arc<dyn Any>`, so
    // `resolve` can downcast back to the typed provider (see `ContainerBuilder::register_as`).
    provider_map: HashMap<&'static str, Registration<Arc<dyn Any + Send + Sync>>>,
    // Cache of already-constructed scoped/singleton instances; each value is an
    // `Arc<Arc<T>>` behind `dyn Any` (see `Container::resolve`).
    resolved_map: HashMap<&'static str, Arc<dyn Any + Send + Sync>>,
    // Set for scoped child containers; key lookups fall back to the parent on a miss.
    parent: Option<Container>,
    #[cfg(feature = "debug")]
    // Key -> the keys it depends on; used by `analyze` and `dot_graph`.
    dependency_map: HashMap<&'static str, &'static [&'static str]>,
}
impl InnerContainer {
fn check_resolved<T>(&self, key: &'static str) -> Option<Result<Arc<T>>>
where
T: Inject + ?Sized,
{
self.resolved_map.get(key).map(|v| {
v.downcast_ref::<Arc<T>>()
.map(Arc::clone)
.ok_or_else(|| Error::TypeMismatch(key.to_owned()))
})
}
}
/// A struct that manages all injected types.
// Cloning is cheap: all clones share the same inner state through the `Arc`.
#[derive(Clone, Debug)]
pub struct Container(Arc<Mutex<InnerContainer>>);
/// Possible errors generated when running [`Container::analyze`].
///
/// [`Container::analyze`]: struct.Container.html#method.analyze
#[cfg(feature = "debug")]
#[cfg_attr(docsrs, doc(cfg(feature = "debug")))]
#[derive(Debug, thiserror::Error)]
pub enum AnalysisError {
    // FIXME(pfaria), it would be better if we could trace the
    // entire cycle and store a Vec<String> here. Might require
    // manually calling petgraph::visit::depth_first_search
    /// There is a cyclic dependency within the container
    #[error("Cycle detected at node `{0}`")]
    Cycle(&'static str),
    /// There is a missing dependency. Param 0 depends on Param 1, and Param 1 is missing.
    #[error("Node `{0}` depends on `{1}`, the latter of which is not registered")]
    Missing(&'static str, &'static str),
}
#[cfg(feature = "debug")]
#[derive(Clone, Default)]
struct AnalysisNode {
registration: Option<RegistrationKind>,
id: &'static str,
}
#[cfg(feature = "debug")]
impl fmt::Display for AnalysisNode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.registration {
Some(reg) => match reg {
RegistrationKind::Transient => write!(f, "Transient - {}", self.id),
RegistrationKind::Singleton => write!(f, "Singleton - {}", self.id),
RegistrationKind::Scoped => write!(f, "Scoped - {}", self.id),
},
None => write!(f, "MISSING - {}", self.id),
}
}
}
impl Container {
    // Wraps the inner state in the shared `Arc<Mutex<_>>` handle.
    fn new(container: InnerContainer) -> Self {
        Self(Arc::new(Mutex::new(container)))
    }
    /// Resolve an `Arc<T>` whose provider was previously registered with `key`.
    ///
    /// Returns `Error::KeyNotFound` if neither this container nor any parent has a
    /// registration for `key`, and `Error::TypeMismatch` if `key` was registered for a
    /// different `T` than requested.
    pub fn resolve<T>(&self, key: &'static str) -> Result<Arc<T>>
    where
        T: Inject + ?Sized,
    {
        let (kind, provider) = {
            let container = self.0.lock().unwrap();
            // If we already have a resolved version, return it.
            if let Some(resolved) = container.check_resolved::<T>(key) {
                return resolved;
            }
            // Try to find the provider
            let registration = match container.provider_map.get(key) {
                Some(provider) => provider,
                None => {
                    // If the key is not found, then we might be a child container. If we have a
                    // parent, then search it for a possibly valid provider.
                    return match &container.parent {
                        Some(parent) => {
                            let parent = parent.clone();
                            // NOTE(review): this container's lock is still held while the
                            // parent resolves (the guard lives until this fn returns). That
                            // cannot self-deadlock because parent and child are distinct
                            // mutexes, but it does serialize resolution through this scope.
                            parent.resolve::<T>(key)
                        }
                        None => Err(Error::KeyNotFound(key.to_owned())),
                    };
                }
            };
            (
                registration.kind,
                // Undo the double type-erasure done in `ContainerBuilder::register_as`:
                // the `dyn Any` holds an `Arc<dyn Provide<Output = T>>`.
                registration
                    .provider
                    .downcast_ref::<Arc<dyn Provide<Output = T> + Send + Sync + 'static>>()
                    .map(Arc::clone)
                    .ok_or_else(|| Error::TypeMismatch(key.to_owned()))?,
            )
        };
        // The lock is released at this point, so the provider may recursively resolve its
        // own dependencies through `self` without deadlocking.
        let provided = provider.provide(self);
        match kind {
            RegistrationKind::Transient => provided,
            RegistrationKind::Scoped | RegistrationKind::Singleton => {
                let mut container = self.0.lock().unwrap();
                // While the lock was released, another caller may have already resolved and
                // cached `key`; `entry` keeps whichever instance got there first.
                Ok(container
                    .resolved_map
                    .entry(key)
                    .or_insert(Arc::new(provided?))
                    .downcast_ref::<Arc<T>>()
                    .map(Arc::clone)
                    .unwrap())
            }
        }
    }
    /// Produce a child container that only contains providers for scoped registrations
    /// Any calls to resolve from the returned container can still use the `self` container
    /// to resolve any other kinds of registrations.
    pub fn scoped(&self) -> Container {
        let container: &InnerContainer = &self.0.lock().unwrap();
        Container::new(InnerContainer {
            // Copy scoped and transient registrations only; singletons stay in the parent
            // so every scope shares the same singleton instances (via the parent fallback).
            provider_map: container
                .provider_map
                .iter()
                .filter_map(|(k, v)| match v.kind {
                    kind @ RegistrationKind::Scoped | kind @ RegistrationKind::Transient => Some((
                        *k,
                        Registration {
                            kind,
                            provider: Arc::clone(&v.provider),
                        },
                    )),
                    _ => None,
                })
                .collect(),
            // Fresh cache: scoped instances must be re-created per scope.
            resolved_map: HashMap::default(),
            // FIXME(pfaria) no clone here
            #[cfg(feature = "debug")]
            dependency_map: container.dependency_map.clone(),
            parent: Some(self.clone()),
        })
    }
    // Builds the petgraph dependency graph used by `analyze` and `dot_graph`.
    #[cfg(feature = "debug")]
    fn dependency_graph(&self) -> DiGraph<AnalysisNode, AnalysisNode> {
        let container = self.0.lock().unwrap();
        let mut graph = DiGraph::<AnalysisNode, AnalysisNode>::new();
        // First add one node per registered key...
        let mut key_to_node = container
            .dependency_map
            .keys()
            .map(|k| -> (&'static str, NodeIndex) {
                let kind = container.provider_map[k].kind;
                let n = graph.add_node(AnalysisNode {
                    registration: Some(kind),
                    id: k,
                });
                (k, n)
            })
            .collect::<HashMap<&str, _>>();
        // ...then add one edge per dependency, creating `registration: None` placeholder
        // nodes for dependencies that were never registered.
        for (k, deps) in &container.dependency_map {
            let kn = key_to_node[k as &str];
            let edges = deps
                .iter()
                .map(|dep| {
                    let vn = match key_to_node.get(dep) {
                        Some(vn) => *vn,
                        None => {
                            let vn = graph.add_node(AnalysisNode {
                                registration: None,
                                id: dep,
                            });
                            key_to_node.insert(dep, vn);
                            key_to_node[dep]
                        }
                    };
                    (kn, vn)
                })
                .collect::<Vec<_>>();
            graph.extend_with_edges(&edges[..]);
        }
        graph
    }
    // FIXME(pfaria): Add analysis on singleton registrations that depend on
    // non-singleton registration.
    /// Run an analysis on a container and return any issues detected.
    /// Current analysis performed:
    /// - Missing dependencies
    /// - Cyclic dependencies
    #[cfg(feature = "debug")]
    #[cfg_attr(docsrs, doc(cfg(feature = "debug")))]
    pub fn analyze(&self) -> std::result::Result<(), Vec<AnalysisError>> {
        use petgraph::Direction;
        let graph = self.dependency_graph();
        // Every `registration: None` node is a dependency that was never registered;
        // report one `Missing` error per incoming edge (i.e. per dependent).
        let mut errors = graph
            .node_indices()
            .filter(|i| graph[*i].registration.is_none())
            .flat_map(|i| {
                let to = &graph[i].id;
                graph
                    .neighbors_directed(i, Direction::Incoming)
                    .map(|from| AnalysisError::Missing(graph[from].id, to))
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>();
        // Do any cycles exist?
        if let Err(cycle) = toposort(&graph, None) {
            errors.push(AnalysisError::Cycle(graph[cycle.node_id()].id));
        }
        if !errors.is_empty() {
            Err(errors)
        } else {
            Ok(())
        }
    }
    /// Produces a dot format output that can be processed by the [graphviz] [`dot` (pdf)]
    /// program to generate a graphical representation of the dependency graph.
    ///
    /// [graphviz]: http://graphviz.org/
    /// [`dot` (pdf)]: https://graphviz.gitlab.io/_pages/pdf/dotguide.pdf
    #[cfg(feature = "debug")]
    #[cfg_attr(docsrs, doc(cfg(feature = "debug")))]
    pub fn dot_graph(&self) -> String {
        use petgraph::dot::{Config, Dot};
        let graph = self.dependency_graph();
        format!("{}", Dot::with_config(&graph, &[Config::EdgeNoLabel]))
    }
}
/// A builder used to construct a `Container`.
#[derive(Clone, Default)]
pub struct ContainerBuilder {
    // Providers collected so far; moved into the `InnerContainer` on `build`.
    provider_map: HashMap<&'static str, Registration<Arc<dyn Any + Send + Sync>>>,
    #[cfg(feature = "debug")]
    // Key -> dependency keys, recorded for `analyze`/`dot_graph`.
    dependency_map: HashMap<&'static str, &'static [&'static str]>,
}
impl ContainerBuilder {
    /// Constructor for `ContainerBuilder`.
    pub fn new() -> Self {
        Self {
            provider_map: HashMap::default(),
            #[cfg(feature = "debug")]
            dependency_map: HashMap::default(),
        }
    }
    /// Register a `Provider` for `T` with identifier `key`.
    ///
    /// The registration defaults to `RegistrationKind::Transient`.
    #[inline]
    pub fn register<P, T>(self, key: &'static str, provider: P) -> Self
    where
        T: Inject + ?Sized,
        P: Provide<Output = T> + Send + Sync + 'static,
    {
        self.register_as(
            key,
            Registration::new(RegistrationKind::Transient, provider),
        )
    }
    // Erases the concrete provider type to an `Arc<dyn Provide<Output = T>>`.
    fn get_arc<P, T>(provider: P) -> Arc<dyn Provide<Output = T> + Send + Sync>
    where
        T: Inject + ?Sized,
        P: Provide<Output = T> + Send + Sync + 'static,
    {
        Arc::new(provider)
    }
    /// Register a `Provider` for `T` with identifier `key`, while also specifying the resolution
    /// behavior.
    pub fn register_as<P, T>(mut self, key: &'static str, registration: Registration<P>) -> Self
    where
        T: Inject + ?Sized,
        P: Provide<Output = T> + Send + Sync + 'static,
    {
        #[cfg(feature = "debug")]
        let deps = registration.provider.dependencies();
        // Double erasure: the `Arc<dyn Provide<Output = T>>` is itself stored behind an
        // `Arc<dyn Any>`, so `Container::resolve` can downcast back to the typed provider.
        self.provider_map.insert(
            key,
            Registration {
                kind: registration.kind,
                provider: Arc::new(Self::get_arc(registration.provider))
                    as Arc<dyn Any + Send + Sync>,
            },
        );
        #[cfg(feature = "debug")]
        self.dependency_map.insert(key, deps);
        self
    }
    /// Consume this builder to produce a `Container`.
    pub fn build(self) -> Container {
        Container::new(InnerContainer {
            provider_map: self.provider_map,
            resolved_map: HashMap::default(),
            parent: None,
            #[cfg(feature = "debug")]
            dependency_map: self.dependency_map,
        })
    }
}
/// A trait to manage the construction of an injectable trait or struct.
pub trait Provide {
    /// The type that this provider will produce when resolved from a [`Container`].
    ///
    /// [`Container`]: struct.Container.html
    type Output: Inject + ?Sized;
    /// Only intended to be used internally
    fn provide(&self, container: &Container) -> Result<Arc<Self::Output>>;
    /// Return list of dependencies
    #[cfg(feature = "debug")]
    #[cfg_attr(docsrs, doc(cfg(feature = "debug")))]
    fn dependencies(&self) -> &'static [&'static str];
}
// Without the "debug" feature, any `Fn(&Container) -> Result<Arc<T>>` closure is a provider.
#[cfg(not(feature = "debug"))]
#[cfg_attr(docsrs, doc(cfg(not(feature = "debug"))))]
impl<T, F> Provide for F
where
    F: Fn(&Container) -> Result<Arc<T>>,
    T: Inject + ?Sized,
{
    type Output = T;
    fn provide(&self, container: &Container) -> Result<Arc<Self::Output>> {
        self(container)
    }
}
// Same, for the unsized (trait-object) closure type behind a pointer.
#[cfg(not(feature = "debug"))]
#[cfg_attr(docsrs, doc(cfg(not(feature = "debug"))))]
impl<T> Provide for dyn Fn(&Container) -> Result<Arc<T>>
where
    T: Inject + ?Sized,
{
    type Output = T;
    fn provide(&self, container: &Container) -> Result<Arc<Self::Output>> {
        self(container)
    }
}
// With "debug", a provider must also carry its dependency-name list, so the closure is
// paired with a `&'static [&'static str]` (this is what `provide_closure!` expands to).
#[cfg(feature = "debug")]
#[cfg_attr(docsrs, doc(cfg(feature = "debug")))]
impl<T, F> Provide for (&'static [&'static str], F)
where
    F: Fn(&Container) -> Result<Arc<T>>,
    T: Inject + ?Sized,
{
    type Output = T;
    fn provide(&self, container: &Container) -> Result<Arc<Self::Output>> {
        (self.1)(container)
    }
    fn dependencies(&self) -> &'static [&'static str] {
        self.0
    }
}
// Debug-mode equivalent for the unsized closure type.
#[cfg(feature = "debug")]
#[cfg_attr(docsrs, doc(cfg(feature = "debug")))]
impl<T> Provide
    for (
        &'static [&'static str],
        dyn Fn(&Container) -> Result<Arc<T>>,
    )
where
    T: Inject + ?Sized,
{
    type Output = T;
    fn provide(&self, container: &Container) -> Result<Arc<Self::Output>> {
        (self.1)(container)
    }
    fn dependencies(&self) -> &'static [&'static str] {
        self.0
    }
}
/// A macro to simplify building of `Container`s.
///
/// It takes a list of key-value pairs, where the keys are converted to string
/// keys, and the values are converted into registrations. Transient, singleton
/// and scoped registrations are possible, with transient being the default:
/// ```rust
/// use coi::{container, Inject};
///
/// trait Dep: Inject {}
///
/// #[derive(Inject)]
/// #[coi(provides dyn Dep with Impl)]
/// struct Impl;
///
/// impl Dep for Impl {}
///
/// let mut container = container! {
///     dep => ImplProvider,
///     transient_dep => ImplProvider; transient,
///     singleton_dep => ImplProvider; singleton,
///     scoped_dep => ImplProvider; scoped
/// };
/// ```
///
/// For details on how each registration works, see [`coi::Registration`]
///
/// [`coi::Registration`]: enum.Registration.html
#[macro_export]
macro_rules! container {
    // `key => provider; scoped` — one instance per scope.
    (@registration $provider:expr; scoped) => {
        $crate::Registration::new(
            $crate::RegistrationKind::Scoped,
            $provider
        )
    };
    // `key => provider; singleton` — one instance shared across all scopes.
    (@registration $provider:expr; singleton) => {
        $crate::Registration::new(
            $crate::RegistrationKind::Singleton,
            $provider
        )
    };
    // `key => provider; transient` — a fresh instance per resolve.
    (@registration $provider:expr; transient) => {
        $crate::Registration::new(
            $crate::RegistrationKind::Transient,
            $provider
        )
    };
    // Bare `key => provider` defaults to transient.
    (@registration $provider:expr) => {
        $crate::Registration::new(
            $crate::RegistrationKind::Transient,
            $provider
        )
    };
    // Registers a single `key => provider` entry on `$builder`.
    (@line $builder:ident $key:ident $provider:expr $(; $call:ident)?) => {
        $builder = $builder.register_as(stringify!($key), container!(@registration $provider $(; $call)?));
    };
    // Entry point without a trailing comma: normalize and recurse.
    ($($key:ident => $provider:expr $(; $call:ident)?),+) => {
        container!{ $( $key => $provider $(; $call)?, )+ }
    };
    // Entry point (trailing comma): build the container from all entries.
    ($($key:ident => $provider:expr $(; $call:ident)?,)+) => {
        {
            let mut builder = ::coi::ContainerBuilder::new();
            $(container!(@line builder $key $provider $(; $call)?);)+
            builder.build()
        }
    }
}
/// Helper macro to ease use of "debug" feature when providing closures
///
/// All closure arguments must have explicit `Arc<...>` types; each argument is
/// resolved from the container under a key equal to the argument's name.
#[macro_export]
macro_rules! provide_closure {
    // Support any comma format
    ($($move:ident)? |$($arg:ident: Arc<$ty:ty>),*| $(-> $res:ty)? $block:block) => {
        provide_closure!($($move)? |$($arg: Arc<$ty>,)*| $(-> $res)? $block)
    };
    // actual macro
    ($($move:ident)? |$($arg:ident: Arc<$ty:ty>,)*| $(-> $res:ty)? $block:block) => {
        {
            use $crate::__provide_closure_impl;
            __provide_closure_impl!($($move)? |$($arg: $ty,)*| $(-> $res)? $block)
        }
    };
    // handle case of missing argument types
    ($($move:ident)? |$($arg:ident),*| $(-> $res:ty)? $block:block) => {
        compile_error!("this macro requires closure arguments to have explicitly defined parameter types")
    };
}
#[doc(hidden)]
#[macro_export]
#[cfg(not(feature = "debug"))]
macro_rules! __provide_closure_impl {
    // Non-debug: expand to a plain closure that resolves each `arg` by its own name.
    ($($move:ident)? |$($arg:ident: $ty:ty,)*| $(-> $res:ty)? $block:block) => {
        $($move)? |_container: &$crate::Container| $(-> $res)? {
            $(let $arg = _container.resolve::<$ty>(stringify!($arg))?;)*
            $block
        }
    };
}
#[doc(hidden)]
#[macro_export]
#[cfg(feature = "debug")]
macro_rules! __provide_closure_impl {
    // Debug: also emit the dependency-name list so `Provide::dependencies` works.
    ($($move:ident)? |$($arg:ident: $ty:ty,)*| $(-> $res:ty)? $block:block) => {
        (
            &[$(stringify!($arg),)*],
            $($move)? |_container: &$crate::Container| $(-> $res)? {
                $(let $arg = _container.resolve::<$ty>(stringify!($arg))?;)*
                $block
            }
        )
    };
}
#[cfg(test)]
mod test {
    use super::*;
    // `Error`'s `Display` output (generated by `thiserror`) is part of the public contract.
    #[test]
    fn ensure_display() {
        use std::io;
        let error = Error::KeyNotFound("S".to_owned());
        let displayed = format!("{}", error);
        assert_eq!(displayed, "Key not found: S");
        let error = Error::TypeMismatch("S2".to_owned());
        let displayed = format!("{}", error);
        assert_eq!(displayed, "Type mismatch for key: S2");
        let error = Error::Inner(Box::new(io::Error::new(io::ErrorKind::NotFound, "oh no!")));
        let displayed = format!("{}", error);
        assert_eq!(displayed, "Inner error: oh no!");
    }
    #[test]
    fn ensure_debug() {
        let error = Error::KeyNotFound("S".to_owned());
        let debugged = format!("{:?}", error);
        assert_eq!(debugged, "KeyNotFound(\"S\")");
        let error = Error::TypeMismatch("S2".to_owned());
        let debugged = format!("{:?}", error);
        assert_eq!(debugged, "TypeMismatch(\"S2\")");
    }
    // Fixed typo in the test name: `conainer` -> `container`.
    #[test]
    fn container_builder_is_clonable() {
        let builder = ContainerBuilder::new();
        for _ in 0..2 {
            let builder = builder.clone();
            let _container = builder.build();
        }
    }
    #[test]
    fn container_is_clonable() {
        let container = ContainerBuilder::new().build();
        #[allow(clippy::redundant_clone)]
        let _container = container.clone();
    }
}
|
//! A simple object pool.
//!
//! `ObjPool<T>` is basically just a `Vec<Option<T>>`, which allows you to:
//!
//! * Insert an object (reuse an existing `None` element, or append to the end) and get an `ObjId`
//! in return.
//! * Remove object with a specified `ObjId`.
//! * Access object with a specified `ObjId`.
//! * Convert `ObjId` to index and back for specified `ObjPool`.
//!
//! # Features
//!
//! * Implements debug-only checks for `ObjId` and `ObjPool` correspondence. It will panic in debug
//! with some pretty high probability (depending on the actual size of the `ObjPool`) in case of
//! using an `ObjId` from the one `ObjPool` with another `ObjPool`. It helps a lot to find bugs in
//! case of using many `ObjPool`s in the same application with no overhead in release.
//!
//! * Provides 32-bit long `OptionObjId` type as a memory-footprint optimization replacement for
//! `Option<ObjId>` in case you don't need to store more than `u32::max_value() / 2` objects in
//! your `ObjPool`.
//!
//! # Limitations:
//!
//! * `ObjPool` can only store up to `u32::max_value() / 2` objects in it in case you are using
//! `OptionObjId` as long as `OptionObjId` treats `u32::max_value()` as an universal `None`.
//!
//! * `ObjId` is always 32-bit long.
//!
//! # Examples
//!
//! Some data structures built using `ObjPool<T>`:
//!
//! * [Doubly linked list](https://github.com/artemshein/obj-pool/blob/master/examples/linked_list.rs)
//! * [Splay tree](https://github.com/artemshein/obj-pool/blob/master/examples/splay_tree.rs)
use std::{ops::{Index, IndexMut}, str::FromStr, num::ParseIntError, ptr, mem, iter, fmt, vec};
use unreachable::unreachable;
#[cfg(debug_assertions)]
use rand::prelude::random;
use std::ops::Deref;
use std::slice;
pub use optional;
#[cfg(feature = "serde_support")]
use serde::{Serialize, Deserialize};
/// A slot, which is either vacant or occupied.
///
/// Vacant slots in object pool are linked together into a singly linked list. This allows the object pool to
/// efficiently find a vacant slot before inserting a new object, or reclaiming a slot after
/// removing an object.
#[derive(Clone)]
enum Slot<T> {
    /// Vacant slot, containing index to the next slot in the linked list.
    Vacant(u32),
    /// Occupied slot, containing a value.
    Occupied(T),
}
/// An id of the object in an `ObjPool`.
///
/// In release builds this is just an index into the pool's underlying vector. In debug
/// builds it is `index + offset`, where `offset` is specific to one `ObjPool`; this makes
/// it possible to detect (with high probability) an `ObjId` being used with the wrong pool.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
pub struct ObjId(pub u32);
impl ObjId {
    /// Converts this id back into a raw index for the pool whose offset is `offset`.
    ///
    /// Debug builds subtract the pool offset back out; release builds ignore it.
    #[inline]
    pub fn into_index(self, offset: u32) -> u32 {
        // In release the shift is zero, so the id already equals the index.
        let shift = if cfg!(debug_assertions) { offset } else { 0 };
        self.0 - shift
    }
    /// Builds an id from a raw pool `index` and the pool's `offset`.
    ///
    /// Debug builds mix the pool offset in; release builds ignore it.
    #[inline]
    pub fn from_index(index: u32, offset: u32) -> ObjId {
        let shift = if cfg!(debug_assertions) { offset } else { 0 };
        ObjId(index + shift)
    }
}
impl std::fmt::Display for ObjId {
    /// Formats the raw `u32` id, forwarding the formatter so width/fill/etc. flags apply.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        std::fmt::Display::fmt(&self.0, f)
    }
}
impl FromStr for ObjId {
type Err = ParseIntError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(ObjId(s.parse::<u32>()?))
}
}
impl Deref for ObjId {
    type Target = u32;
    // Gives read-only access to the raw id via `*obj_id`.
    // NOTE(review): `Deref` to a non-pointer target is unconventional, but it is part of
    // this type's public API.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<u32> for ObjId {
    // Wraps a raw id without any offset adjustment.
    fn from(v: u32) -> ObjId {
        ObjId(v)
    }
}
// `u32::max_value()` is reserved as the `None` sentinel for `OptionObjId`; see the
// module docs for the resulting capacity limitation.
impl optional::Noned for ObjId {
    fn is_none(&self) -> bool {
        self.0 == u32::max_value()
    }
    fn get_none() -> ObjId {
        u32::max_value().into()
    }
}
impl optional::OptEq for ObjId {
    // Equality on the raw id; used by `Optioned<ObjId>` comparisons.
    fn opt_eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
/// Optimization for `Option<ObjId>` which treats `ObjId` of `u32::max_value()` as `None`.
/// It's safe to store any `ObjPool` `ObjId` in this wrapper as long as the size of the `ObjPool` is
/// less than `u32::max_value() / 2`.
///
/// ```
/// use obj_pool::{ObjPool, ObjId, OptionObjId};
///
/// let mut obj_pool = ObjPool::default();
///
/// let mut n: OptionObjId = obj_pool.insert(10).into();
///
/// assert!(n.is_some());
/// assert_eq!(10, obj_pool[n.unwrap()]);
///
/// n = OptionObjId::none();
/// assert!(n.is_none());
/// assert_eq!(None, n.into_option());
/// ```
pub type OptionObjId = optional::Optioned<ObjId>;
/// An object pool.
///
/// `ObjPool<T>` holds an array of slots for storing objects.
/// Every slot is always in one of two states: occupied or vacant.
///
/// Essentially, this is equivalent to `Vec<Option<T>>`.
///
/// # Insert and remove
///
/// When inserting a new object into object pool, a vacant slot is found and then the object is placed
/// into the slot. If there are no vacant slots, the array is reallocated with bigger capacity.
/// The cost of insertion is amortized `O(1)`.
///
/// When removing an object, the slot containing it is marked as vacant and the object is returned.
/// The cost of removal is `O(1)`.
///
/// ```
/// use obj_pool::ObjPool;
///
/// let mut obj_pool = ObjPool::new();
/// let a = obj_pool.insert(10);
/// let b = obj_pool.insert(20);
///
/// assert_ne!(a, b); // ids are not the same
///
/// assert_eq!(obj_pool.remove(a), Some(10));
/// assert_eq!(obj_pool.get(a), None); // there is no object with this `ObjId` anymore
///
/// assert_eq!(obj_pool.insert(30), a); // slot is reused, got the same `ObjId`
/// ```
///
/// # Indexing
///
/// You can also access objects in an object pool by `ObjId`.
/// However, accessing an object with invalid `ObjId` will result in panic.
///
/// ```
/// use obj_pool::ObjPool;
///
/// let mut obj_pool = ObjPool::new();
/// let a = obj_pool.insert(10);
/// let b = obj_pool.insert(20);
///
/// assert_eq!(obj_pool[a], 10);
/// assert_eq!(obj_pool[b], 20);
///
/// obj_pool[a] += obj_pool[b];
/// assert_eq!(obj_pool[a], 30);
/// ```
///
/// To access slots without fear of panicking, use `get` and `get_mut`, which return `Option`s.
pub struct ObjPool<T> {
    /// Slots in which objects are stored (occupied or vacant).
    slots: Vec<Slot<T>>,
    /// Number of occupied slots in the object pool.
    len: u32,
    /// Raw index of the first vacant slot in the intrusive free-list
    /// (each `Slot::Vacant` stores the index of the next vacant slot);
    /// equal to `null_index()` when there are no vacant slots.
    head: u32,
    /// Offset added to raw indices when forming `ObjId`s. Random per pool in
    /// debug builds (to catch ids used with the wrong pool), always 0 in release.
    offset: u32,
}
impl<T> AsRef<ObjPool<T>> for ObjPool<T> {
    /// Identity conversion so generic code can accept `impl AsRef<ObjPool<T>>`.
    fn as_ref(&self) -> &ObjPool<T> {
        self
    }
}
impl<T> AsMut<ObjPool<T>> for ObjPool<T> {
    /// Identity conversion so generic code can accept `impl AsMut<ObjPool<T>>`.
    fn as_mut(&mut self) -> &mut ObjPool<T> {
        self
    }
}
impl<T> ObjPool<T> {
    /// Constructs a new, empty object pool.
    ///
    /// The object pool will not allocate until objects are inserted into it.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool: ObjPool<i32> = ObjPool::new();
    /// ```
    #[inline]
    pub fn new() -> Self {
        let offset = Self::new_offset();
        ObjPool {
            slots: Vec::new(),
            len: 0,
            head: Self::null_index_with_offset(offset),
            offset,
        }
    }
    /// Constructs a new, empty object pool at compile time.
    ///
    /// The object pool will not allocate until objects are inserted into it.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool: ObjPool<i32> = ObjPool::new();
    /// ```
    #[inline]
    pub const fn new_const() -> Self {
        let offset = Self::new_const_offset();
        ObjPool {
            slots: Vec::new(),
            len: 0,
            head: Self::null_index_with_offset(offset),
            offset,
        }
    }
    /// The "no vacant slot" sentinel for a given offset: wraps to `offset - 1`,
    /// a raw head value no live free-list entry can produce.
    #[inline]
    const fn null_index_with_offset(offset: u32) -> u32 {
        offset.wrapping_add(u32::max_value())
    }
    /// The "no vacant slot" sentinel for this pool.
    #[inline]
    fn null_index(&self) -> u32 {
        Self::null_index_with_offset(self.offset)
    }
    /// Returns an offset for this `ObjPool`, in release mode it's `0`, in debug mode it's
    /// between `0` and `u32::max_value() / 2`.
    #[inline]
    pub fn offset(&self) -> u32 {
        self.offset
    }
    /// For debug purposes only.
    ///
    /// NOTE: callers are expected to pass an offset below `u32::max_value() / 2`,
    /// matching the range produced by `new_offset`.
    #[cfg(debug_assertions)]
    pub fn with_offset(offset: u32) -> Self {
        ObjPool {
            slots: Vec::new(),
            len: 0,
            head: Self::null_index_with_offset(offset),
            offset
        }
    }
    #[inline]
    #[cfg(debug_assertions)]
    fn new_offset() -> u32 {
        random::<u32>() / 2 // We want to keep u32::max_value() as an ultimate invalid value
    }
    #[inline]
    #[cfg(debug_assertions)]
    const fn new_const_offset() -> u32 {
        const_random::const_random!(u32) / 2 // We want to keep u32::max_value() as an ultimate invalid value
    }
    #[inline]
    #[cfg(not(debug_assertions))]
    fn new_offset() -> u32 {
        0
    }
    #[inline]
    #[cfg(not(debug_assertions))]
    const fn new_const_offset() -> u32 {
        0
    }
    /// Get an index in the `ObjPool` for the given `ObjId`.
    #[inline]
    pub fn obj_id_to_index(&self, obj_id: ObjId) -> u32 {
        obj_id.into_index(self.offset)
    }
    /// Make an `ObjId` from an index in this `ObjPool`.
    #[inline]
    pub fn index_to_obj_id(&self, index: u32) -> ObjId {
        ObjId::from_index(index, self.offset)
    }
    /// Constructs a new, empty object pool with the specified capacity (number of slots).
    ///
    /// The object pool will be able to hold exactly `capacity` objects without reallocating.
    /// If `capacity` is 0, the object pool will not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::with_capacity(10);
    ///
    /// assert_eq!(obj_pool.len(), 0);
    /// assert_eq!(obj_pool.capacity(), 10);
    ///
    /// // These inserts are done without reallocating...
    /// for i in 0..10 {
    ///     obj_pool.insert(i);
    /// }
    /// assert_eq!(obj_pool.capacity(), 10);
    ///
    /// // ... but this one will reallocate.
    /// obj_pool.insert(11);
    /// assert!(obj_pool.capacity() > 10);
    /// ```
    #[inline]
    pub fn with_capacity(cap: usize) -> Self {
        let offset = Self::new_offset();
        ObjPool {
            slots: Vec::with_capacity(cap),
            len: 0,
            head: Self::null_index_with_offset(offset),
            offset,
        }
    }
    /// Returns the number of slots in the object pool.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let obj_pool: ObjPool<i32> = ObjPool::with_capacity(10);
    /// assert_eq!(obj_pool.capacity(), 10);
    /// ```
    #[inline]
    pub fn capacity(&self) -> usize {
        self.slots.capacity()
    }
    /// Returns the number of occupied slots in the object pool.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// assert_eq!(obj_pool.len(), 0);
    ///
    /// for i in 0..10 {
    ///     obj_pool.insert(());
    ///     assert_eq!(obj_pool.len(), i + 1);
    /// }
    /// ```
    #[inline]
    pub fn len(&self) -> u32 {
        self.len
    }
    /// Returns `true` if all slots are vacant.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// assert!(obj_pool.is_empty());
    ///
    /// obj_pool.insert(1);
    /// assert!(!obj_pool.is_empty());
    /// ```
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Returns the `ObjId` of the next inserted object if no other
    /// mutating calls take place in between.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    ///
    /// let a = obj_pool.next_vacant();
    /// let b = obj_pool.insert(1);
    /// assert_eq!(a, b);
    /// let c = obj_pool.next_vacant();
    /// let d = obj_pool.insert(2);
    /// assert_eq!(c, d);
    /// ```
    #[inline]
    pub fn next_vacant(&mut self) -> ObjId {
        if self.head == self.null_index() {
            // Free-list empty: the next insert pushes a fresh slot, whose
            // index equals the current number of occupied slots.
            self.index_to_obj_id(self.len)
        } else {
            self.index_to_obj_id(self.head)
        }
    }
    /// Inserts an object into the object pool and returns the `ObjId` of this object.
    /// The object pool will reallocate if it's full.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    ///
    /// let a = obj_pool.insert(1);
    /// let b = obj_pool.insert(2);
    /// assert!(a != b);
    /// ```
    pub fn insert(&mut self, object: T) -> ObjId {
        self.len += 1;
        if self.head == self.null_index() {
            // No vacant slot to reuse: append a new one.
            self.slots.push(Slot::Occupied(object));
            self.index_to_obj_id(self.len - 1)
        } else {
            // Pop the head of the vacant free-list and occupy it.
            let index = self.head;
            match self.slots[index as usize] {
                Slot::Vacant(next) => {
                    self.head = next;
                    self.slots[index as usize] = Slot::Occupied(object);
                },
                Slot::Occupied(_) => unreachable!(),
            }
            self.index_to_obj_id(index)
        }
    }
    /// Removes the object stored by `ObjId` from the object pool and returns it.
    ///
    /// `None` is returned in case the there is no object with such an `ObjId`.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// let a = obj_pool.insert("hello");
    ///
    /// assert_eq!(obj_pool.len(), 1);
    /// assert_eq!(obj_pool.remove(a), Some("hello"));
    ///
    /// assert_eq!(obj_pool.len(), 0);
    /// assert_eq!(obj_pool.remove(a), None);
    /// ```
    pub fn remove(&mut self, obj_id: ObjId) -> Option<T> {
        let index = self.obj_id_to_index(obj_id);
        match self.slots.get_mut(index as usize) {
            None => None,
            Some(&mut Slot::Vacant(_)) => None,
            Some(slot @ &mut Slot::Occupied(_)) => {
                // Push the freed slot onto the front of the vacant free-list.
                if let Slot::Occupied(object) = mem::replace(slot, Slot::Vacant(self.head)) {
                    self.head = index;
                    self.len -= 1;
                    Some(object)
                } else {
                    unreachable!();
                }
            }
        }
    }
    /// Clears the object pool, removing and dropping all objects it holds. Keeps the allocated memory
    /// for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// for i in 0..10 {
    ///     obj_pool.insert(i);
    /// }
    ///
    /// assert_eq!(obj_pool.len(), 10);
    /// obj_pool.clear();
    /// assert_eq!(obj_pool.len(), 0);
    /// ```
    #[inline]
    pub fn clear(&mut self) {
        self.slots.clear();
        self.len = 0;
        self.head = self.null_index();
    }
    /// Returns a reference to the object by its `ObjId`.
    ///
    /// If object is not found with given `obj_id`, `None` is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// let obj_id = obj_pool.insert("hello");
    ///
    /// assert_eq!(obj_pool.get(obj_id), Some(&"hello"));
    /// obj_pool.remove(obj_id);
    /// assert_eq!(obj_pool.get(obj_id), None);
    /// ```
    pub fn get(&self, obj_id: ObjId) -> Option<&T> {
        let index = self.obj_id_to_index(obj_id) as usize;
        match self.slots.get(index) {
            None => None,
            Some(&Slot::Vacant(_)) => None,
            Some(&Slot::Occupied(ref object)) => Some(object),
        }
    }
    /// Returns a mutable reference to the object by its `ObjId`.
    ///
    /// If object can't be found, `None` is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// let obj_id = obj_pool.insert(7);
    ///
    /// assert_eq!(obj_pool.get_mut(obj_id), Some(&mut 7));
    /// *obj_pool.get_mut(obj_id).unwrap() *= 10;
    /// assert_eq!(obj_pool.get_mut(obj_id), Some(&mut 70));
    /// ```
    #[inline]
    pub fn get_mut(&mut self, obj_id: ObjId) -> Option<&mut T> {
        let index = self.obj_id_to_index(obj_id) as usize;
        match self.slots.get_mut(index) {
            None => None,
            Some(&mut Slot::Vacant(_)) => None,
            Some(&mut Slot::Occupied(ref mut object)) => Some(object),
        }
    }
    /// Returns a reference to the object by its `ObjId`.
    ///
    /// # Safety
    ///
    /// Behavior is undefined if object can't be found.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// let obj_id = obj_pool.insert("hello");
    ///
    /// unsafe { assert_eq!(&*obj_pool.get_unchecked(obj_id), &"hello") }
    /// ```
    pub unsafe fn get_unchecked(&self, obj_id: ObjId) -> &T {
        // `unreachable()` is the unchecked (UB) variant: the caller promises
        // the id refers to an occupied slot of this pool.
        match self.slots.get(self.obj_id_to_index(obj_id) as usize) {
            None => unreachable(),
            Some(&Slot::Vacant(_)) => unreachable(),
            Some(&Slot::Occupied(ref object)) => object,
        }
    }
    /// Returns a mutable reference to the object by its `ObjId`.
    ///
    /// # Safety
    ///
    /// Behavior is undefined if object can't be found.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// let obj_id = obj_pool.insert("hello");
    ///
    /// unsafe { assert_eq!(&*obj_pool.get_unchecked_mut(obj_id), &"hello") }
    /// ```
    pub unsafe fn get_unchecked_mut(&mut self, obj_id: ObjId) -> &mut T {
        let index = self.obj_id_to_index(obj_id) as usize;
        match self.slots.get_mut(index) {
            None => unreachable(),
            Some(&mut Slot::Vacant(_)) => unreachable(),
            Some(&mut Slot::Occupied(ref mut object)) => object,
        }
    }
    /// Swaps two objects in the object pool.
    ///
    /// The two `ObjId`s are `a` and `b`.
    ///
    /// # Panics
    ///
    /// Panics if any of the `ObjId`s is invalid.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// let a = obj_pool.insert(7);
    /// let b = obj_pool.insert(8);
    ///
    /// obj_pool.swap(a, b);
    /// assert_eq!(obj_pool.get(a), Some(&8));
    /// assert_eq!(obj_pool.get(b), Some(&7));
    /// ```
    #[inline]
    pub fn swap(&mut self, a: ObjId, b: ObjId) {
        // Validate both ids up front; panics on an invalid id, like the
        // previous implementation which `unwrap()`ed two `get_mut` calls.
        assert!(self.get(a).is_some(), "object not found");
        assert!(self.get(b).is_some(), "object not found");
        if a != b {
            // Both slots are occupied, so swapping the whole `Slot`s swaps
            // exactly the two objects. This replaces the old pattern of
            // holding raw pointers from two successive `&mut self` borrows,
            // which invalidated the first pointer (UB under Stacked Borrows).
            let i = self.obj_id_to_index(a) as usize;
            let j = self.obj_id_to_index(b) as usize;
            self.slots.swap(i, j);
        }
    }
    /// Reserves capacity for at least `additional` more objects to be inserted. The object pool may
    /// reserve more space to avoid frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `u32`.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// obj_pool.insert("hello");
    ///
    /// obj_pool.reserve(10);
    /// assert!(obj_pool.capacity() >= 11);
    /// ```
    pub fn reserve(&mut self, additional: u32) {
        // Vacant slots already count towards the requested headroom.
        let vacant = self.slots.len() as u32 - self.len;
        if additional > vacant {
            self.slots.reserve((additional - vacant) as usize);
        }
    }
    /// Reserves the minimum capacity for exactly `additional` more objects to be inserted.
    ///
    /// Note that the allocator may give the object pool more space than it requests.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `u32`.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// obj_pool.insert("hello");
    ///
    /// obj_pool.reserve_exact(10);
    /// assert!(obj_pool.capacity() >= 11);
    /// ```
    pub fn reserve_exact(&mut self, additional: u32) {
        let vacant = self.slots.len() as u32 - self.len;
        if additional > vacant {
            self.slots.reserve_exact((additional - vacant) as usize);
        }
    }
    /// Returns an iterator over occupied slots.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// obj_pool.insert(1);
    /// obj_pool.insert(2);
    /// obj_pool.insert(4);
    ///
    /// let mut iterator = obj_pool.iter();
    /// assert_eq!(iterator.next(), Some((obj_pool.index_to_obj_id(0), &1)));
    /// assert_eq!(iterator.next(), Some((obj_pool.index_to_obj_id(1), &2)));
    /// assert_eq!(iterator.next(), Some((obj_pool.index_to_obj_id(2), &4)));
    /// ```
    #[inline]
    pub fn iter(&self) -> Iter<T> {
        Iter { slots: self.slots.iter().enumerate(), offset: self.offset }
    }
    /// Returns an iterator that returns mutable references to objects.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::new();
    /// obj_pool.insert("zero".to_string());
    /// obj_pool.insert("one".to_string());
    /// obj_pool.insert("two".to_string());
    ///
    /// let offset = obj_pool.offset();
    /// for (obj_id, object) in obj_pool.iter_mut() {
    ///     *object = obj_id.into_index(offset).to_string() + " " + object;
    /// }
    ///
    /// let mut iterator = obj_pool.iter();
    /// assert_eq!(iterator.next(), Some((obj_pool.index_to_obj_id(0), &"0 zero".to_string())));
    /// assert_eq!(iterator.next(), Some((obj_pool.index_to_obj_id(1), &"1 one".to_string())));
    /// assert_eq!(iterator.next(), Some((obj_pool.index_to_obj_id(2), &"2 two".to_string())));
    /// ```
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<T> {
        IterMut { slots: self.slots.iter_mut().enumerate(), offset: self.offset }
    }
    /// Shrinks the capacity of the object pool as much as possible.
    ///
    /// It will drop down as close as possible to the length but the allocator may still inform
    /// the object pool that there is space for a few more elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use obj_pool::ObjPool;
    ///
    /// let mut obj_pool = ObjPool::with_capacity(10);
    /// obj_pool.insert("first".to_string());
    /// obj_pool.insert("second".to_string());
    /// obj_pool.insert("third".to_string());
    /// assert_eq!(obj_pool.capacity(), 10);
    /// obj_pool.shrink_to_fit();
    /// assert!(obj_pool.capacity() >= 3);
    /// ```
    pub fn shrink_to_fit(&mut self) {
        self.slots.shrink_to_fit();
    }
}
impl<T> fmt::Debug for ObjPool<T> {
    /// Opaque representation: slot contents are not printed because `T` is
    /// not required to implement `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("ObjPool { ... }")
    }
}
impl<T> Index<ObjId> for ObjPool<T> {
    type Output = T;
    /// Returns the object stored under `obj_id`.
    ///
    /// Panics with "object not found" when the id does not refer to an
    /// occupied slot of this pool.
    #[inline]
    fn index(&self, obj_id: ObjId) -> &T {
        match self.get(obj_id) {
            Some(object) => object,
            None => panic!("object not found"),
        }
    }
}
impl<T> IndexMut<ObjId> for ObjPool<T> {
    /// Mutable access to the object stored under `obj_id`.
    ///
    /// Panics with "object not found" when the id does not refer to an
    /// occupied slot of this pool.
    #[inline]
    fn index_mut(&mut self, obj_id: ObjId) -> &mut T {
        match self.get_mut(obj_id) {
            Some(object) => object,
            None => panic!("object not found"),
        }
    }
}
impl<T> Default for ObjPool<T> {
fn default() -> Self {
ObjPool::new()
}
}
impl<T: Clone> Clone for ObjPool<T> {
fn clone(&self) -> Self {
ObjPool {
slots: self.slots.clone(),
len: self.len,
head: self.head,
offset: self.offset,
}
}
}
/// An iterator over the occupied slots in a `ObjPool`.
///
/// Yields `(ObjId, T)` pairs, consuming the pool. Created by
/// `ObjPool::into_iter`.
pub struct IntoIter<T> {
    /// Remaining slots, paired with their raw indices.
    slots: iter::Enumerate<vec::IntoIter<Slot<T>>>,
    /// The originating pool's id offset (0 in release builds).
    offset: u32,
}
impl<T> IntoIter<T> {
    /// Translate an `ObjId` back into a raw slot index using the
    /// originating pool's offset.
    #[inline]
    pub fn obj_id_to_index(&self, obj_id: ObjId) -> u32 {
        ObjId::into_index(obj_id, self.offset)
    }
    /// Translate a raw slot index into an `ObjId` using the originating
    /// pool's offset.
    #[inline]
    pub fn index_to_obj_id(&self, index: u32) -> ObjId {
        ObjId::from_index(index, self.offset)
    }
}
impl<T> Iterator for IntoIter<T> {
type Item = (ObjId, T);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
while let Some((index, slot)) = self.slots.next() {
if let Slot::Occupied(object) = slot {
return Some((self.index_to_obj_id(index as u32), object));
}
}
None
}
}
impl<T> IntoIterator for ObjPool<T> {
type Item = (ObjId, T);
type IntoIter = IntoIter<T>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
IntoIter { slots: self.slots.into_iter().enumerate(), offset: self.offset }
}
}
impl<T> iter::FromIterator<T> for ObjPool<T> {
    /// Builds a pool containing every item of `iter`, preallocating from the
    /// iterator's size-hint lower bound.
    fn from_iter<U: IntoIterator<Item=T>>(iter: U) -> ObjPool<T> {
        let it = iter.into_iter();
        let mut obj_pool = ObjPool::with_capacity(it.size_hint().0);
        it.for_each(|object| {
            obj_pool.insert(object);
        });
        obj_pool
    }
}
impl<T> fmt::Debug for IntoIter<T> {
    /// Opaque debug representation; slot contents are not shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("IntoIter { ... }")
    }
}
/// An iterator over references to the occupied slots in a `ObjPool`.
///
/// Yields `(ObjId, &T)` pairs. Created by `ObjPool::iter`.
pub struct Iter<'a, T: 'a> {
    /// Remaining slots, paired with their raw indices.
    slots: iter::Enumerate<slice::Iter<'a, Slot<T>>>,
    /// The originating pool's id offset (0 in release builds).
    offset: u32,
}
impl<'a, T: 'a> Iter<'a, T> {
    /// Get an index in the originating `ObjPool` for the given `ObjId`.
    ///
    /// Added for consistency: `IntoIter` and `IterMut` both expose this pair
    /// of public conversions, while `Iter` previously only had a private
    /// `index_to_obj_id`.
    #[inline]
    pub fn obj_id_to_index(&self, obj_id: ObjId) -> u32 {
        obj_id.into_index(self.offset)
    }
    /// Make an `ObjId` from an index in the originating `ObjPool`.
    #[inline]
    pub fn index_to_obj_id(&self, index: u32) -> ObjId {
        ObjId::from_index(index, self.offset)
    }
}
impl<'a, T> Iterator for Iter<'a, T> {
type Item = (ObjId, &'a T);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
while let Some((index, slot)) = self.slots.next() {
if let Slot::Occupied(ref object) = *slot {
return Some((self.index_to_obj_id(index as u32), object));
}
}
None
}
}
impl<'a, T> IntoIterator for &'a ObjPool<T> {
type Item = (ObjId, &'a T);
type IntoIter = Iter<'a, T>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, T> fmt::Debug for Iter<'a, T> {
    /// Opaque debug representation; slot contents are not shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("Iter { ... }")
    }
}
/// An iterator over mutable references to the occupied slots in an `ObjPool`.
/// (The doc previously said "a `Arena`", a leftover from the crate this code
/// was derived from.)
///
/// Yields `(ObjId, &mut T)` pairs. Created by `ObjPool::iter_mut`.
pub struct IterMut<'a, T: 'a> {
    /// Remaining slots, paired with their raw indices.
    slots: iter::Enumerate<slice::IterMut<'a, Slot<T>>>,
    /// The originating pool's id offset (0 in release builds).
    offset: u32,
}
impl<'a, T: 'a> IterMut<'a, T> {
    /// Translate an `ObjId` back into a raw slot index using the
    /// originating pool's offset.
    #[inline]
    pub fn obj_id_to_index(&self, obj_id: ObjId) -> u32 {
        ObjId::into_index(obj_id, self.offset)
    }
    /// Translate a raw slot index into an `ObjId` using the originating
    /// pool's offset.
    #[inline]
    pub fn index_to_obj_id(&self, index: u32) -> ObjId {
        ObjId::from_index(index, self.offset)
    }
}
impl<'a, T> Iterator for IterMut<'a, T> {
type Item = (ObjId, &'a mut T);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
while let Some((index, slot)) = self.slots.next() {
if let Slot::Occupied(ref mut object) = *slot {
return Some((self.index_to_obj_id(index as u32), object));
}
}
None
}
}
impl<'a, T> IntoIterator for &'a mut ObjPool<T> {
type Item = (ObjId, &'a mut T);
type IntoIter = IterMut<'a, T>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<'a, T> fmt::Debug for IterMut<'a, T> {
    /// Opaque debug representation; slot contents are not shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("IterMut { ... }")
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn new() {
        let obj_pool = ObjPool::<i32>::new();
        assert!(obj_pool.is_empty());
        assert_eq!(obj_pool.len(), 0);
        assert_eq!(obj_pool.capacity(), 0);
    }
    #[test]
    fn insert() {
        let mut obj_pool = ObjPool::new();
        for i in 0..10 {
            let a = obj_pool.insert(i * 10);
            assert_eq!(obj_pool[a], i * 10);
        }
        assert!(!obj_pool.is_empty());
        assert_eq!(obj_pool.len(), 10);
    }
    #[test]
    fn with_capacity() {
        let mut obj_pool = ObjPool::with_capacity(10);
        assert_eq!(obj_pool.capacity(), 10);
        for _ in 0..10 {
            obj_pool.insert(());
        }
        assert_eq!(obj_pool.len(), 10);
        assert_eq!(obj_pool.capacity(), 10);
        obj_pool.insert(());
        assert_eq!(obj_pool.len(), 11);
        assert!(obj_pool.capacity() > 10);
    }
    #[test]
    fn remove() {
        let mut obj_pool = ObjPool::new();
        let a = obj_pool.insert(0);
        let b = obj_pool.insert(10);
        let c = obj_pool.insert(20);
        obj_pool.insert(30);
        assert_eq!(obj_pool.len(), 4);
        assert_eq!(obj_pool.remove(b), Some(10));
        assert_eq!(obj_pool.remove(c), Some(20));
        assert_eq!(obj_pool.len(), 2);
        // The two freed slots are reused before any new slot is pushed.
        obj_pool.insert(-1);
        obj_pool.insert(-1);
        assert_eq!(obj_pool.len(), 4);
        assert_eq!(obj_pool.remove(a), Some(0));
        obj_pool.insert(-1);
        assert_eq!(obj_pool.len(), 4);
        obj_pool.insert(400);
        assert_eq!(obj_pool.len(), 5);
    }
    #[test]
    fn clear() {
        let mut obj_pool = ObjPool::new();
        obj_pool.insert(10);
        obj_pool.insert(20);
        assert!(!obj_pool.is_empty());
        assert_eq!(obj_pool.len(), 2);
        let cap = obj_pool.capacity();
        obj_pool.clear();
        assert!(obj_pool.is_empty());
        assert_eq!(obj_pool.len(), 0);
        // `clear` keeps the allocation for reuse.
        assert_eq!(obj_pool.capacity(), cap);
    }
    #[test]
    fn indexing() {
        let mut obj_pool = ObjPool::new();
        let a = obj_pool.insert(10);
        let b = obj_pool.insert(20);
        let c = obj_pool.insert(30);
        obj_pool[b] += obj_pool[c];
        assert_eq!(obj_pool[a], 10);
        assert_eq!(obj_pool[b], 50);
        assert_eq!(obj_pool[c], 30);
    }
    #[test]
    #[should_panic]
    fn indexing_vacant() {
        let mut obj_pool = ObjPool::new();
        let _ = obj_pool.insert(10);
        let b = obj_pool.insert(20);
        let _ = obj_pool.insert(30);
        obj_pool.remove(b);
        obj_pool[b];
    }
    #[test]
    #[should_panic]
    fn invalid_indexing() {
        let mut obj_pool = ObjPool::new();
        obj_pool.insert(10);
        obj_pool.insert(20);
        let a = obj_pool.insert(30);
        obj_pool.remove(a);
        obj_pool[a];
    }
    #[test]
    fn get() {
        let mut obj_pool = ObjPool::new();
        let a = obj_pool.insert(10);
        let b = obj_pool.insert(20);
        let c = obj_pool.insert(30);
        *obj_pool.get_mut(b).unwrap() += *obj_pool.get(c).unwrap();
        assert_eq!(obj_pool.get(a), Some(&10));
        assert_eq!(obj_pool.get(b), Some(&50));
        assert_eq!(obj_pool.get(c), Some(&30));
        obj_pool.remove(b);
        assert_eq!(obj_pool.get(b), None);
        assert_eq!(obj_pool.get_mut(b), None);
    }
    #[test]
    fn reserve() {
        let mut obj_pool = ObjPool::new();
        obj_pool.insert(1);
        obj_pool.insert(2);
        obj_pool.reserve(10);
        assert!(obj_pool.capacity() >= 11);
    }
    #[test]
    fn reserve_exact() {
        let mut obj_pool = ObjPool::new();
        obj_pool.insert(1);
        obj_pool.insert(2);
        // Fixed: this test previously called `reserve` instead of
        // `reserve_exact`, so `reserve_exact` was never exercised.
        obj_pool.reserve_exact(10);
        assert!(obj_pool.capacity() >= 11);
    }
    #[test]
    fn iter() {
        let mut arena = ObjPool::new();
        let a = arena.insert(10);
        let b = arena.insert(20);
        let c = arena.insert(30);
        let d = arena.insert(40);
        arena.remove(b);
        let mut it = arena.iter();
        assert_eq!(it.next(), Some((a, &10)));
        assert_eq!(it.next(), Some((c, &30)));
        assert_eq!(it.next(), Some((d, &40)));
        assert_eq!(it.next(), None);
    }
    #[test]
    fn iter_mut() {
        let mut obj_pool = ObjPool::with_offset(0);
        let a = obj_pool.insert(10);
        let b = obj_pool.insert(20);
        let c = obj_pool.insert(30);
        let d = obj_pool.insert(40);
        obj_pool.remove(b);
        {
            let mut it = obj_pool.iter_mut();
            assert_eq!(it.next(), Some((a, &mut 10)));
            assert_eq!(it.next(), Some((c, &mut 30)));
            assert_eq!(it.next(), Some((d, &mut 40)));
            assert_eq!(it.next(), None);
        }
        for (obj_id, value) in &mut obj_pool {
            *value += *obj_id;
        }
        let mut it = obj_pool.iter_mut();
        assert_eq!(*it.next().unwrap().1, 10 + *a);
        assert_eq!(*it.next().unwrap().1, 30 + *c);
        assert_eq!(*it.next().unwrap().1, 40 + *d);
        assert_eq!(it.next(), None);
    }
    #[test]
    fn from_iter() {
        let obj_pool: ObjPool<_> = [10, 20, 30, 40].iter().cloned().collect();
        let mut it = obj_pool.iter();
        assert_eq!(it.next(), Some((obj_pool.index_to_obj_id(0), &10)));
        assert_eq!(it.next(), Some((obj_pool.index_to_obj_id(1), &20)));
        assert_eq!(it.next(), Some((obj_pool.index_to_obj_id(2), &30)));
        assert_eq!(it.next(), Some((obj_pool.index_to_obj_id(3), &40)));
        assert_eq!(it.next(), None);
    }
    #[test]
    #[should_panic]
    fn wrong_pool_obj_id() {
        // Ids from a pool with a different offset must not resolve.
        let mut obj_pool1 = ObjPool::with_offset(0);
        let mut obj_pool2 = ObjPool::with_offset(100);
        let a = obj_pool1.insert(10);
        let b = obj_pool2.insert(20);
        assert_eq!(Some(&10), obj_pool1.get(a));
        assert_eq!(Some(&20), obj_pool2.get(b));
        assert_eq!(None, obj_pool1.get(b));
        assert_eq!(None, obj_pool2.get(a))
    }
}
|
use arduino_uno::prelude::*;
use arduino_uno::adc::Adc;
use arduino_uno::hal::port::{
mode::Analog,
portc::PC5,
};
/// The integer type produced by the PRNG (one full machine word).
pub type RngType = usize;
/// Implementation of a sufficiently-random Pseudo Random Number Generator
/// that utilizes an ADC.
pub struct XOrShiftPrng {
    /// The current random number.
    bits: RngType,
    /// The analog pin from which to read. This pin is expected to be floating
    /// (unconnected), so its ADC readings vary — presumably supplying the
    /// entropy mixed into `bits`; confirm on the target board.
    pin: PC5<Analog>,
}
impl XOrShiftPrng {
    /// Number of bits in `RngType`: counting the zeros of the all-zero value
    /// `RngType::MIN` yields the type's bit width.
    const BIT_COUNT: usize = RngType::MIN.count_zeros() as usize;
    /// Create and initialize a new XOrShiftPrng object.
    ///
    /// # Arguments
    /// * pin - The floating analog pin used as the entropy source.
    /// * adc - The Analog-Digital convertor required to read analog data.
    pub fn new(pin: PC5<Analog>, adc: &mut Adc) -> Self {
        let mut rng = Self { bits: 0, pin };
        // Seed immediately so the first `generate` does not start from an
        // all-zero state alone.
        rng.shuffle(adc);
        rng
    }
    /// Shuffle the bits, i.e. generate a new random number.
    ///
    /// This is done by successively reading the 8 least significant bits
    /// from a designated analog input and XOR-ing it with the current random
    /// number, while with each read rotating the random number's bits to the
    /// left. This is done for each bit of the RngType type.
    fn shuffle(&mut self, adc: &mut Adc) {
        for _ in 0..Self::BIT_COUNT {
            // Blocking ADC read; `void_unwrap` suggests the read's error type
            // is uninhabited (infallible) — confirm against the HAL in use.
            let sample: u16 = nb::block!(adc.read(&mut self.pin)).void_unwrap();
            self.bits = self.bits.rotate_left(1) ^ ((sample & 255) as RngType);
        }
    }
    /// Generate a random (ish) RngType number.
    ///
    /// # Arguments
    /// * adc - The Analog-Digital convertor required to read analog data.
    pub fn generate(&mut self, adc: &mut Adc) -> RngType {
        self.shuffle(adc);
        // `RngType` is `Copy`; the previous `.clone()` here was redundant
        // (clippy: clone_on_copy).
        self.bits
    }
}
|
//! Rate Based Filtering
//!
//! The BurstFilter provides a mechanism to control the rate at which log events are processed by
//! silently discarding events after the maximum limit has been reached.
//!
//! # Example
//!
//! ```toml
//! [filter.burst]
//! max_burst = 10.0
//! # Optional (Defaults shown)
//! level = "Warn"
//! on_match = "Neutral"
//! on_mismatch = "Deny"
//! ```
use Filter;
use chrono::{DateTime, Duration, UTC};
use config::filter::MatchAction;
use config::filter::MatchAction::*;
use log::{LogLevelFilter, LogRecord};
use std::cell::Cell;
use std::ops::Add;
#[cfg_attr(test, derive(PartialEq))]
#[derive(Clone, Debug)]
/// Burst Filter Configuration
///
/// NOTE(review): the mutable state lives in `Cell`s, so a single filter
/// instance is not safe to share across threads; presumably the logging
/// framework serializes calls to `filter` — confirm before reusing elsewhere.
pub struct BurstFilter {
    /// Optional minimal level of messages to be filtered. Anything at or above this level will be
    /// filtered out if `max_burst` has been exceeded. The default is `Warn` meaning any messages
    /// that are lower than `Warn` (i.e. `Error`) will be logged regardless of the size of a burst.
    level: Option<LogLevelFilter>,
    /// The maximum number of events per second that can occur before events are filtered.
    max_burst: f64,
    /// The action to take when the filter matches. Defaults to `Neutral`.
    on_match: Option<MatchAction>,
    /// The action to take when the filter does not match. Defaults to `Deny`.
    on_mismatch: Option<MatchAction>,
    /// The time when we can reset the event count to 0 (one second after the
    /// start of the current window).
    next: Cell<DateTime<UTC>>,
    /// Events received. If `event_count` > `max_burst`, messages will be filtered.
    event_count: Cell<u32>,
}
impl BurstFilter {
    /// Create a new burst filter with the given max burst limit in Events / Second.
    ///
    /// All optional settings start as `None`; the effective defaults are
    /// applied when `filter` runs (level `Warn`, on-match `Neutral`,
    /// on-mismatch `Deny`).
    pub fn new(max_burst: f64) -> BurstFilter {
        BurstFilter {
            level: None,
            max_burst: max_burst,
            on_match: None,
            on_mismatch: None,
            // First reset point: one second from construction.
            next: Cell::new(UTC::now().add(Duration::seconds(1))),
            event_count: Cell::new(0),
        }
    }
    /// Set the minimum level to be filtered if the max burst limit is hit. Anything lower will
    /// always be logged. (Builder-style: consumes and returns `self`.)
    pub fn level(mut self, level: Option<LogLevelFilter>) -> BurstFilter {
        self.level = level;
        self
    }
    /// Set the on match MatchAction. Default is Neutral.
    pub fn on_match(mut self, action: Option<MatchAction>) -> BurstFilter {
        self.on_match = action;
        self
    }
    /// Set the on mis-match MatchAction. Default is Deny.
    pub fn on_mismatch(mut self, action: Option<MatchAction>) -> BurstFilter {
        self.on_mismatch = action;
        self
    }
    /// Set the next time to reset the event count to 0. This is usually set to now() + 1 second.
    /// (Takes `self` by value but only touches the `Cell`, hence no `mut`.)
    pub fn next(self, next: DateTime<UTC>) -> BurstFilter {
        self.next.set(next);
        self
    }
    /// Set the event count.
    pub fn event_count(self, count: u32) -> BurstFilter {
        self.event_count.set(count);
        self
    }
}
impl Filter for BurstFilter {
    /// Rate-limit decision for one log record.
    ///
    /// Records more severe than the configured level always "match";
    /// otherwise a record matches only while the events seen in the current
    /// one-second window have not exceeded `max_burst`.
    fn filter(&self, record: &LogRecord) -> MatchAction {
        let now = UTC::now();
        // Effective level: configured value or `Warn`.
        let level = match self.level {
            Some(l) => l,
            None => LogLevelFilter::Warn,
        };
        // Increment the event count.
        self.event_count.set(self.event_count.get() + 1);
        let count = self.event_count.get();
        // In the `log` crate's ordering, more severe levels compare lower,
        // so `record.level() < level` means "more severe than the threshold".
        let matched = if record.level() < level {
            true
        } else if now > self.next.get() {
            // More than 1 second has passed, reset next and event count.
            // NOTE(review): the count is reset to 0 even though the current
            // event was already counted above, so the first event of each new
            // window is not charged against `max_burst` — confirm intended.
            self.next.set(now.add(Duration::seconds(1)));
            self.event_count.set(0);
            true
        } else {
            // Less than 1 second has passed. Match if max_burst is greater than the current count,
            // else don't match.
            self.max_burst > count as f64
        };
        // Map the boolean outcome to the configured actions (or defaults).
        if matched {
            match self.on_match {
                Some(ref m) => m.clone(),
                None => Neutral,
            }
        } else {
            match self.on_mismatch {
                Some(ref m) => m.clone(),
                None => Deny,
            }
        }
    }
}
#[cfg(feature = "rustc-serialize")]
mod rs {
    //! Legacy `rustc-serialize` decoder for `BurstFilter`.
    use chrono::{Duration, UTC};
    use config::rs::read_llf_opt;
    use rustc_serialize::{Decodable, Decoder};
    use super::*;
    use std::ops::Add;
    impl Decodable for BurstFilter {
        /// Decodes the four optional/required config fields, then rebuilds the
        /// filter through the builder so the runtime state (`next`,
        /// `event_count`) starts fresh.
        fn decode<D: Decoder>(d: &mut D) -> Result<BurstFilter, D::Error> {
            d.read_struct("BurstFilter", 4, |d| {
                // NOTE(review): field indices are passed as 1..4 here;
                // decoders generally ignore them, but 0-based indices are the
                // usual convention — confirm against the decoder in use.
                let level = try!(d.read_struct_field("level", 1, |d| d.read_option(read_llf_opt)));
                let mb = try!(d.read_struct_field("max_burst", 2, |d| Decodable::decode(d)));
                let on_match = try!(d.read_struct_field("on_match", 3, |d| Decodable::decode(d)));
                let on_mismatch = try!(d.read_struct_field("on_mismatch",
                                                           4,
                                                           |d| Decodable::decode(d)));
                let bf = BurstFilter::new(mb)
                    .level(level)
                    .on_match(on_match)
                    .on_mismatch(on_mismatch)
                    .next(UTC::now().add(Duration::seconds(1)))
                    .event_count(0);
                Ok(bf)
            })
        }
    }
}
#[cfg(feature = "serde")]
mod serde {
    //! Pre-1.0 serde deserializer for `BurstFilter` (visitor-based API).
    use config::serde::LogLevelFilterField;
    use config::filter::serde::MatchActionField;
    use super::*;
    use serde::{Deserialize, Deserializer};
    use serde::de::{MapVisitor, Visitor};
    /// The recognized keys of a `[filter.burst]` table.
    enum BurstFilterField {
        Level,
        MaxBurst,
        OnMatch,
        OnMismatch,
    }
    impl Deserialize for BurstFilterField {
        /// Maps a string key to its field variant; unknown keys are a syntax
        /// error (this is what rejects configs with stray keys).
        fn deserialize<D>(deserializer: &mut D) -> Result<BurstFilterField, D::Error>
            where D: Deserializer
        {
            struct BurstFilterFieldVisitor;
            impl Visitor for BurstFilterFieldVisitor {
                type Value = BurstFilterField;
                fn visit_str<E>(&mut self, value: &str) -> Result<BurstFilterField, E>
                    where E: ::serde::de::Error
                {
                    match value {
                        "level" => Ok(BurstFilterField::Level),
                        "max_burst" => Ok(BurstFilterField::MaxBurst),
                        "on_match" => Ok(BurstFilterField::OnMatch),
                        "on_mismatch" => Ok(BurstFilterField::OnMismatch),
                        _ => Err(::serde::de::Error::syntax("Unexpected field!")),
                    }
                }
            }
            deserializer.visit(BurstFilterFieldVisitor)
        }
    }
    impl Deserialize for BurstFilter {
        fn deserialize<D>(deserializer: &mut D) -> Result<BurstFilter, D::Error>
            where D: Deserializer
        {
            static FIELDS: &'static [&'static str] = &["level",
                                                       "max_burst",
                                                       "on_match",
                                                       "on_mismatch"];
            deserializer.visit_struct("BurstFilter", FIELDS, BurstFilterVisitor)
        }
    }
    /// Accumulates the optional fields while walking the config map.
    struct BurstFilterVisitor;
    impl Visitor for BurstFilterVisitor {
        type Value = BurstFilter;
        fn visit_map<V>(&mut self, mut visitor: V) -> Result<BurstFilter, V::Error>
            where V: MapVisitor
        {
            let mut level: Option<LogLevelFilterField> = None;
            let mut max_burst: Option<f64> = None;
            let mut on_match: Option<MatchActionField> = None;
            let mut on_mismatch: Option<MatchActionField> = None;
            // Drain every key/value pair; keys repeat-last-wins.
            loop {
                match try!(visitor.visit_key()) {
                    Some(BurstFilterField::Level) => {
                        level = Some(try!(visitor.visit_value()));
                    }
                    Some(BurstFilterField::MaxBurst) => {
                        max_burst = Some(try!(visitor.visit_value()));
                    }
                    Some(BurstFilterField::OnMatch) => {
                        on_match = Some(try!(visitor.visit_value()));
                    }
                    Some(BurstFilterField::OnMismatch) => {
                        on_mismatch = Some(try!(visitor.visit_value()));
                    }
                    None => {
                        break;
                    }
                }
            }
            // `max_burst` is the only mandatory field.
            let mb = match max_burst {
                Some(mb) => mb,
                None => return visitor.missing_field("max_burst"),
            };
            // Unwrap the field-wrapper types into the plain config values.
            let lvl = match level {
                Some(l) => Some(l.level()),
                None => None,
            };
            let omma = match on_match {
                Some(om) => Some(om.match_action()),
                None => None,
            };
            let ommma = match on_mismatch {
                Some(omm) => Some(omm.match_action()),
                None => None,
            };
            try!(visitor.end());
            let bf = BurstFilter::new(mb)
                .level(lvl)
                .on_match(omma)
                .on_mismatch(ommma);
            Ok(bf)
        }
    }
}
#[cfg(test)]
mod test {
    use decode;
    use super::*;
    // Minimal valid config: only the mandatory `max_burst` key.
    const BASE_CONFIG: &'static str = r#"
max_burst = 10.0
"#;
    // Valid config exercising every optional key.
    const ALL_CONFIG: &'static str = r#"
max_burst = 10.0
level = "Debug"
on_match = "Accept"
on_mismatch = "Neutral"
"#;
    static VALIDS: &'static [&'static str] = &[BASE_CONFIG, ALL_CONFIG];
    // Missing mandatory `max_burst`.
    const INVALID_CONFIG_0: &'static str = r#""#;
    // Wrong type for `max_burst`.
    const INVALID_CONFIG_1: &'static str = r#"
max_burst = "not a float"
"#;
    // Unrecognized level name.
    const INVALID_CONFIG_2: &'static str = r#"
max_burst = 10.0
level = "NOt A LeVel"
"#;
    // Wrong types for the match actions.
    const INVALID_CONFIG_3: &'static str = r#"
max_burst = 10.0
on_match = 1
"#;
    const INVALID_CONFIG_4: &'static str = r#"
max_burst = 10.0
on_mismatch = 1
"#;
    // Unknown key must be rejected.
    const INVALID_CONFIG_5: &'static str = r#"
max_burst = 10.0
really = "yes really"
"#;
    static INVALIDS: &'static [&'static str] = &[INVALID_CONFIG_0,
                                                 INVALID_CONFIG_1,
                                                 INVALID_CONFIG_2,
                                                 INVALID_CONFIG_3,
                                                 INVALID_CONFIG_4,
                                                 INVALID_CONFIG_5];
    #[test]
    fn test_valid_configs() {
        // Every valid config must decode successfully.
        let mut results = Vec::new();
        for valid in VALIDS {
            match decode::<BurstFilter>(valid) {
                Ok(_) => {
                    results.push(true);
                }
                Err(_) => {
                    assert!(false);
                }
            };
        }
        assert!(results.iter().all(|x| *x));
    }
    #[test]
    fn test_invalid_configs() {
        // Every invalid config must be rejected.
        let mut results = Vec::new();
        for invalid in INVALIDS {
            match decode::<BurstFilter>(invalid) {
                Ok(_) => {
                    assert!(false);
                }
                Err(_) => {
                    results.push(true);
                }
            };
        }
        assert!(results.iter().all(|x| *x));
    }
}
|
use error::*;
use il;
/// Reinterpret `constant` as a signed value, sign-extending it to 64 bits.
///
/// Guards the two shifts that previously overflowed: `mask << 64` for
/// 64-bit constants and `1 << (0 - 1)` for 0-bit constants (both panic in
/// debug builds). 0-bit constants are treated as non-negative.
fn sign_extend(constant: &il::Constant) -> i64 {
    let value: u64 = constant.value();
    let bits = constant.bits();
    // All-ones above the constant's width; empty for full-width constants.
    let mask: u64 = if bits >= 64 { 0 } else { !0u64 << bits };
    // Sign bit set -> fill the high bits.
    if bits > 0 && value & (1u64 << (bits - 1)) != 0 {
        (value | mask) as i64
    }
    else {
        value as i64
    }
}
/// Evaluate an `il::Expression` where all terminals are `il::Constant`, and
/// return the resulting `il::Constant`.
///
/// Errors when a scalar terminal is encountered or on division/modulo by
/// zero. Arithmetic wraps (two's complement); results are built at the left
/// operand's bit width and truncated by `il::Constant::new`.
pub fn eval(expr: &il::Expression) -> Result<il::Constant> {
    match *expr {
        // Scalars have no concrete value.
        il::Expression::Scalar(ref scalar) => {
            return Err(ErrorKind::ExecutorScalar(scalar.name().to_string()).into());
        },
        il::Expression::Constant(ref constant) => Ok(constant.clone()),
        il::Expression::Add(ref lhs, ref rhs) => {
            // `wrapping_add` keeps Add consistent with Sub/Mul and avoids a
            // debug-build overflow panic for 64-bit constants.
            let r = eval(lhs)?.value().wrapping_add(eval(rhs)?.value());
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Sub(ref lhs, ref rhs) => {
            let r = eval(lhs)?.value().wrapping_sub(eval(rhs)?.value());
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Mul(ref lhs, ref rhs) => {
            let r = eval(lhs)?.value().wrapping_mul(eval(rhs)?.value());
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Divu(ref lhs, ref rhs) => {
            let rhs = eval(rhs)?;
            if rhs.value() == 0 {
                return Err(ErrorKind::Arithmetic("Division by zero".to_string()).into());
            }
            let r = eval(lhs)?.value() / rhs.value();
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Modu(ref lhs, ref rhs) => {
            let rhs = eval(rhs)?;
            if rhs.value() == 0 {
                return Err(ErrorKind::Arithmetic("Division by zero".to_string()).into());
            }
            let r = eval(lhs)?.value() % rhs.value();
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Divs(ref lhs, ref rhs) => {
            // Signed variants operate on sign-extended 64-bit values.
            let rhs = eval(rhs)?;
            if rhs.value() == 0 {
                return Err(ErrorKind::Arithmetic("Division by zero".to_string()).into());
            }
            let r = sign_extend(&eval(lhs)?) / sign_extend(&rhs);
            Ok(il::Constant::new(r as u64, lhs.bits()))
        },
        il::Expression::Mods(ref lhs, ref rhs) => {
            let rhs = eval(rhs)?;
            if rhs.value() == 0 {
                return Err(ErrorKind::Arithmetic("Division by zero".to_string()).into());
            }
            let r = sign_extend(&eval(lhs)?) % sign_extend(&rhs);
            Ok(il::Constant::new(r as u64, lhs.bits()))
        },
        il::Expression::And(ref lhs, ref rhs) => {
            let r = eval(lhs)?.value() & eval(rhs)?.value();
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Or(ref lhs, ref rhs) => {
            let r = eval(lhs)?.value() | eval(rhs)?.value();
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Xor(ref lhs, ref rhs) => {
            let r = eval(lhs)?.value() ^ eval(rhs)?.value();
            Ok(il::Constant::new(r, lhs.bits()))
        },
        il::Expression::Shl(ref lhs, ref rhs) => {
            let rhs = eval(rhs)?;
            // Shifting by the full width (or more) always yields zero. The
            // guard must be `>=`: `wrapping_shl` masks the shift amount, so
            // letting a shift of exactly 64 through would be a no-op.
            if rhs.value() >= lhs.bits() as u64 {
                Ok(il::Constant::new(0, lhs.bits()))
            }
            else {
                let r = eval(lhs)?.value().wrapping_shl(rhs.value() as u32);
                Ok(il::Constant::new(r, lhs.bits()))
            }
        },
        il::Expression::Shr(ref lhs, ref rhs) => {
            let rhs = eval(rhs)?;
            // Same `>=` boundary reasoning as Shl.
            if rhs.value() >= lhs.bits() as u64 {
                Ok(il::Constant::new(0, lhs.bits()))
            }
            else {
                let r = eval(lhs)?.value().wrapping_shr(rhs.value() as u32);
                Ok(il::Constant::new(r, lhs.bits()))
            }
        },
        // Comparisons produce a 1-bit constant: 1 for true, 0 for false.
        il::Expression::Cmpeq(ref lhs, ref rhs) => {
            if eval(lhs)?.value() == eval(rhs)?.value() {
                Ok(il::Constant::new(1, 1))
            }
            else {
                Ok(il::Constant::new(0, 1))
            }
        },
        il::Expression::Cmpneq(ref lhs, ref rhs) => {
            if eval(lhs)?.value() != eval(rhs)?.value() {
                Ok(il::Constant::new(1, 1))
            }
            else {
                Ok(il::Constant::new(0, 1))
            }
        },
        il::Expression::Cmplts(ref lhs, ref rhs) => {
            // Signed comparison via sign extension to i64.
            if sign_extend(&eval(lhs)?) < sign_extend(&eval(rhs)?) {
                Ok(il::Constant::new(1, 1))
            }
            else {
                Ok(il::Constant::new(0, 1))
            }
        },
        il::Expression::Cmpltu(ref lhs, ref rhs) => {
            if eval(lhs)?.value() < eval(rhs)?.value() {
                Ok(il::Constant::new(1, 1))
            }
            else {
                Ok(il::Constant::new(0, 1))
            }
        },
        // Both rely on `il::Constant::new` truncating the value to `bits`.
        il::Expression::Zext(bits, ref rhs) |
        il::Expression::Trun(bits, ref rhs) => {
            Ok(il::Constant::new(eval(rhs)?.value(), bits))
        },
        il::Expression::Sext(bits, ref rhs) => {
            let rhs = eval(rhs)?;
            // Sign-extend when the source sign bit is set.
            if rhs.value() >> (rhs.bits() - 1) == 1 {
                // Guard the mask computation: `1 << 64` overflows for
                // 64-bit sources (which need no extension bits anyway).
                let mask = if rhs.bits() >= 64 {
                    0
                } else {
                    !((1u64 << rhs.bits()) - 1)
                };
                Ok(il::Constant::new(rhs.value() | mask, bits))
            }
            else {
                Ok(il::Constant::new(rhs.value(), bits))
            }
        }
    }
}
#[test]
fn add() {
    // Evaluate `a + b` at 32 bits and compare against `expected`.
    let check = |a: u64, b: u64, expected: u64| {
        let expr = il::Expression::add(il::expr_const(a, 32), il::expr_const(b, 32)).unwrap();
        assert_eq!(eval(&expr).unwrap(), il::const_(expected, 32));
    };
    check(0x570000, 0x703c, 0x57703c);
    // Addition wraps at the constant's width.
    check(0xffffffff, 0x1, 0);
}
#[test]
fn cmplts() {
    // Evaluate the signed comparison `a < b` at 32 bits; the result is a
    // 1-bit constant (1 = true, 0 = false).
    let check = |a: u64, b: u64, expected: u64| {
        let expr = il::Expression::cmplts(il::expr_const(a, 32), il::expr_const(b, 32)).unwrap();
        assert_eq!(eval(&expr).unwrap(), il::const_(expected, 1));
    };
    // 0xffffffff is -1 when interpreted as signed 32-bit.
    check(0xffffffff, 0, 1);
    check(0, 0xffffffff, 0);
}
use std::fmt;
use std::str::Chars;
/// All token categories produced by the lexer.
#[derive(Debug, Eq, PartialEq)]
pub enum TokenKind {
    TokenEOF,        // end of input
    TokenComment,    // `//...` line comment
    TokenWhitespace, // space, tab, CR or LF (one token per character)
    TokenAssign,     // `=`
    TokenAdd,        // `+`
    TokenSub,        // `-`
    TokenNum,        // decimal integer literal
    TokenLabel,      // identifier / symbolic label
    TokenKeyword,    // reserved word (not yet emitted by `next_token`)
    TokenLParen,     // `(`
    TokenRParen,     // `)`
    TokenJDivider,   // `;` separating computation from jump mnemonic
    TokenAddress,    // `@` prefix of an A-instruction
    TokenLiteral,    // any character not covered above
}
/// A single lexed token: its category plus the text that produced it.
#[derive(Debug, Eq, PartialEq)]
pub struct Token {
    pub kind: TokenKind,
    // Original text; whitespace is stored in escaped form (e.g. "\\n").
    pub text: String,
}
impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<{:?}, {:?}>", self.kind, self.text)
}
}
/// Pull the next token off the character stream.
///
/// Whitespace and comments are returned as ordinary tokens (the caller
/// decides whether to skip them). Returns a `TokenEOF` token once the
/// stream is exhausted. Logs and panics if a comment/number/label scan
/// fails to produce a token.
pub fn next_token(cs: &mut Chars) -> Token {
    // Shared failure path for the sub-lexers.
    fn expect_token(result: Option<Token>) -> Token {
        match result {
            Some(token) => token,
            None => {
                eprintln!("Unexpected token found!");
                panic!()
            }
        }
    }
    if let Some(c) = cs.next() {
        return match c {
            '/' => expect_token(comment(cs)),
            // Whitespace text is stored escaped ("\\n", "\\t", ...).
            ' ' | '\n' | '\r' | '\t' => Token {
                kind: TokenKind::TokenWhitespace,
                text: c.escape_default().to_string(),
            },
            '+' => Token { kind: TokenKind::TokenAdd, text: c.to_string() },
            '-' => Token { kind: TokenKind::TokenSub, text: c.to_string() },
            '=' => Token { kind: TokenKind::TokenAssign, text: c.to_string() },
            // Range patterns replace the previous exhaustive listing of
            // every digit and every ASCII letter.
            '0'..='9' => expect_token(num(c, cs)),
            'a'..='z' | 'A'..='Z' => expect_token(label(c, cs)),
            '(' => Token { kind: TokenKind::TokenLParen, text: c.to_string() },
            ')' => Token { kind: TokenKind::TokenRParen, text: c.to_string() },
            ';' => Token { kind: TokenKind::TokenJDivider, text: c.to_string() },
            '@' => Token { kind: TokenKind::TokenAddress, text: c.to_string() },
            // Anything else is passed through as a literal.
            _ => Token { kind: TokenKind::TokenLiteral, text: c.to_string() },
        };
    }
    Token { kind: TokenKind::TokenEOF, text: "EOF".to_string() }
}
/// Lex a `//` comment. Called after the caller consumed the first '/'; a
/// second '/' must follow. Consumes characters through the end of the line
/// (or end of input) and returns a `TokenComment` whose text is always
/// `"//"`. Returns `None` when the next character is not '/'.
fn comment(cs: &mut Chars) -> Option<Token> {
    // Peek via a clone so a lone '/' leaves the stream untouched.
    if cs.clone().next() == Some('/') {
        // Skip to the end of the line. The previous loop spun forever on a
        // comment terminated by EOF instead of '\n'; stop on `None` too.
        while let Some(nc) = cs.next() {
            if nc == '\n' {
                break;
            }
        }
        return Some(Token {
            kind: TokenKind::TokenComment,
            text: "//".to_string(),
        });
    }
    None
}
/// Lex a decimal number starting with the already-consumed digit `c`.
///
/// Consumes digits while they continue, leaving the first non-digit in the
/// stream. The previous version returned `None` when the number ran to end
/// of input (dropping the token and panicking in the caller); a number at
/// EOF is now returned normally. Returns `None` only if `c` is not a digit.
/// NOTE(review): the value accumulates in a `u32`, so numbers above
/// `u32::MAX` overflow — same limit as before.
fn num(c: char, cs: &mut Chars) -> Option<Token> {
    let mut value: u32 = c.to_digit(10)?;
    // Peek via a clone; only advance `cs` once the peeked char is a digit.
    while let Some(nd) = cs.clone().next().and_then(|nc| nc.to_digit(10)) {
        value = value * 10 + nd;
        cs.next();
    }
    Some(Token {
        kind: TokenKind::TokenNum,
        text: value.to_string(),
    })
}
// TODO: panic if invalid characters are entered
/// Lex a label starting with the already-consumed character `c`.
///
/// Accepts alphanumerics plus '_', '.' and '$', leaving the first
/// non-label character in the stream. The previous version returned `None`
/// when the label ran to end of input (dropping the token and panicking in
/// the caller); a label at EOF is now returned normally.
fn label(c: char, cs: &mut Chars) -> Option<Token> {
    let mut text = c.to_string();
    // Peek via a clone; only advance `cs` once the peeked char is accepted.
    while let Some(nc) = cs.clone().next() {
        if nc.is_alphanumeric() || nc == '_' || nc == '.' || nc == '$' {
            text.push(nc);
            cs.next();
        } else {
            break;
        }
    }
    Some(Token {
        kind: TokenKind::TokenLabel,
        text,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Shorthand for building an expected token.
    fn tok(kind: TokenKind, text: &str) -> Token {
        Token { kind, text: text.to_string() }
    }
    /// Lex `input` until EOF and compare the full token stream.
    fn assert_tokens(input: &str, expected: Vec<Token>) {
        let mut cs = input.chars();
        let mut actual: Vec<Token> = Vec::new();
        loop {
            let t = next_token(&mut cs);
            if t.kind == TokenKind::TokenEOF {
                break;
            }
            actual.push(t);
        }
        assert_eq!(expected, actual);
    }
    #[test]
    fn lex_comment() {
        assert_tokens(
            "//this is a comment\n",
            vec![tok(TokenKind::TokenComment, "//")],
        );
    }
    #[test]
    fn lex_a_instruction_numeric_address() {
        assert_tokens(
            "@123\n",
            vec![
                tok(TokenKind::TokenAddress, "@"),
                tok(TokenKind::TokenNum, "123"),
                tok(TokenKind::TokenWhitespace, "\\n"),
            ],
        );
    }
    #[test]
    fn lex_a_instruction_symbolic_address() {
        assert_tokens(
            "@this_is_4_t3$t_lAb3L\n",
            vec![
                tok(TokenKind::TokenAddress, "@"),
                tok(TokenKind::TokenLabel, "this_is_4_t3$t_lAb3L"),
                tok(TokenKind::TokenWhitespace, "\\n"),
            ],
        );
    }
    #[test]
    fn lex_d_instruction_no_jump() {
        assert_tokens(
            "D=D-M\n",
            vec![
                tok(TokenKind::TokenLabel, "D"),
                tok(TokenKind::TokenAssign, "="),
                tok(TokenKind::TokenLabel, "D"),
                tok(TokenKind::TokenSub, "-"),
                tok(TokenKind::TokenLabel, "M"),
                tok(TokenKind::TokenWhitespace, "\\n"),
            ],
        );
    }
    #[test]
    fn lex_d_instruction_jump() {
        assert_tokens(
            "D;JGT\n",
            vec![
                tok(TokenKind::TokenLabel, "D"),
                tok(TokenKind::TokenJDivider, ";"),
                tok(TokenKind::TokenLabel, "JGT"),
                tok(TokenKind::TokenWhitespace, "\\n"),
            ],
        );
    }
}
|
//! linux_raw syscalls supporting `rustix::runtime`.
//!
//! # Safety
//!
//! See the `rustix::backend` module documentation for details.
#![allow(unsafe_code)]
#![allow(clippy::undocumented_unsafe_blocks)]
use crate::backend::c;
#[cfg(target_arch = "x86")]
use crate::backend::conv::by_mut;
use crate::backend::conv::{
by_ref, c_int, c_uint, ret, ret_c_int, ret_c_int_infallible, ret_error, size_of, zero,
};
#[cfg(feature = "fs")]
use crate::fd::BorrowedFd;
use crate::ffi::CStr;
#[cfg(feature = "fs")]
use crate::fs::AtFlags;
use crate::io;
use crate::pid::Pid;
use crate::runtime::{How, Sigaction, Siginfo, Sigset, Stack};
use crate::signal::Signal;
use crate::timespec::Timespec;
use crate::utils::option_as_ptr;
use core::mem::MaybeUninit;
#[cfg(target_pointer_width = "32")]
use linux_raw_sys::general::__kernel_old_timespec;
use linux_raw_sys::general::kernel_sigset_t;
use linux_raw_sys::prctl::PR_SET_NAME;
#[cfg(target_arch = "x86_64")]
use {crate::backend::conv::ret_infallible, linux_raw_sys::general::ARCH_SET_FS};
#[inline]
pub(crate) unsafe fn fork() -> io::Result<Option<Pid>> {
    // Implemented via `clone` with only SIGCHLD set, which is equivalent
    // to a classic fork. `Pid::from_raw` presumably maps the child's 0
    // return to `None` — confirm against `Pid`'s definition.
    let pid = ret_c_int(syscall_readonly!(
        __NR_clone,
        c_int(c::SIGCHLD),
        zero(),
        zero(),
        zero(),
        zero()
    ))?;
    Ok(Pid::from_raw(pid))
}
#[cfg(feature = "fs")]
// Execute the program at `path`, resolved relative to `dirfd`. On success
// the call does not return; the `io::Errno` return value is therefore
// always the failure code.
pub(crate) unsafe fn execveat(
    dirfd: BorrowedFd<'_>,
    path: &CStr,
    args: *const *const u8,
    env_vars: *const *const u8,
    flags: AtFlags,
) -> io::Errno {
    ret_error(syscall_readonly!(
        __NR_execveat,
        dirfd,
        path,
        args,
        env_vars,
        flags
    ))
}
// Execute the program at `path`. On success the call does not return; the
// returned `io::Errno` is always the failure code.
pub(crate) unsafe fn execve(
    path: &CStr,
    args: *const *const u8,
    env_vars: *const *const u8,
) -> io::Errno {
    ret_error(syscall_readonly!(__NR_execve, path, args, env_vars))
}
// Thread-local-storage and thread-identity syscalls, grouped because they
// are only used by the runtime's thread setup/teardown paths.
pub(crate) mod tls {
    use super::*;
    #[cfg(target_arch = "x86")]
    use crate::backend::runtime::tls::UserDesc;
    // x86: install a GDT entry for TLS; the kernel writes the allocated
    // entry number back into `u_info`, hence `by_mut`.
    #[cfg(target_arch = "x86")]
    #[inline]
    pub(crate) unsafe fn set_thread_area(u_info: &mut UserDesc) -> io::Result<()> {
        ret(syscall!(__NR_set_thread_area, by_mut(u_info)))
    }
    // ARM: set the TLS register via the private ARM syscall.
    #[cfg(target_arch = "arm")]
    #[inline]
    pub(crate) unsafe fn arm_set_tls(data: *mut c::c_void) -> io::Result<()> {
        ret(syscall_readonly!(__ARM_NR_set_tls, data))
    }
    // x86-64: point the %fs segment base at `data` (infallible for a
    // valid address, hence no `Result`).
    #[cfg(target_arch = "x86_64")]
    #[inline]
    pub(crate) unsafe fn set_fs(data: *mut c::c_void) {
        ret_infallible(syscall_readonly!(
            __NR_arch_prctl,
            c_uint(ARCH_SET_FS),
            data
        ))
    }
    // Register the address the kernel clears/wakes on thread exit;
    // returns the calling thread's tid.
    #[inline]
    pub(crate) unsafe fn set_tid_address(data: *mut c::c_void) -> Pid {
        let tid: i32 = ret_c_int_infallible(syscall_readonly!(__NR_set_tid_address, data));
        Pid::from_raw_unchecked(tid)
    }
    // Set the calling thread's name (PR_SET_NAME truncates to 15 bytes
    // plus NUL per the prctl contract — not enforced here).
    #[inline]
    pub(crate) unsafe fn set_thread_name(name: &CStr) -> io::Result<()> {
        ret(syscall_readonly!(__NR_prctl, c_uint(PR_SET_NAME), name))
    }
    // Terminate only the calling thread (not the whole process).
    #[inline]
    pub(crate) fn exit_thread(code: c::c_int) -> ! {
        unsafe { syscall_noreturn!(__NR_exit, c_int(code)) }
    }
}
#[inline]
// Install `new` as the handler for `signal` (or just query when `new` is
// `None` — `option_as_ptr` yields a null pointer) and return the previous
// action.
pub(crate) unsafe fn sigaction(signal: Signal, new: Option<Sigaction>) -> io::Result<Sigaction> {
    let mut old = MaybeUninit::<Sigaction>::uninit();
    let new = option_as_ptr(new.as_ref());
    ret(syscall!(
        __NR_rt_sigaction,
        signal,
        new,
        &mut old,
        // rt_sigaction requires the sigset size as its 4th argument.
        size_of::<kernel_sigset_t, _>()
    ))?;
    // The kernel filled `old` on success.
    Ok(old.assume_init())
}
#[inline]
// Install `new` as the alternate signal stack (or just query when `None`)
// and return the previous stack configuration.
pub(crate) unsafe fn sigaltstack(new: Option<Stack>) -> io::Result<Stack> {
    let mut old = MaybeUninit::<Stack>::uninit();
    let new = option_as_ptr(new.as_ref());
    ret(syscall!(__NR_sigaltstack, new, &mut old))?;
    Ok(old.assume_init())
}
#[inline]
// Send `sig` to the specific thread `tid` (thread-directed, unlike `kill`).
pub(crate) unsafe fn tkill(tid: Pid, sig: Signal) -> io::Result<()> {
    ret(syscall_readonly!(__NR_tkill, tid, sig))
}
#[inline]
// Change the signal mask per `how` using `new` (or just query when `None`)
// and return the previous mask.
pub(crate) unsafe fn sigprocmask(how: How, new: Option<&Sigset>) -> io::Result<Sigset> {
    let mut old = MaybeUninit::<Sigset>::uninit();
    let new = option_as_ptr(new);
    ret(syscall!(
        __NR_rt_sigprocmask,
        how,
        new,
        &mut old,
        size_of::<kernel_sigset_t, _>()
    ))?;
    Ok(old.assume_init())
}
#[inline]
// Block until one of the signals in `set` is pending and return it.
// Implemented via `rt_sigtimedwait` with null info and null timeout
// (i.e. wait forever, discard the siginfo).
pub(crate) fn sigwait(set: &Sigset) -> io::Result<Signal> {
    unsafe {
        match Signal::from_raw(ret_c_int(syscall_readonly!(
            __NR_rt_sigtimedwait,
            by_ref(set),
            zero(),
            zero(),
            size_of::<kernel_sigset_t, _>()
        ))?) {
            Some(signum) => Ok(signum),
            // The kernel returned a signal number `Signal` can't represent.
            None => Err(io::Errno::NOTSUP),
        }
    }
}
#[inline]
// Block until one of the signals in `set` is pending and return its
// `Siginfo` (the raw signal number from the syscall is discarded).
pub(crate) fn sigwaitinfo(set: &Sigset) -> io::Result<Siginfo> {
    let mut info = MaybeUninit::<Siginfo>::uninit();
    unsafe {
        let _signum = ret_c_int(syscall!(
            __NR_rt_sigtimedwait,
            by_ref(set),
            &mut info,
            zero(),
            size_of::<kernel_sigset_t, _>()
        ))?;
        Ok(info.assume_init())
    }
}
#[inline]
// Wait (up to `timeout`, or forever when `None`) for a signal in `set`
// and return its `Siginfo`.
pub(crate) fn sigtimedwait(set: &Sigset, timeout: Option<Timespec>) -> io::Result<Siginfo> {
    let mut info = MaybeUninit::<Siginfo>::uninit();
    let timeout_ptr = option_as_ptr(timeout.as_ref());
    // `rt_sigtimedwait_time64` was introduced in Linux 5.1. The old
    // `rt_sigtimedwait` syscall is not y2038-compatible on 32-bit
    // architectures.
    #[cfg(target_pointer_width = "32")]
    unsafe {
        match ret_c_int(syscall!(
            __NR_rt_sigtimedwait_time64,
            by_ref(set),
            &mut info,
            timeout_ptr,
            size_of::<kernel_sigset_t, _>()
        )) {
            Ok(_signum) => (),
            // Pre-5.1 kernel: fall back to the 32-bit-time syscall.
            Err(io::Errno::NOSYS) => sigtimedwait_old(set, timeout, &mut info)?,
            Err(err) => return Err(err),
        }
        Ok(info.assume_init())
    }
    // 64-bit targets: the classic syscall already uses 64-bit time.
    #[cfg(target_pointer_width = "64")]
    unsafe {
        let _signum = ret_c_int(syscall!(
            __NR_rt_sigtimedwait,
            by_ref(set),
            &mut info,
            timeout_ptr,
            size_of::<kernel_sigset_t, _>()
        ))?;
        Ok(info.assume_init())
    }
}
#[cfg(target_pointer_width = "32")]
// Fallback for pre-5.1 32-bit kernels: convert the 64-bit `Timespec` to
// the old 32-bit-seconds layout and call the classic `rt_sigtimedwait`.
// Fails with `OVERFLOW` if the seconds don't fit (post-y2038 values).
unsafe fn sigtimedwait_old(
    set: &Sigset,
    timeout: Option<Timespec>,
    info: &mut MaybeUninit<Siginfo>,
) -> io::Result<()> {
    let old_timeout = match timeout {
        Some(timeout) => Some(__kernel_old_timespec {
            tv_sec: timeout.tv_sec.try_into().map_err(|_| io::Errno::OVERFLOW)?,
            tv_nsec: timeout.tv_nsec as _,
        }),
        None => None,
    };
    let old_timeout_ptr = option_as_ptr(old_timeout.as_ref());
    let _signum = ret_c_int(syscall!(
        __NR_rt_sigtimedwait,
        by_ref(set),
        info,
        old_timeout_ptr,
        size_of::<kernel_sigset_t, _>()
    ))?;
    Ok(())
}
#[inline]
// Terminate every thread in the process with exit status `code`.
pub(crate) fn exit_group(code: c::c_int) -> ! {
    unsafe { syscall_noreturn!(__NR_exit_group, c_int(code)) }
}
|
use frame_system as system;
use frame_support::assert_ok;
use move_core_types::identifier::Identifier;
use move_core_types::language_storage::ModuleId;
use move_core_types::language_storage::StructTag;
use move_vm::data::*;
use move_vm_runtime::data_cache::RemoteCache;
use serde::Deserialize;
use sp_mvm::storage::MoveVmStorage;
mod common;
use common::assets::*;
use common::mock::*;
use common::utils::*;
// LCS-deserializable mirror of the Move `Store::U64` resource layout,
// used to decode the blob read back from VM storage.
#[derive(Deserialize)]
struct StoreU64 {
    pub val: u64,
}
/// Publish `bc` as module `mod_name` on behalf of `signer`, then assert
/// that exactly that bytecode landed in VM storage.
fn call_publish_module(signer: <Test as system::Trait>::AccountId, bc: Vec<u8>, mod_name: &str) {
    // execute VM for publish module:
    let publish_result = Mvm::publish(Origin::signed(signer), bc.clone());
    eprintln!("publish_module result: {:?}", publish_result);
    assert_ok!(publish_result);
    // check storage:
    let expected_id = ModuleId::new(to_move_addr(signer), Identifier::new(mod_name).unwrap());
    let state = State::new(Mvm::move_vm_storage(), MockOracle(None));
    assert_eq!(bc, state.get_module(&expected_id).unwrap().unwrap());
}
/// Run the `StoreU64` script transaction as `origin`, then assert the
/// stored `Store::U64` resource holds the expected value (42).
fn call_execute_script(origin: Origin) {
    // execute VM tx:
    let exec_result = Mvm::execute(origin, UserTx::StoreU64.bc().to_vec());
    eprintln!("execute_script result: {:?}", exec_result);
    assert_ok!(exec_result);
    // check storage:
    let state = State::new(Mvm::move_vm_storage(), MockOracle(None));
    let tag = StructTag {
        address: origin_move_addr(),
        module: Identifier::new(UserMod::Store.name()).unwrap(),
        name: Identifier::new("U64").unwrap(),
        type_params: vec![],
    };
    let blob = state
        .get_resource(&origin_move_addr(), &tag)
        .unwrap()
        .unwrap();
    let stored: StoreU64 = lcs::from_bytes(&blob).unwrap();
    assert_eq!(42, stored.val);
}
// Publishing the `Store` module must succeed and round-trip its bytecode.
#[test]
fn publish_module() {
    new_test_ext().execute_with(|| {
        let origin = origin_ps_acc();
        let module = UserMod::Store;
        call_publish_module(origin, module.bc().to_vec(), module.name());
    });
}
// Publishing `Store` and then executing the `StoreU64` script must persist
// the expected resource value.
#[test]
fn execute_script() {
    new_test_ext().execute_with(|| {
        let origin = origin_ps_acc();
        let signer = Origin::signed(origin);
        let module = UserMod::Store;
        call_publish_module(origin, module.bc().to_vec(), module.name());
        call_execute_script(signer);
    });
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::collections::btree_map::BTreeMap;
use alloc::vec::Vec;
use spin::Mutex;
use alloc::sync::Arc;
use core::ops::Deref;
use super::super::qlib::linux_def::*;
use super::super::qlib::common::*;
use super::super::fs::file::*;
use super::super::id_mgr::*;
// Per-descriptor flags. Currently only close-on-exec is tracked.
#[derive(Clone, Default, Debug)]
pub struct FDFlags {
    pub CloseOnExec: bool,
}
impl FDFlags {
    // Linux file-status-flag representation: O_CLOEXEC when set, else 0.
    pub fn ToLinuxFileFlags(&self) -> i32 {
        if self.CloseOnExec { Flags::O_CLOEXEC } else { 0 }
    }
    // Linux fd-flag representation: FD_CLOEXEC when set, else 0.
    pub fn ToLinuxFDFlags(&self) -> u32 {
        if self.CloseOnExec { LibcConst::FD_CLOEXEC as u32 } else { 0 }
    }
}
// One fd-table entry: the open file plus its per-descriptor flags.
#[derive(Clone)]
pub struct Descriptor {
    pub file: File,
    pub flags: FDFlags,
}
// Shared, lockable fd table paired with a unique id (second tuple field);
// clones share the same underlying table.
#[derive(Clone, Default)]
pub struct FDTable((Arc<Mutex<FDTableInternal>>, u64));
// Deref to the inner `Arc<Mutex<_>>` so callers can `.lock()` directly.
impl Deref for FDTable {
    type Target = Arc<Mutex<FDTableInternal>>;
    fn deref(&self) -> &Arc<Mutex<FDTableInternal>> {
        &(self.0).0
    }
}
impl FDTable {
    /// Unique identity of this table.
    pub fn ID(&self) -> u64 {
        (self.0).1
    }
    /// Deep-copy the descriptor table under a freshly allocated id.
    pub fn Fork(&self) -> FDTable {
        let forked = self.lock().Fork();
        FDTable((Arc::new(Mutex::new(forked)), UniqueID()))
    }
    /// Drop every descriptor from the table.
    pub fn Clear(&self) {
        self.lock().descTbl.clear();
    }
    /// Number of descriptors currently installed.
    pub fn Count(&self) -> usize {
        self.lock().descTbl.len()
    }
    /// Number of live references to the shared internal table.
    pub fn RefCount(&self) -> usize {
        Arc::strong_count(&(self.0).0)
    }
}
// The actual fd map plus `next`, a hint for the lowest fd number to try
// when allocating a new descriptor.
pub struct FDTableInternal {
    pub next: i32,
    pub descTbl: BTreeMap<i32, Descriptor>,
}
// Default is simply an empty table.
impl Default for FDTableInternal {
    fn default() -> Self {
        return Self::New();
    }
}
impl FDTableInternal {
    // Empty table; fd allocation starts scanning from 0.
    pub fn New() -> Self {
        return Self {
            next: 0,
            descTbl: BTreeMap::new(),
        }
    }
    // Debug dump: every fd with its file's Arc refcount and unique id.
    pub fn Print(&self) {
        for (id, d) in &self.descTbl {
            info!("FDTableInternal::Print [{}], refcount is {}, id is {}",
                  *id, Arc::strong_count(&d.file.0), d.file.0.UniqueId)
        }
    }
    // Number of installed descriptors.
    pub fn Size(&self) -> usize {
        return self.descTbl.len();
    }
    // Install (or overwrite) `fd` -> (file, flags).
    fn set(&mut self, fd: i32, file: &File, flags: &FDFlags) {
        let fdesc = Descriptor {
            file: file.clone(),
            flags: flags.clone(),
        };
        self.descTbl.insert(fd, fdesc);
    }
    // Allocate a single new fd numbered >= `fd` for `file`.
    pub fn NewFDFrom(&mut self, fd: i32, file: &File, flags: &FDFlags) -> Result<i32> {
        let fds = self.NewFDs(fd, &[file.clone()], flags)?;
        return Ok(fds[0])
    }
    // Allocate one fd per file, all numbered >= `fd`, skipping fds that are
    // already in use. All-or-nothing: on failure no fd is installed.
    pub fn NewFDs(&mut self, fd: i32, files: &[File], flags: &FDFlags) -> Result<Vec<i32>> {
        if fd < 0 {
            return Err(Error::SysError(SysErr::EINVAL))
        }
        let mut fd = fd;
        // Start at the allocation hint when it is past the requested minimum.
        if fd < self.next {
            fd = self.next;
        }
        let end = core::i32::MAX;
        let mut fds = Vec::new();
        let mut i = fd;
        // Linear scan for free slots until every file has an fd.
        while i < end && fds.len() < files.len() {
            let fd = self.descTbl.get(&i);
            match fd {
                None => {
                    self.set(i, &files[fds.len()], flags);
                    fds.push(i);
                }
                _ => ()
            }
            i += 1;
        }
        //fail, undo the change
        if fds.len() < files.len() {
            for i in &fds {
                self.descTbl.remove(i);
            }
            return Err(Error::SysError(SysErr::EMFILE))
        }
        // Only advance the hint if the scan actually started at it.
        if fd == self.next {
            self.next = fds[fds.len() - 1] + 1;
        }
        return Ok(fds)
    }
    // Install `file` at exactly `fd`, silently replacing any existing entry.
    pub fn NewFDAt(&mut self, fd: i32, file: &File, flags: &FDFlags) -> Result<()> {
        if fd < 0 {
            return Err(Error::SysError(SysErr::EBADF))
        }
        self.set(fd, file, flags);
        return Ok(())
    }
    // dup(2): clone `fd` onto the lowest available fd (>= 0), keeping flags.
    pub fn Dup(&mut self, fd: i32) -> Result<i32> {
        if fd < 0 {
            return Err(Error::SysError(SysErr::EBADF))
        }
        let (f, flags) = self.Get(fd)?;
        return self.NewFDFrom(0, &f, &flags);
    }
    // dup2(2): clone `oldfd` onto `newfd`, closing `newfd` first.
    pub fn Dup2(&mut self, oldfd: i32, newfd: i32) -> Result<i32> {
        if oldfd < 0 {
            return Err(Error::SysError(SysErr::EBADF))
        }
        if newfd < 0 {
            return Err(Error::SysError(SysErr::EBADF))
        }
        self.Remove(newfd);
        let (f, flags) = self.Get(oldfd)?;
        self.NewFDAt(newfd, &f, &flags)?;
        return Ok(newfd)
    }
    // dup3(2): like Dup2 but the new descriptor's close-on-exec flag is
    // taken from `flags` instead of inherited.
    pub fn Dup3(&mut self, oldfd: i32, newfd: i32, flags: i32) -> Result<i32> {
        if oldfd < 0 {
            return Err(Error::SysError(SysErr::EBADF))
        }
        if newfd < 0 {
            return Err(Error::SysError(SysErr::EBADF))
        }
        self.Remove(newfd);
        let closeOnExec = Flags(flags).CloseOnExec();
        let (f, mut flags) = self.Get(oldfd)?;
        flags.CloseOnExec = closeOnExec;
        self.NewFDAt(newfd, &f, &flags)?;
        return Ok(newfd)
    }
    // Replace the per-descriptor flags of an existing fd.
    pub fn SetFlags(&mut self, fd: i32, flags: &FDFlags) -> Result<()> {
        if fd < 0 {
            return Err(Error::SysError(SysErr::EBADF))
        }
        let file = self.descTbl.get_mut(&fd);
        match file {
            None => return Err(Error::SysError(SysErr::EBADF)),
            Some(fdesc) => fdesc.flags = flags.clone(),
        }
        return Ok(())
    }
    // All installed fd numbers, ascending (BTreeMap iteration order).
    pub fn GetFDs(&self) -> Vec<i32> {
        let mut fds = Vec::with_capacity(self.descTbl.len());
        for (fd, _) in &self.descTbl {
            fds.push(*fd)
        }
        return fds;
    }
    // All open files, in fd order.
    pub fn GetFiles(&self) -> Vec<File> {
        let mut files = Vec::with_capacity(self.descTbl.len());
        for (_, file) in &self.descTbl {
            files.push(file.file.clone())
        }
        return files;
    }
    // Look up `fd`, returning clones of its file and flags.
    pub fn Get(&self, fd: i32) -> Result<(File, FDFlags)> {
        let f = self.descTbl.get(&fd);
        match f {
            None => return Err(Error::SysError(SysErr::EBADF)),
            Some(f) => Ok((f.file.clone(), f.flags.clone())),
        }
    }
    // Deep-copy the table (entries are cloned; files share refcounts).
    pub fn Fork(&self) -> FDTableInternal {
        let mut tbl = FDTableInternal {
            next: self.next,
            descTbl: BTreeMap::new(),
        };
        for (fd, file) in &self.descTbl {
            tbl.set(*fd, &file.file, &file.flags)
        }
        return tbl
    }
    // Remove `fd` and return its file, lowering the allocation hint so the
    // freed slot can be reused.
    pub fn Remove(&mut self, fd: i32) -> Option<File> {
        if fd < 0 {
            return None;
        }
        if fd < self.next {
            self.next = fd;
        }
        let file = self.descTbl.remove(&fd);
        match file {
            None => return None,
            Some(f) => return Some(f.file)
        }
    }
    // exec(2) path: close every descriptor marked close-on-exec.
    // (Collect first: the map can't be mutated while iterated.)
    pub fn RemoveCloseOnExec(&mut self) {
        let mut removed = Vec::new();
        for (fd, desc) in &self.descTbl {
            if desc.flags.CloseOnExec {
                removed.push(*fd);
            }
        }
        for fd in &removed {
            let desc = self.descTbl.remove(fd).unwrap();
            inotifyFileClose(&desc.file);
        }
    }
    // Close every descriptor, notifying inotify for each file.
    pub fn RemoveAll(&mut self) {
        let mut removed = Vec::new();
        for (fd, _) in &self.descTbl {
            removed.push(*fd);
        }
        for fd in &removed {
            let desc = self.descTbl.remove(fd).unwrap();
            inotifyFileClose(&desc.file);
        }
    }
}
// Hook invoked when a descriptor's file is closed; intended to fire
// inotify IN_CLOSE events.
pub fn inotifyFileClose(_f: &File) {
    //todo: will implement it later
}
use std::collections::HashSet;
use serde::{Deserialize, Serialize};
use crate::data::CrateData;
/// Top-level configuration: the category definitions plus the whitelisted
/// crates that populate them.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub categories: Vec<Category>,
    /// The whitelist of Category crates
    pub crates: Vec<Crate>,
}
/// A whitelisted crate plus the names of the categories it belongs to.
#[derive(Debug, Clone, Serialize, Deserialize)]
// TODO: implement crates overrides, like:
// - repository
// - wiki?!
pub struct Crate {
    // Flattened so the crate's own fields serialize at this level.
    #[serde(flatten)]
    pub crate_data: CrateData,
    pub categories: HashSet<String>,
}
/// One curated category of crates.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Category {
    /// The Category name
    pub name: String,
    // Optional in the config file; defaults to empty.
    #[serde(default)]
    pub description: String,
    pub icon: String,
    /// The crates.io canonical category slug
    /// if such category exist on crates.io
    pub canonical_slug: Option<String>,
    /// The crates that should be included
    /// or excluded in the category.
    #[serde(flatten)]
    pub filtered: FilteredCrates,
}
/// The list of whitelisted and blacklisted crates
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FilteredCrates {
    /// Included in the category, because they don't have the
    /// set category.
    /// This is populated after loading the categories from config.
    // Never read from the config file (`skip_deserializing`); computed later.
    #[serde(default, skip_deserializing)]
    pub whitelist: HashSet<String>,
    /// Excluded from the category, because they are placed in
    /// the category wrongly.
    #[serde(default)]
    pub blacklist: HashSet<String>,
}
|
pub mod typing;
|
#[macro_use]
extern crate bench_utils;
#[cfg(any(
feature = "commitment",
feature = "merkle_tree",
feature = "prf",
feature = "signature",
feature = "vrf"
))]
#[macro_use]
extern crate derivative;
pub mod crh;
pub use self::crh::*;
#[cfg(feature = "commitment")]
pub mod commitment;
#[cfg(feature = "commitment")]
pub use self::commitment::*;
#[cfg(feature = "merkle_tree")]
pub mod merkle_tree;
#[cfg(feature = "merkle_tree")]
pub use self::merkle_tree::*;
#[cfg(feature = "prf")]
pub mod prf;
#[cfg(feature = "prf")]
pub use self::prf::*;
#[cfg(feature = "signature")]
pub mod signature;
#[cfg(feature = "signature")]
pub use self::signature::*;
#[cfg(feature = "vrf")]
pub mod vrf;
#[cfg(feature = "vrf")]
pub use self::vrf::*;
pub type Error = Box<dyn std::error::Error>;
/// Errors raised by the cryptographic primitives in this crate.
#[derive(Debug)]
pub enum CryptoError {
    IncorrectInputLength(String, usize), // element name, offending length
    InvalidElement(String),              // element name
    NotPrimeOrder(String),               // element name
    FailedVerification,
}
impl std::fmt::Display for CryptoError {
    /// Human-readable rendering of the error. Writes straight into the
    /// formatter instead of allocating an intermediate `String` first;
    /// the rendered text is unchanged.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CryptoError::IncorrectInputLength(elem, len) => {
                write!(f, "{} length is wrong: {}", elem, len)
            }
            CryptoError::InvalidElement(elem) => write!(f, "{} is invalid", elem),
            CryptoError::NotPrimeOrder(elem) => write!(f, "element {} is not prime order", elem),
            CryptoError::FailedVerification => write!(f, "verification failed"),
        }
    }
}
impl std::error::Error for CryptoError {
    // These errors are always root causes; there is no underlying source.
    #[inline]
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        None
    }
}
/// Return the number of leading bits to skip in a field element belonging to a field
/// 'from' having `modulus_from` bits in order to safely convert it into a field element
/// belonging to a field 'to' having `modulus_to` bits.
pub fn compute_truncation_size(modulus_from: i32, modulus_to: i32) -> usize {
    let moduli_diff = modulus_from - modulus_to;
    let to_skip = if moduli_diff > 0 {
        // Shrinking: skip the surplus bits plus one guard bit.
        moduli_diff + 1
    } else if moduli_diff == 0 {
        // Same width: one guard bit is still required.
        1
    } else {
        // Growing: every source bit fits, nothing to skip.
        0
    };
    to_skip as usize
}
use algebra::{
PrimeField, FpParameters,
};
/// Return the number of bytes to skip in a little-endian byte order representation
/// of a field element belonging to field `F`.
#[allow(dead_code)]
pub fn compute_bytes_truncation_size<F: PrimeField>() -> usize {
    // Bytes of the full big-integer representation (modulus bits padded by
    // the shave bits up to the repr size).
    let bigint_bytes = (F::Params::MODULUS_BITS + F::Params::REPR_SHAVE_BITS)/8;
    // Bytes that are guaranteed to fit in any element of `F`.
    let safe_bytes = F::Params::CAPACITY/8;
    (bigint_bytes - safe_bytes) as usize
}
//! Create an associated type for traits and trait implementations.
use serde::{Deserialize, Serialize};
use tera::{Context, Tera};
use crate::traits::SrcCode;
use crate::{internal, Attribute, SrcCodeVec};
/// Represent the declaration of an associated type in a trait
/// (e.g. `type Item: Clone;`).
#[derive(Serialize, Deserialize, Default, Clone)]
pub struct AssociatedTypeDeclaration {
    name: String,
    // Trait bounds rendered after a colon, joined with ` + `.
    traits: Vec<String>,
    // Attributes emitted above the `type` line.
    attributes: Vec<Attribute>,
}
impl AssociatedTypeDeclaration {
    /// Create a new `AssociatedTypeDeclaration` with no bounds or
    /// attributes; add them via the `TraitBounds`/`Attributes` traits.
    pub fn new(name: impl ToString) -> Self {
        Self {
            name: name.to_string(),
            ..Self::default()
        }
    }
}
// Expose the attribute list for the shared attribute-building helpers.
impl internal::Attributes for AssociatedTypeDeclaration {
    fn attributes_mut(&mut self) -> &mut Vec<Attribute> {
        &mut self.attributes
    }
}
// Expose the trait-bound list for the shared bound-building helpers.
impl internal::TraitBounds for AssociatedTypeDeclaration {
    fn trait_bounds_mut(&mut self) -> &mut Vec<String> {
        &mut self.traits
    }
}
impl SrcCode for AssociatedTypeDeclaration {
    /// Render `type Name[: Bound + Bound];` preceded by any attributes.
    fn generate(&self) -> String {
        // Tera template; the join separator inside it is a literal newline
        // so each attribute lands on its own line.
        let template = r#"
{{ attributes | join(sep="
") }}
type {{ self.name }}{% if has_traits %}: {{ self.traits | join(sep=" + ") }}{% endif %};
"#;
        let mut context = Context::new();
        context.insert("self", &self);
        context.insert("has_traits", &!self.traits.is_empty());
        context.insert("attributes", &self.attributes.to_src_vec());
        // Static template + controlled context: rendering cannot fail.
        Tera::one_off(template, &context, false).unwrap()
    }
}
/// Represent the definition of an associated type in a trait implementation
/// (e.g. `type Item = String;`).
#[derive(Serialize, Deserialize, Default, Clone)]
pub struct AssociatedTypeDefinition {
    name: String,
    // The concrete type assigned to the associated type.
    implementer: String,
    attributes: Vec<Attribute>,
}
impl AssociatedTypeDefinition {
/// Create a new `AssociatedTypeDefinition`
pub fn new(name: impl ToString, implementer: impl ToString) -> Self {
AssociatedTypeDefinition {
name: name.to_string(),
implementer: implementer.to_string(),
..Self::default()
}
}
}
impl internal::Attributes for AssociatedTypeDefinition {
    /// Expose the attribute list so the shared attribute-builder methods work.
    fn attributes_mut(&mut self) -> &mut Vec<Attribute> {
        &mut self.attributes
    }
}
impl SrcCode for AssociatedTypeDefinition {
    /// Render this definition (`type Name = Implementer;`) via a one-off
    /// Tera template.
    fn generate(&self) -> String {
        let template = r#"
{{ attributes | join(sep="
") }}
type {{ self.name }} = {{ self.implementer }};
"#;
        let mut ctx = Context::new();
        ctx.insert("attributes", &self.attributes.to_src_vec());
        ctx.insert("self", &self);
        Tera::one_off(template, &ctx, false).unwrap()
    }
}
|
use crate::error::Result;
use crate::proto::{Proto, Request};
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::rc::Rc;
use std::time::Duration;
/// Interface for querying wireless-LAN information from a device.
pub trait Wlan {
    /// Scan for nearby access points.
    ///
    /// `refresh` asks the device for a fresh scan instead of cached data;
    /// `timeout` bounds the scan duration (implementation-defined default
    /// when `None`).
    fn get_scan_info(
        &mut self,
        refresh: bool,
        timeout: Option<Duration>,
    ) -> Result<Vec<AccessPoint>>;
}
/// Protocol-backed client for the device's `netif` command namespace.
pub(crate) struct Netif {
    // Request namespace string sent with every command ("netif").
    ns: String,
    // Shared transport used to talk to the device.
    proto: Rc<Proto>,
}
impl Netif {
    /// Build a `Netif` bound to the `"netif"` request namespace.
    pub(crate) fn new(proto: Rc<Proto>) -> Netif {
        Netif {
            ns: String::from("netif"),
            proto,
        }
    }
    /// Ask the device for nearby access points via the `get_scaninfo` command.
    ///
    /// `refresh` is forwarded as `1`/`0`; `timeout` is sent in whole seconds,
    /// defaulting to the transport's read timeout (or 3 seconds when unset).
    pub(crate) fn get_scan_info(
        &self,
        refresh: bool,
        timeout: Option<Duration>,
    ) -> Result<Vec<AccessPoint>> {
        // Device expects a numeric flag, not a JSON bool.
        let refresh = if refresh { 1 } else { 0 };
        // Note: If scan timeout is greater than proto's read timeout,
        // the method returns with an ErrorKind::WouldBlock error.
        let timeout = timeout.map_or(
            self.proto.read_timeout().map_or(3, |to| to.as_secs()),
            |duration| duration.as_secs(),
        );
        let response = self.proto.send_request(&Request::new(
            &self.ns,
            "get_scaninfo",
            Some(json!({ "refresh": refresh, "timeout": timeout })),
        ))?;
        log::trace!("{:?}", response);
        // NOTE(review): a successful-but-malformed response panics here
        // instead of surfacing an Err; consider mapping into the crate error.
        Ok(serde_json::from_value::<AccessPointList>(response)
            .map(|response| response.ap_list)
            .unwrap_or_else(|err| {
                panic!(
                    "invalid response from host with address {}: {}",
                    self.proto.host(),
                    err
                )
            }))
    }
}
/// Wire-format wrapper: the device returns `{ "ap_list": [...] }`.
#[derive(Debug, Serialize, Deserialize)]
struct AccessPointList {
    ap_list: Vec<AccessPoint>,
}
/// A single access point reported by a scan.
#[derive(Debug, Serialize, Deserialize)]
pub struct AccessPoint {
    // Network name as reported by the device.
    ssid: String,
    // Encryption/key type code; semantics are device-defined — see device docs.
    key_type: u32,
}
impl AccessPoint {
    /// The access point's network name (SSID).
    pub fn ssid(&self) -> &str {
        &self.ssid
    }
    /// The device-reported key/encryption type code.
    pub fn key_type(&self) -> u32 {
        self.key_type
    }
}
|
use rustwlc::{Point, ResizeEdge};
use uuid::Uuid;
use petgraph::graph::NodeIndex;
use super::super::LayoutTree;
use super::super::commands::CommandResult;
use super::super::core::{Direction, ShiftDirection, TreeError};
use super::super::core::container::{Container, ContainerType, ContainerErr,
Handle, Layout};
/// Errors that can arise while moving a container through the layout tree.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum MovementError {
    /// Attempted to move the node behind the UUID in the given direction,
    /// which would cause it to leave its siblings.
    MoveOutsideSiblings(Uuid, Direction),
    /// There was a tree error, generally should abort operation and pass this
    /// up back through the caller.
    Tree(Box<TreeError>),
    /// Expected the view to be floating, but it was not
    NotFloating(NodeIndex)
}
impl From<ContainerErr> for MovementError {
    /// Wrap a container error as a tree error (converted via `Into`,
    /// boxed — presumably to keep the variant small).
    fn from(err: ContainerErr) -> MovementError {
        MovementError::Tree(Box::new(err.into()))
    }
}
impl LayoutTree {
    /// Will attempt to move the container at the UUID in the given direction.
    ///
    /// On success the node's title-bar/borders are refreshed and the new
    /// parent is re-laid-out; any tree error is propagated to the caller.
    pub fn move_container(&mut self, uuid: Uuid, direction: Direction) -> CommandResult {
        let node_ix = self.tree.lookup_id(uuid)
            .ok_or(TreeError::NodeNotFound(uuid))?;
        let old_parent_ix = self.tree.parent_of(node_ix)
            .map_err(|err| TreeError::PetGraph(err))?;
        let new_parent_ix = self.move_recurse(node_ix, None, direction)?;
        // The move may have emptied the old parent; prune it if allowed.
        if self.tree.can_remove_empty_parent(old_parent_ix) {
            self.remove_container(old_parent_ix)?;
        }
        // NOTE refresh node_ix because it probably moved.
        let node_ix = self.tree.lookup_id(uuid)
            .ok_or(TreeError::NodeNotFound(uuid))?;
        // Title bars are drawn only in the tiled (non-tabbed/stacked) layouts.
        let draw_title = match self.tree[new_parent_ix].get_layout()? {
            Layout::Tabbed | Layout::Stacked => false,
            Layout::Horizontal | Layout::Vertical => true
        };
        match self.tree[node_ix] {
            Container::View { ref mut borders, .. } => {
                borders.as_mut().map(|b| b.draw_title = draw_title);
            },
            _ => {}
        }
        self.layout(new_parent_ix);
        self.tree[node_ix].draw_borders()?;
        self.validate();
        Ok(())
    }
    /// Returns the new parent of the active container if the move succeeds,
    /// Otherwise it signals what error occurred in the tree.
    ///
    /// `move_ancestor` is `None` on the first call; when a move hits the edge
    /// of its container, the recursion retries one level up, passing the
    /// parent whose bounds were hit.
    fn move_recurse(&mut self, node_to_move: NodeIndex, move_ancestor: Option<NodeIndex>,
                    direction: Direction) -> Result<NodeIndex, TreeError> {
        // Only views and sub-containers may be moved.
        match self.tree[node_to_move].get_type() {
            ContainerType::View | ContainerType::Container => { /* continue */ },
            _ => return Err(TreeError::UuidWrongType(self.tree[node_to_move].get_id(),
                                                     vec!(ContainerType::View,
                                                          ContainerType::Container)))
        }
        let parent_ix = try!(
            match move_ancestor {
                Some(node) => self.tree.parent_of(node),
                None => self.tree.parent_of(node_to_move)
            }.map_err(|err| TreeError::PetGraph(err)));
        match self.tree[parent_ix] {
            Container::Container { layout, .. } => {
                match (layout, direction) {
                    // The direction runs along this layout's axis: the move
                    // can happen among the siblings at this level.
                    (Layout::Horizontal, Direction::Left) |
                    (Layout::Horizontal, Direction::Right) |
                    (Layout::Tabbed, Direction::Left) |
                    (Layout::Tabbed, Direction::Right) |
                    (Layout::Stacked, Direction::Up) |
                    (Layout::Stacked, Direction::Down) |
                    (Layout::Vertical, Direction::Up) |
                    (Layout::Vertical, Direction::Down) => {
                        if let Some(ancestor_ix) = move_ancestor {
                            match self.move_between_ancestors(node_to_move, ancestor_ix, direction) {
                                Ok(new_parent_ix) => Ok(new_parent_ix),
                                Err(MovementError::Tree(err)) => {
                                    Err(*err)
                                }
                                Err(MovementError::MoveOutsideSiblings(node, dir)) => {
                                    error!("Trying to move {:#?} in the {:?} direction somehow moved out of siblings",
                                           node, dir);
                                    panic!("Moving between ancestors failed in an unexpected way")
                                },
                                err => {
                                    error!("Unexpected error: {:#?}", err);
                                    panic!("unexpected error");
                                }
                            }
                        } else { /* Moving within current parent container */
                            match self.move_within_container(node_to_move, direction) {
                                Ok(new_parent_ix) => {
                                    Ok(new_parent_ix)
                                },
                                // Hit the edge of this container: retry one level up.
                                Err(MovementError::MoveOutsideSiblings(_,_)) => {
                                    self.move_recurse(node_to_move, Some(parent_ix), direction)
                                },
                                Err(MovementError::Tree(err)) => {
                                    Err(*err)
                                },
                                err => {
                                    error!("Unexpected error: {:#?}", err);
                                    panic!("unexpected error");
                                }
                            }
                        }
                    },
                    // Axis mismatch (e.g. moving Up in a horizontal layout):
                    // the move must happen at a higher level of the tree.
                    _ => { self.move_recurse(node_to_move, Some(parent_ix), direction) }
                }
            },
            // Reached the workspace root; cannot move past it.
            Container::Workspace { .. } => {
                Err(TreeError::InvalidOperationOnRootContainer(self.tree[node_to_move].get_id()))
            }
            _ => unreachable!()
        }
    }
    /// Attempt to move a container at the node index in the given direction.
    ///
    /// If the node would move outside of its current container by moving in that
    /// direction, then MovementError::MoveOutsideSiblings is returned.
    /// If the tree state is invalid, an appropriate wrapped up error is returned.
    ///
    /// If successful, the parent index of the node is returned.
    fn move_within_container(&mut self, node_ix: NodeIndex, direction: Direction)
                             -> Result<NodeIndex, MovementError> {
        let parent_ix = try!(self.tree.parent_of(node_ix)
                             .map_err(|err| MovementError::Tree(
                                 Box::new(TreeError::PetGraph(err)))));
        let siblings_and_self = self.tree.children_of(parent_ix);
        let cur_index = try!(siblings_and_self.iter().position(|node| {
            *node == node_ix
        }).ok_or(MovementError::Tree(
            Box::new(TreeError::NodeNotFound(self.tree[node_ix].get_id())))));
        // Target sibling slot; checked_sub yields None at index 0 going left/up.
        let maybe_new_index = match direction {
            Direction::Right | Direction::Down => {
                cur_index.checked_add(1)
            }
            Direction::Left | Direction::Up => {
                cur_index.checked_sub(1)
            }
        };
        if maybe_new_index.is_some() && maybe_new_index.unwrap() < siblings_and_self.len() {
            // There is a sibling to swap with
            let swap_index = maybe_new_index.unwrap();
            let swap_ix = siblings_and_self[swap_index];
            match self.tree[swap_ix] {
                // Neighbour is a view: simply exchange positions.
                Container::View { .. } => {
                    try!(self.tree.swap_node_order(node_ix, swap_ix)
                         .map_err(|err| MovementError::Tree(
                             Box::new(TreeError::PetGraph(err)))))
                },
                // Neighbour is a sub-container: descend into it.
                Container::Container { .. } => {
                    try!(self.tree.move_into(node_ix, swap_ix)
                         .map_err(|err| MovementError::Tree(
                             Box::new(TreeError::PetGraph(err)))));
                    match self.tree[node_ix].get_handle()? {
                        Handle::View(view) => self.normalize_view(view)
                            .map_err(|err| MovementError::Tree(Box::new(err)))?,
                        _ => unreachable!()
                    }
                },
                _ => return Err(MovementError::Tree(
                    Box::new(TreeError::UuidWrongType(self.tree[swap_ix].get_id(),
                                                      vec!(ContainerType::View, ContainerType::Container)))))
            };
            Ok(self.tree.parent_of(node_ix)
               .expect("Moved container had no new parent"))
        } else {
            // Tried to move outside the limit
            Err(MovementError::MoveOutsideSiblings(self.tree[node_ix].get_id(), direction))
        }
    }
    /// Moves the node in the direction, outside to ancestor siblings.
    ///
    /// Returns the new parent of the node on success
    ///
    /// This should only be called by the recursive function.
    fn move_between_ancestors(&mut self,
                              node_to_move: NodeIndex,
                              move_ancestor: NodeIndex,
                              direction: Direction)
                              -> Result<NodeIndex, MovementError> {
        let cur_parent_ix = try!(self.tree.parent_of(move_ancestor)
                                 .map_err(|err| MovementError::Tree(
                                     Box::new(TreeError::PetGraph(err)))));
        let siblings_and_self = self.tree.children_of(cur_parent_ix);
        let cur_index = try!(siblings_and_self.iter().position(|node| {
            *node == move_ancestor
        }).ok_or(MovementError::Tree(
            Box::new(TreeError::NodeNotFound(self.tree[move_ancestor].get_id())))));
        // Find the ancestor's neighbour in the move direction. When there is
        // none (edge of the list) the node is instead appended at that edge
        // and we return its new parent immediately.
        let next_ix = match direction {
            Direction::Right | Direction::Down => {
                let next_index = cur_index + 1;
                if next_index as usize >= siblings_and_self.len() {
                    return self.tree.add_to_end(node_to_move,
                                                siblings_and_self[siblings_and_self.len() - 1],
                                                ShiftDirection::Left)
                        .and_then(|_| self.tree.parent_of(node_to_move))
                        .map_err(|err| MovementError::Tree(
                            Box::new(TreeError::PetGraph(err))))
                } else {
                    siblings_and_self[next_index]
                }
            },
            Direction::Left | Direction::Up => {
                if let Some(next_index) = cur_index.checked_sub(1) {
                    siblings_and_self[next_index]
                } else {
                    return self.tree.add_to_end(node_to_move,
                                                siblings_and_self[0],
                                                ShiftDirection::Right)
                        .and_then(|_| self.tree.parent_of(node_to_move))
                        .map_err(|err| MovementError::Tree(
                            Box::new(TreeError::PetGraph(err))))
                }
            }
        };
        // Replace ancestor location with the node we are moving,
        // shifts the others over
        let parent_ix = try!(match self.tree[next_ix] {
            Container::View { .. } => {
                match direction {
                    Direction::Right | Direction::Down => {
                        self.tree.place_node_at(node_to_move, next_ix, ShiftDirection::Left)
                    },
                    Direction::Left | Direction::Up => {
                        self.tree.place_node_at(node_to_move, next_ix, ShiftDirection::Right)
                    }
                }
            },
            Container::Container { .. } => {
                self.tree.move_into(node_to_move, next_ix)
            },
            _ => unreachable!()
        }.map_err(|err| MovementError::Tree(Box::new(TreeError::PetGraph(err)))));
        match self.tree[node_to_move] {
            Container::View { handle, .. } => {
                self.normalize_view(handle)
                    .map_err(|err| MovementError::Tree(
                        Box::new(err)))?;
                Ok(parent_ix)
            },
            _ => {
                Err(MovementError::Tree(
                    Box::new(
                        TreeError::UuidWrongType(self.tree[node_to_move].get_id(),
                                                 vec!(ContainerType::View)))))
            },
        }
    }
    /// If the view behind the node index is floating, drags move it to a
    /// point on the screen.
    pub fn drag_floating(&mut self, node_ix: NodeIndex, point: Point, old_point: Point)
                         -> CommandResult {
        let container = &mut self.tree[node_ix];
        if !container.floating() {
            return Err(TreeError::Movement(MovementError::NotFloating(node_ix)))
        }
        match *container {
            Container::View { handle, ref mut effective_geometry, .. } => {
                // Translate the view by the pointer delta since the last event.
                let dx = point.x - old_point.x;
                let dy = point.y - old_point.y;
                let mut geo = handle.get_geometry()
                    .expect("Could not get geometry of view");
                geo.origin.x += dx;
                geo.origin.y += dy;
                handle.set_geometry(ResizeEdge::empty(), geo);
                effective_geometry.origin = geo.origin;
            },
            Container::Container { id, .. } | Container::Workspace { id, .. } |
            Container::Output { id, .. } | Container::Root(id) => {
                return Err(TreeError::UuidWrongType(id, vec!(ContainerType::View)))
            }
        }
        container.draw_borders()?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::super::super::core::tree::tests::basic_tree;
    use super::super::super::{Direction, Container, ContainerType, Layout};
    use rustwlc::*;
    /// Moving left/right swaps horizontal siblings; after toggling the
    /// tiling axis, up/down does the same vertically. Moves that run
    /// against the layout axis at the top level error out.
    #[test]
    fn test_basic_move() {
        let mut tree = basic_tree();
        tree.add_view(WlcView::root()).unwrap();
        let active_uuid = tree.get_active_container().unwrap().get_id();
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        assert_eq!(children[1], tree.active_container.unwrap());
        // These should do nothing, moving in wrong direction
        assert!(tree.move_container(active_uuid, Direction::Up).is_err());
        assert!(tree.move_container(active_uuid, Direction::Down).is_err());
        assert!(tree.move_container(active_uuid, Direction::Right).is_err());
        // test going left and right works
        assert!(tree.move_container(active_uuid, Direction::Left).is_ok());
        let children = tree.tree.children_of(active_parent);
        assert_eq!(children[0], tree.active_container.unwrap());
        assert!(tree.move_container(active_uuid, Direction::Right).is_ok());
        let children = tree.tree.children_of(active_parent);
        assert_eq!(children[1], tree.active_container.unwrap());
        // test going up and down works
        let id = tree.get_active_container().unwrap().get_id();
        tree.toggle_cardinal_tiling(id).unwrap();
        assert!(tree.move_container(active_uuid, Direction::Up).is_ok());
        let children = tree.tree.children_of(active_parent);
        assert_eq!(children[0], tree.active_container.unwrap());
        assert!(tree.move_container(active_uuid, Direction::Down).is_ok());
        let children = tree.tree.children_of(active_parent);
        assert_eq!(children[1], tree.active_container.unwrap());
    }
    /// Moving a view left into a sibling sub-container with a different
    /// layout should land it inside that (vertical) container.
    #[test]
    fn test_move_into_sub_container_dif_layout() {
        let mut tree = basic_tree();
        tree.switch_to_workspace("2");
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        assert_eq!(Some(children[0]), tree.active_container);
        // make the first view have a vertical layout
        tree.toggle_active_layout(Layout::Vertical).unwrap();
        tree.active_container = Some(children[1]);
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        let active_uuid = tree.get_active_container().unwrap().get_id();
        // make sure the first container is the sub container, second is the view
        assert_eq!(tree.tree[children[0]].get_type(), ContainerType::Container);
        assert_eq!(tree.tree[children[1]].get_type(), ContainerType::View);
        assert!(tree.move_container(active_uuid, Direction::Left).is_ok());
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        // we should all be in the same container now, in the vertical one
        assert_eq!(children.len(), 2);
        match tree.tree[active_parent] {
            Container::Container { ref layout, .. } => {
                assert_eq!(*layout, Layout::Vertical);
            }
            _ => panic!("Parent of active was not a vertical container")
        }
    }
    /// Moving left into a sub-container that shares the parent's layout
    /// should still descend into that exact sub-container (same id).
    #[test]
    fn test_move_into_sub_container_same_layout() {
        let mut tree = basic_tree();
        tree.switch_to_workspace("2");
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        assert_eq!(Some(children[0]), tree.active_container);
        // make the first view have a vertical layout
        tree.toggle_active_layout(Layout::Horizontal).unwrap();
        let horizontal_id = tree.tree[tree.tree.parent_of(tree.active_container.unwrap()).unwrap()].get_id();
        tree.active_container = Some(children[1]);
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        let active_uuid = tree.get_active_container().unwrap().get_id();
        // make sure the first container is the sub container, second is the view
        assert_eq!(tree.tree[children[0]].get_type(), ContainerType::Container);
        assert_eq!(tree.tree[children[1]].get_type(), ContainerType::View);
        assert!(tree.move_container(active_uuid, Direction::Left).is_ok());
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        // we should all be in the same container now, in the sub horizontal one
        assert_eq!(children.len(), 2);
        match tree.tree[active_parent] {
            Container::Container { ref layout, ref id, .. } => {
                assert_eq!(*layout, Layout::Horizontal);
                assert_eq!(*id, horizontal_id);
            }
            _ => panic!("Parent of active was not a vertical container")
        }
    }
    /// Repeatedly moving against a container edge should pop the view out of
    /// its sub-container and back in, in both directions.
    #[test]
    fn test_move_against_edges() {
        let mut tree = basic_tree();
        tree.switch_to_workspace("2");
        // move the containers into one sub-vertical container, so we can test moving
        // to the right and left outside this container
        {
            let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
            let children = tree.tree.children_of(active_parent);
            assert_eq!(Some(children[0]), tree.active_container);
            // make the first view have a vertical layout
            tree.toggle_active_layout(Layout::Horizontal).unwrap();
            tree.active_container = Some(children[1]);
            let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
            let children = tree.tree.children_of(active_parent);
            let active_uuid = tree.get_active_container().unwrap().get_id();
            // make sure the first container is the sub container, second is the view
            assert_eq!(tree.tree[children[0]].get_type(), ContainerType::Container);
            assert_eq!(tree.tree[children[1]].get_type(), ContainerType::View);
            assert!(tree.move_container(active_uuid, Direction::Left).is_ok());
        }
        let active_ix = tree.active_container.unwrap();
        let active_id = tree.tree[active_ix].get_id();
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        assert_eq!(Some(children[1]), tree.active_container);
        assert!(tree.move_container(active_id, Direction::Right).is_ok());
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        // Should only be the moved child here and the vertical container
        assert_eq!(tree.tree[children[0]].get_type(), ContainerType::Container);
        assert_eq!(tree.tree[children[1]].get_type(), ContainerType::View);
        // move it back
        assert!(tree.move_container(active_id, Direction::Left).is_ok());
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        assert_eq!(tree.tree[children[0]].get_type(), ContainerType::View);
        assert_eq!(tree.tree[children[1]].get_type(), ContainerType::View);
        // Do it to the left now
        assert!(tree.move_container(active_id, Direction::Left).is_ok());
        assert!(tree.move_container(active_id, Direction::Left).is_ok());
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        // Should only be the moved child here and the vertical container
        assert_eq!(tree.tree[children[0]].get_type(), ContainerType::View);
        assert_eq!(tree.tree[children[1]].get_type(), ContainerType::Container);
        assert!(tree.move_container(active_id, Direction::Right).is_ok());
        let active_parent = tree.tree.parent_of(tree.active_container.unwrap()).unwrap();
        let children = tree.tree.children_of(active_parent);
        assert_eq!(tree.tree[children[0]].get_type(), ContainerType::View);
        assert_eq!(tree.tree[children[1]].get_type(), ContainerType::View);
    }
}
|
#[cfg(feature = "codegen")]
mod tests {
    /// Compile-fail UI tests: every file in tests/failures/ must fail to build.
    // NOTE(review): gated only on the "codegen" feature, not #[cfg(test)];
    // presumably this file lives under tests/ where that's implicit — confirm.
    #[test]
    fn ui() {
        let t = trybuild::TestCases::new();
        t.compile_fail("tests/failures/*.rs");
    }
}
|
use core::cell::UnsafeCell;
use core::intrinsics;
use libc::{self, c_int};
use super::Duration;
/// Returns the platform-specific value of errno
///
/// Each target OS exposes the thread-local errno cell through a different
/// libc symbol; every variant below resolves that symbol and returns a
/// pointer to the current thread's errno.
pub fn errno() -> i32 {
    #[cfg(any(target_os = "macos",
              target_os = "ios",
              target_os = "freebsd"))]
    unsafe fn errno_location() -> *const c_int {
        extern { fn __error() -> *const c_int; }
        __error()
    }
    // Consistency fix: declared `unsafe fn` like every other platform
    // variant (the extern call already required unsafe internally).
    #[cfg(target_os = "bitrig")]
    unsafe fn errno_location() -> *const c_int {
        extern {
            fn __errno() -> *const c_int;
        }
        __errno()
    }
    #[cfg(target_os = "dragonfly")]
    unsafe fn errno_location() -> *const c_int {
        extern { fn __dfly_error() -> *const c_int; }
        __dfly_error()
    }
    #[cfg(target_os = "openbsd")]
    unsafe fn errno_location() -> *const c_int {
        extern { fn __errno() -> *const c_int; }
        __errno()
    }
    #[cfg(any(target_os = "linux", target_os = "android"))]
    unsafe fn errno_location() -> *const c_int {
        extern { fn __errno_location() -> *const c_int; }
        __errno_location()
    }
    unsafe {
        (*errno_location()) as i32
    }
}
/// Sleep for the given duration using `nanosleep`, retrying after signal
/// interruptions until the full duration has elapsed.
pub fn sleep(dur: Duration) {
    let mut ts = libc::timespec {
        tv_sec: dur.secs() as libc::time_t,
        tv_nsec: dur.extra_nanos() as libc::c_long,
    };
    // If we're awoken with a signal then the return value will be -1 and
    // nanosleep will fill in `ts` with the remaining time.
    unsafe {
        while libc::nanosleep(&ts, &mut ts) == -1 {
            // BUGFIX: the condition was inverted (`==`), aborting on the one
            // error the loop exists to handle. EINTR means "interrupted by a
            // signal" — retry with the remaining time; any other failure is
            // an unrecoverable bug, so abort.
            if errno() != libc::EINTR { intrinsics::abort(); }
        }
    }
}
/// Raw pthread mutex bindings plus per-platform `pthread_mutex_t` layouts
/// and static initializers.
pub mod ffi {
    use libc;
    pub use self::os::{PTHREAD_MUTEX_INITIALIZER, pthread_mutex_t};
    extern {
        // mutexes
        pub fn pthread_mutex_lock(lock: *mut pthread_mutex_t) -> libc::c_int;
        pub fn pthread_mutex_unlock(lock: *mut pthread_mutex_t) -> libc::c_int;
    }
    // BSDs: the mutex is an opaque pointer, NULL-initialized.
    #[cfg(any(target_os = "freebsd",
              target_os = "dragonfly",
              target_os = "bitrig",
              target_os = "openbsd"))]
    mod os {
        use libc;
        pub type pthread_mutex_t = *mut libc::c_void;
        pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = 0 as *mut _;
    }
    // Apple: signature word + arch-dependent opaque blob.
    #[cfg(any(target_os = "macos", target_os = "ios"))]
    mod os {
        use libc;
        #[cfg(any(target_arch = "x86_64",
                  target_arch = "aarch64"))]
        const __PTHREAD_MUTEX_SIZE__: usize = 56;
        #[cfg(any(target_arch = "x86",
                  target_arch = "arm"))]
        const __PTHREAD_MUTEX_SIZE__: usize = 40;
        const _PTHREAD_MUTEX_SIG_INIT: libc::c_long = 0x32AAABA7;
        #[repr(C)]
        pub struct pthread_mutex_t {
            __sig: libc::c_long,
            __opaque: [u8; __PTHREAD_MUTEX_SIZE__],
        }
        pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t {
            __sig: _PTHREAD_MUTEX_SIG_INIT,
            __opaque: [0; __PTHREAD_MUTEX_SIZE__],
        };
    }
    // Linux/glibc: align field plus zero-filled remainder of the struct.
    #[cfg(target_os = "linux")]
    mod os {
        use core::cell::UnsafeCell;
        use core::mem;
        use libc;
        // minus 8 because we have an 'align' field
        #[cfg(target_arch = "x86_64")]
        const __SIZEOF_PTHREAD_MUTEX_T: usize = 40 - 8;
        #[cfg(any(target_arch = "x86",
                  target_arch = "arm",
                  target_arch = "mips",
                  target_arch = "mipsel",
                  target_arch = "powerpc"))]
        const __SIZEOF_PTHREAD_MUTEX_T: usize = 24 - 8;
        #[cfg(target_arch = "aarch64")]
        const __SIZEOF_PTHREAD_MUTEX_T: usize = 48 - 8;
        #[repr(C)]
        pub struct pthread_mutex_t {
            __align: libc::c_longlong,
            size: [u8; __SIZEOF_PTHREAD_MUTEX_T],
        }
        pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t {
            __align: 0,
            size: [0; __SIZEOF_PTHREAD_MUTEX_T],
        };
    }
    // Android/bionic: a single int.
    #[cfg(target_os = "android")]
    mod os {
        use libc;
        #[repr(C)]
        pub struct pthread_mutex_t { value: libc::c_int }
        pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t {
            value: 0,
        };
    }
}
/// Thin wrapper over a raw pthread mutex; `UnsafeCell` gives the interior
/// mutability needed to pass a `*mut` to the pthread functions.
pub struct Mutex { inner: UnsafeCell<ffi::pthread_mutex_t> }
/// Statically-initialized mutex, mirroring `PTHREAD_MUTEX_INITIALIZER`.
pub const MUTEX_INIT: Mutex = Mutex {
    inner: UnsafeCell { value: ffi::PTHREAD_MUTEX_INITIALIZER },
};
// Pthread mutexes are designed to be shared and locked across threads.
unsafe impl Send for Mutex {}
unsafe impl Sync for Mutex {}
impl Mutex {
    /// Acquire the mutex, blocking until it is available.
    ///
    /// Unsafe: caller must ensure the mutex was properly initialized and is
    /// not destroyed while locked. The return code is currently ignored.
    #[inline]
    pub unsafe fn lock(&self) {
        let _r = ffi::pthread_mutex_lock(self.inner.get());
        //debug_assert_eq!(r, 0);
    }
    /// Release the mutex.
    ///
    /// Unsafe: caller must hold the lock. The return code is currently ignored.
    #[inline]
    pub unsafe fn unlock(&self) {
        let _r = ffi::pthread_mutex_unlock(self.inner.get());
        //debug_assert_eq!(r, 0);
    }
}
|
use whoami;
mod repl;
/// Greet the current OS user, then hand control to the Monkey REPL.
fn main() {
    println!(
        "Hello {}! This is the Monkey programming language!",
        whoami::username()
    );
    println!("Feel free to type in commands");
    repl::start();
}
|
use std::old_io;
use nix::errno::{SysError, EAGAIN, EADDRINUSE};
use self::MioErrorKind::{
Eof,
BufUnderflow,
BufOverflow,
WouldBlock,
AddrInUse,
EventLoopTerminated,
OtherError
};
/// Convenience result type for mio operations.
pub type MioResult<T> = Result<T, MioError>;
/// An error from a mio operation: a high-level kind plus, when one exists,
/// the underlying OS-level error.
#[derive(Copy, Show, PartialEq, Clone)]
pub struct MioError {
    pub kind: MioErrorKind,
    // Underlying syscall error, if this error came from the OS.
    sys: Option<SysError>
}
/// High-level classification of mio errors.
#[derive(Copy, Show, PartialEq, Clone)]
pub enum MioErrorKind {
    Eof, // End of file or socket closed
    WouldBlock, // The operation would have blocked
    AddrInUse, // Inet socket address or domain socket path already in use
    BufUnderflow, // Buf does not contain enough data to perform read op
    BufOverflow, // Buf does not contain enough capacity to perform write op
    EventLoopTerminated, // The event loop is not running anymore
    OtherError, // System error not covered by other kinds
}
impl MioError {
    /// An end-of-file / socket-closed error with no OS-level cause.
    pub fn eof() -> MioError {
        MioError {
            kind: Eof,
            sys: None
        }
    }
    /// A buffer-underflow error (not enough data to read).
    pub fn buf_underflow() -> MioError {
        MioError {
            kind: BufUnderflow,
            sys: None
        }
    }
    /// A buffer-overflow error (not enough capacity to write).
    pub fn buf_overflow() -> MioError {
        MioError {
            kind: BufOverflow,
            sys: None
        }
    }
    /// Classify an OS-level error into a `MioError`, keeping the original
    /// `SysError` for later conversion/reporting.
    pub fn from_sys_error(err: SysError) -> MioError {
        let kind = match err.kind {
            EAGAIN => WouldBlock,
            EADDRINUSE => AddrInUse,
            _ => OtherError
        };
        MioError {
            kind: kind,
            sys: Some(err)
        }
    }
    /// True if this is an end-of-file error.
    pub fn is_eof(&self) -> bool {
        match self.kind {
            Eof => true,
            _ => false
        }
    }
    /// True if the operation would have blocked.
    pub fn is_would_block(&self) -> bool {
        match self.kind {
            WouldBlock => true,
            _ => false
        }
    }
    /// True if this is a buffer-underflow error.
    pub fn is_buf_underflow(&self) -> bool {
        match self.kind {
            BufUnderflow => true,
            _ => false
        }
    }
    /// True if this is a buffer-overflow error.
    pub fn is_buf_overflow(&self) -> bool {
        match self.kind {
            BufOverflow => true,
            _ => false
        }
    }
    /// Convert into a `std::old_io::IoError`, mapping each kind to the
    /// closest standard error; `OtherError` preserves the OS errno if known.
    pub fn as_io_error(&self) -> old_io::IoError {
        use std::old_io::OtherIoError;
        match self.kind {
            Eof | BufUnderflow | BufOverflow => old_io::standard_error(old_io::EndOfFile),
            WouldBlock => old_io::standard_error(old_io::ResourceUnavailable),
            AddrInUse => old_io::standard_error(old_io::PathAlreadyExists),
            OtherError => match self.sys {
                Some(err) => old_io::IoError::from_errno(err.kind as usize, false),
                None => old_io::standard_error(old_io::OtherIoError)
            },
            EventLoopTerminated => old_io::standard_error(OtherIoError)
        }
    }
}
|
//! Completion Queue
use core::sync::atomic;
use super::sys;
use super::util::{unsync_load, Mmap};
/// Raw view of an io_uring completion ring: all fields are pointers into
/// the kernel-shared CQ mmap (see `CompletionQueue::new`).
pub struct CompletionQueue {
    // Consumer head index (advanced by userspace in `next`).
    pub(crate) head: *const atomic::AtomicU32,
    // Producer tail index (advanced by the kernel).
    pub(crate) tail: *const atomic::AtomicU32,
    // Mask applied to indices to wrap within the ring.
    pub(crate) ring_mask: *const u32,
    // Total number of CQE slots in the ring.
    pub(crate) ring_entries: *const u32,
    // Kernel counter of completions dropped because the ring was full.
    overflow: *const atomic::AtomicU32,
    // Base of the CQE array.
    pub(crate) cqes: *const sys::io_uring_cqe,
    #[allow(dead_code)]
    flags: *const atomic::AtomicU32,
}
impl Default for CompletionQueue {
fn default() -> Self {
return Self {
head: 0 as *const atomic::AtomicU32,
tail: 0 as *const atomic::AtomicU32,
ring_mask: 0 as *const u32,
ring_entries: 0 as *const u32,
overflow: 0 as *const atomic::AtomicU32,
cqes: 0 as *const sys::io_uring_cqe,
flags: 0 as *const atomic::AtomicU32,
}
}
}
impl CompletionQueue {
    /// Copy every ring pointer into `to`, making it an alias of the same
    /// kernel-shared completion ring.
    pub fn CopyTo(&self, to: &mut Self) {
        *to = Self {
            head: self.head,
            tail: self.tail,
            ring_mask: self.ring_mask,
            ring_entries: self.ring_entries,
            overflow: self.overflow,
            cqes: self.cqes,
            flags: self.flags,
        };
    }
}
/// Completion Entry
///
/// Transparent wrapper over a raw `io_uring_cqe`, so it has the exact
/// layout of the kernel structure.
#[repr(transparent)]
#[derive(Clone, Default)]
pub struct Entry(pub(crate) sys::io_uring_cqe);
impl CompletionQueue {
    /// Build a queue from the CQ mmap, resolving each field pointer from the
    /// offsets the kernel reported in `p.cq_off`.
    ///
    /// Unsafe: `cq_mmap` must be the completion-ring mapping for the ring
    /// described by `p`; the resulting pointers alias kernel-shared memory.
    pub(crate) unsafe fn new(cq_mmap: &Mmap, p: &sys::io_uring_params) -> CompletionQueue {
        mmap_offset! {
            let head = cq_mmap + p.cq_off.head => *const atomic::AtomicU32;
            let tail = cq_mmap + p.cq_off.tail => *const atomic::AtomicU32;
            let ring_mask = cq_mmap + p.cq_off.ring_mask => *const u32;
            let ring_entries = cq_mmap + p.cq_off.ring_entries => *const u32;
            let overflow = cq_mmap + p.cq_off.overflow => *const atomic::AtomicU32;
            let cqes = cq_mmap + p.cq_off.cqes => *const sys::io_uring_cqe;
            let flags = cq_mmap + p.cq_off.flags => *const atomic::AtomicU32;
        }
        CompletionQueue {
            head,
            tail,
            ring_mask,
            ring_entries,
            overflow,
            cqes,
            flags,
        }
    }
    /// If queue is full, the new event maybe dropped.
    /// This value records number of dropped events.
    pub fn overflow(&self) -> u32 {
        unsafe { (*self.overflow).load(atomic::Ordering::SeqCst) }
    }
    /// Whether eventfd notifications are currently disabled for this ring.
    #[cfg(feature = "unstable")]
    pub fn eventfd_disabled(&self) -> bool {
        unsafe {
            (*self.flags).load(atomic::Ordering::Acquire) & sys::IORING_CQ_EVENTFD_DISABLED != 0
        }
    }
    /// Total number of CQE slots in the ring.
    #[inline]
    pub fn capacity(&self) -> usize {
        unsafe { self.ring_entries.read() as usize }
    }
    /// Number of completions currently pending (tail - head, wrapping).
    #[inline]
    pub fn len(&self) -> usize {
        unsafe {
            // Only this thread advances head, so a plain load suffices;
            // tail is advanced by the kernel, hence the Acquire load.
            let head = unsync_load(self.head);
            let tail = (*self.tail).load(atomic::Ordering::Acquire);
            tail.wrapping_sub(head) as usize
        }
    }
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    #[inline]
    pub fn is_full(&self) -> bool {
        self.len() == self.capacity()
    }
    /// Pop the next completion entry, if any, advancing the consumer head.
    pub fn next(&mut self) -> Option<Entry> {
        unsafe {
            let head = unsync_load(self.head);
            let tail = (*self.tail).load(atomic::Ordering::Acquire);
            let ring_mask = self.ring_mask.read();
            if head != tail {
                // Copy the CQE out before publishing the new head, so the
                // kernel may reuse the slot afterwards.
                let entry = self.cqes.add((head & ring_mask) as usize);
                (*self.head).store(head.wrapping_add(1), atomic::Ordering::Release);
                Some(Entry(*entry))
            } else {
                None
            }
        }
    }
    /* /// Get currently available completion queue
    pub fn available1(&mut self) -> AvailableQueue<'_> {
        unsafe {
            AvailableQueue {
                head: unsync_load(self.head),
                tail: (*self.tail).load(atomic::Ordering::Acquire),
                ring_mask: self.ring_mask.read(),
                ring_entries: self.ring_entries.read(),
                queue: self,
            }
        }
    }
    */
}
/*
pub struct AvailableQueue<'a> {
head: u32,
tail: u32,
ring_mask: u32,
ring_entries: u32,
queue: &'a mut CompletionQueue,
}
impl AvailableQueue<'_> {
/// Sync queue
pub fn sync(&mut self) {
unsafe {
(*self.queue.head).store(self.head, atomic::Ordering::Release);
self.tail = (*self.queue.tail).load(atomic::Ordering::Acquire);
}
}
#[inline]
pub fn capacity(&self) -> usize {
self.ring_entries as usize
}
#[inline]
pub fn is_full(&self) -> bool {
self.len() == self.capacity()
}
}
impl ExactSizeIterator for AvailableQueue<'_> {
#[inline]
fn len(&self) -> usize {
self.tail.wrapping_sub(self.head) as usize
}
}
impl Iterator for AvailableQueue<'_> {
type Item = Entry;
fn next(&mut self) -> Option<Self::Item> {
if self.head != self.tail {
unsafe {
let entry = self.queue.cqes.add((self.head & self.ring_mask) as usize);
error!("AvailableQueue::Next head is {}", self.head);
self.head = self.head.wrapping_add(1);
Some(Entry(*entry))
}
} else {
None
}
}
}
impl Drop for AvailableQueue<'_> {
fn drop(&mut self) {
unsafe {
(*self.queue.head).store(self.head, atomic::Ordering::Release);
}
}
}
*/
impl Entry {
    /// Result value
    ///
    /// Operation result code; negative values are `-errno`, mirroring the
    /// raw `io_uring_cqe.res` field.
    pub fn result(&self) -> i32 {
        self.0.res
    }
    /// User Data
    ///
    /// See [Entry::user_data](super::squeue::Entry::user_data).
    pub fn user_data(&self) -> u64 {
        self.0.user_data
    }
    /// Flags
    ///
    /// Raw completion flags as reported by the kernel.
    pub fn flags(&self) -> u32 {
        self.0.flags
    }
}
|
extern crate libc;
pub mod jalali_bindings;
mod data_structs;
mod wrappers;
#[cfg(test)]
mod test;
pub use data_structs::*;
pub use wrappers::*;
|
use std::{env, io};
use list_dirs::printer;
/// Entry point: print a listing of the directory given as the single CLI
/// argument, defaulting to the current directory when none is provided.
fn main() -> io::Result<()> {
    // On Windows, try to enable ANSI escape handling so colored output
    // renders; failure is ignored (output would simply be uncolored).
    #[cfg(windows)]
    let _enabled = ansi_term::enable_ansi_support();
    let mut args: Vec<String> = env::args().collect();
    match args.len() {
        // No path given: fall back to the current directory.
        1 => args.push(String::from(".")),
        2 => (),
        _ => {
            // BUGFIX: error message read "too many or too less arguments".
            return Err(io::Error::new(
                io::ErrorKind::Other,
                "too many or too few arguments",
            ))
        }
    }
    printer(&args[1])?;
    Ok(())
}
|
use std::convert::TryFrom;
use byteorder::{ByteOrder, LittleEndian};
use chrono::{DateTime, Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike, Utc};
use crate::decode::Decode;
use crate::encode::Encode;
use crate::io::{Buf, BufMut};
use crate::mysql::protocol::TypeId;
use crate::mysql::type_info::MySqlTypeInfo;
use crate::mysql::{MySql, MySqlData, MySqlValue};
use crate::types::Type;
use crate::Error;
use std::str::from_utf8;
impl Type<MySql> for DateTime<Utc> {
    /// `DateTime<Utc>` maps to the MySQL `TIMESTAMP` column type.
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::TIMESTAMP)
    }
}
impl Encode<MySql> for DateTime<Utc> {
    /// Encode by delegating to the `NaiveDateTime` encoder on the UTC
    /// naive value (the offset is dropped on the wire).
    fn encode(&self, buf: &mut Vec<u8>) {
        Encode::<MySql>::encode(&self.naive_utc(), buf);
    }
}
impl<'de> Decode<'de, MySql> for DateTime<Utc> {
    /// Decode as a `NaiveDateTime`, then reinterpret it as UTC.
    fn decode(value: MySqlValue<'de>) -> crate::Result<Self> {
        let naive: NaiveDateTime = Decode::<MySql>::decode(value)?;
        Ok(DateTime::from_utc(naive, Utc))
    }
}
impl Type<MySql> for NaiveTime {
    /// `NaiveTime` maps to the MySQL `TIME` column type.
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::TIME)
    }
}
impl Encode<MySql> for NaiveTime {
    /// Binary-encode a TIME value: length byte, sign byte (always positive),
    /// a 4-byte days field (left zeroed), then H/M/S and optional micros.
    fn encode(&self, buf: &mut Vec<u8>) {
        // Payload length excludes the length byte itself: 8 or 12.
        let len = Encode::<MySql>::size_hint(self) - 1;
        buf.push(len as u8);
        // NaiveTime is not negative
        buf.push(0);
        // "date on 4 bytes little-endian format" (?)
        // https://mariadb.com/kb/en/resultset-row/#timestamp-binary-encoding
        buf.advance(4);
        // len > 9 is only true for the 12-byte form, which carries microseconds.
        encode_time(self, len > 9, buf);
    }
    fn size_hint(&self) -> usize {
        if self.nanosecond() == 0 {
            // if micro_seconds is 0, length is 8 and micro_seconds is not sent
            9
        } else {
            // otherwise length is 12
            13
        }
    }
}
impl<'de> Decode<'de, MySql> for NaiveTime {
fn decode(buf: MySqlValue<'de>) -> crate::Result<Self> {
match buf.try_get()? {
MySqlData::Binary(mut buf) => {
// data length, expecting 8 or 12 (fractional seconds)
let len = buf.get_u8()?;
// is negative : int<1>
let is_negative = buf.get_u8()?;
assert_eq!(is_negative, 0, "Negative dates/times are not supported");
// "date on 4 bytes little-endian format" (?)
// https://mariadb.com/kb/en/resultset-row/#timestamp-binary-encoding
buf.advance(4);
decode_time(len - 5, buf)
}
MySqlData::Text(buf) => {
let s = from_utf8(buf).map_err(Error::decode)?;
NaiveTime::parse_from_str(s, "%H:%M:%S%.f").map_err(Error::decode)
}
}
}
}
impl Type<MySql> for NaiveDate {
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::DATE)
    }
}

impl Encode<MySql> for NaiveDate {
    /// DATE is always 4 payload bytes: year (u16 LE), month, day.
    fn encode(&self, buf: &mut Vec<u8>) {
        buf.push(4);
        encode_date(self, buf);
    }

    fn size_hint(&self) -> usize {
        // 1 length byte + 4 payload bytes.
        5
    }
}

impl<'de> Decode<'de, MySql> for NaiveDate {
    fn decode(buf: MySqlValue<'de>) -> crate::Result<Self> {
        match buf.try_get()? {
            // Skip the leading length byte; the rest is year/month/day.
            MySqlData::Binary(buf) => Ok(decode_date(&buf[1..])),
            MySqlData::Text(buf) => {
                let s = from_utf8(buf).map_err(Error::decode)?;
                NaiveDate::parse_from_str(s, "%Y-%m-%d").map_err(Error::decode)
            }
        }
    }
}
impl Type<MySql> for NaiveDateTime {
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::DATETIME)
    }
}

impl Encode<MySql> for NaiveDateTime {
    /// Writes the binary-protocol DATETIME value with trailing-field
    /// compression: date only (len 4), date+time (7), date+time+micros (11).
    fn encode(&self, buf: &mut Vec<u8>) {
        let len = Encode::<MySql>::size_hint(self) - 1;
        buf.push(len as u8);
        encode_date(&self.date(), buf);
        if len > 4 {
            // len > 8 ⇔ len == 11 ⇔ the microsecond field is included.
            encode_time(&self.time(), len > 8, buf);
        }
    }

    fn size_hint(&self) -> usize {
        // to save space the packet can be compressed:
        match (
            self.hour(),
            self.minute(),
            self.second(),
            self.timestamp_subsec_nanos(),
        ) {
            // if hour, minutes, seconds and micro_seconds are all 0,
            // length is 4 and no other field is sent
            (0, 0, 0, 0) => 5,
            // if micro_seconds is 0, length is 7
            // and micro_seconds is not sent
            (_, _, _, 0) => 8,
            // otherwise length is 11
            (_, _, _, _) => 12,
        }
    }
}

impl<'de> Decode<'de, MySql> for NaiveDateTime {
    /// Decodes a DATETIME from either protocol; compressed binary payloads
    /// (len == 4) decode with a midnight time component.
    fn decode(buf: MySqlValue<'de>) -> crate::Result<Self> {
        match buf.try_get()? {
            MySqlData::Binary(buf) => {
                let len = buf[0];
                let date = decode_date(&buf[1..]);
                let dt = if len > 4 {
                    // Time starts after length(1) + date(4). Its own length is
                    // len - 4: 3 for H/M/S, 7 when microseconds are present.
                    date.and_time(decode_time(len - 4, &buf[5..])?)
                } else {
                    date.and_hms(0, 0, 0)
                };
                Ok(dt)
            }
            MySqlData::Text(buf) => {
                let s = from_utf8(buf).map_err(Error::decode)?;
                NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S%.f").map_err(Error::decode)
            }
        }
    }
}
/// Appends the 4-byte DATE payload: year as u16 little-endian, month, day.
///
/// # Panics
/// Panics when the year does not fit in a `u16`. NOTE(review): the comment
/// below claims MySQL's supported range is 1000–9999, but only the u16 bound
/// (0–65535) is actually enforced here.
fn encode_date(date: &NaiveDate, buf: &mut Vec<u8>) {
    // MySQL supports years from 1000 - 9999
    let year = u16::try_from(date.year())
        .unwrap_or_else(|_| panic!("NaiveDateTime out of range for Mysql: {}", date));
    buf.extend_from_slice(&year.to_le_bytes());
    buf.push(date.month() as u8);
    buf.push(date.day() as u8);
}
/// Reads the 4-byte DATE payload (year u16 LE, month, day).
///
/// # Panics
/// `NaiveDate::from_ymd` panics on an out-of-range date (e.g. month 0), as
/// does indexing when `buf` is shorter than 4 bytes.
fn decode_date(buf: &[u8]) -> NaiveDate {
    NaiveDate::from_ymd(
        LittleEndian::read_u16(buf) as i32,
        buf[2] as u32,
        buf[3] as u32,
    )
}
/// Appends hour/minute/second and, when `include_micros`, 4 little-endian
/// bytes of microseconds (nanoseconds truncated to whole microseconds).
fn encode_time(time: &NaiveTime, include_micros: bool, buf: &mut Vec<u8>) {
    buf.push(time.hour() as u8);
    buf.push(time.minute() as u8);
    buf.push(time.second() as u8);
    if include_micros {
        buf.put_u32::<LittleEndian>((time.nanosecond() / 1000) as u32);
    }
}
/// Reads H/M/S and, when `len > 3`, the remainder of `buf` as a little-endian
/// microsecond count ("int<EOF>" — the field runs to the end of the buffer).
///
/// # Panics
/// `NaiveTime::from_hms_micro` panics on out-of-range components.
fn decode_time(len: u8, mut buf: &[u8]) -> crate::Result<NaiveTime> {
    let hour = buf.get_u8()?;
    let minute = buf.get_u8()?;
    let seconds = buf.get_u8()?;
    let micros = if len > 3 {
        // microseconds : int<EOF>
        buf.get_uint::<LittleEndian>(buf.len())?
    } else {
        0
    };
    Ok(NaiveTime::from_hms_micro(
        hour as u32,
        minute as u32,
        seconds as u32,
        micros as u32,
    ))
}
/// Checks the three DATETIME compression levels (len 11 / 7 / 4) against the
/// reference bytes from the MySQL internals documentation.
#[test]
fn test_encode_date_time() {
    let mut buf = Vec::new();
    // test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html
    // Sub-second precision present: length 11, microseconds sent.
    let date1: NaiveDateTime = "2010-10-17T19:27:30.000001".parse().unwrap();
    Encode::<MySql>::encode(&date1, &mut buf);
    assert_eq!(*buf, [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0]);
    buf.clear();
    // Whole seconds only: length 7, microseconds omitted.
    let date2: NaiveDateTime = "2010-10-17T19:27:30".parse().unwrap();
    Encode::<MySql>::encode(&date2, &mut buf);
    assert_eq!(*buf, [7, 218, 7, 10, 17, 19, 27, 30]);
    buf.clear();
    // Midnight: length 4, only the date is sent.
    let date3: NaiveDateTime = "2010-10-17T00:00:00".parse().unwrap();
    Encode::<MySql>::encode(&date3, &mut buf);
    assert_eq!(*buf, [4, 218, 7, 10, 17]);
}
/// Round-trip counterpart of `test_encode_date_time`: decodes the same three
/// reference payloads and checks the formatted result.
#[test]
fn test_decode_date_time() {
    // test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html
    let buf = [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0];
    let date1 = <NaiveDateTime as Decode<MySql>>::decode(MySqlValue::binary(
        MySqlTypeInfo::default(),
        &buf,
    ))
    .unwrap();
    assert_eq!(date1.to_string(), "2010-10-17 19:27:30.000001");
    let buf = [7, 218, 7, 10, 17, 19, 27, 30];
    let date2 = <NaiveDateTime as Decode<MySql>>::decode(MySqlValue::binary(
        MySqlTypeInfo::default(),
        &buf,
    ))
    .unwrap();
    assert_eq!(date2.to_string(), "2010-10-17 19:27:30");
    // Compressed payload (len 4): time must default to midnight.
    let buf = [4, 218, 7, 10, 17];
    let date3 = <NaiveDateTime as Decode<MySql>>::decode(MySqlValue::binary(
        MySqlTypeInfo::default(),
        &buf,
    ))
    .unwrap();
    assert_eq!(date3.to_string(), "2010-10-17 00:00:00");
}
/// DATE encoding: length byte 4, then year (little-endian), month, day.
#[test]
fn test_encode_date() {
    let mut buf = Vec::new();
    let date: NaiveDate = "2010-10-17".parse().unwrap();
    Encode::<MySql>::encode(&date, &mut buf);
    assert_eq!(*buf, [4, 218, 7, 10, 17]);
}
/// DATE decoding round-trips the payload produced by `test_encode_date`.
#[test]
fn test_decode_date() {
    let buf = [4, 218, 7, 10, 17];
    let date =
        <NaiveDate as Decode<MySql>>::decode(MySqlValue::binary(MySqlTypeInfo::default(), &buf))
            .unwrap();
    assert_eq!(date.to_string(), "2010-10-17");
}
|
// ...
use proc_macro::{TokenStream, TokenTree, Delimiter};
use crate::{Error, Result, Parse};
use crate::name::Type;
use crate::utils;
/// Params represents a function's parameter list, e.g. `(a: u32, b: &str)`.
#[derive(Debug)]
pub struct Params {
    /// The comma-separated parameter items, in declaration order.
    pub items: Vec<ParamItem>,
}
impl ToString for Params {
    /// Renders the parameter list back to source form: `(item, item, ...)`.
    fn to_string(&self) -> String {
        let rendered: Vec<String> = self.items.iter().map(|item| item.to_string()).collect();
        format!("({})", rendered.join(", "))
    }
}
impl Parse for Params {
fn parse(input: TokenStream) -> Result<Self> {
// handle empty input
if input.is_empty() {
return Err(Error::new("got empty input when parsing param item."));
}
// expect one token for param: (...)
let token = input
.clone()
.into_iter()
.next();
match &token {
Some(TokenTree::Group(group)) if group.delimiter() == Delimiter::Parenthesis => {
return Ok(Params {
items: utils::split_tokens_by(group.stream(), ",")
.into_iter()
.map(|x| ParamItem::parse(x))
.collect::<Result<Vec<ParamItem>>>()?
});
}
_ => {
return Err(Error::new(format!("malformed param values: {}.", input)));
}
}
}
}
/// A single parameter: an optional name plus a type.
#[derive(Debug)]
pub struct ParamItem {
    // an ident is optional because when it is self,
    // the ident can be omitted.
    pub ident: Option<String>,
    pub ty: Type,
}
impl ToString for ParamItem {
fn to_string(&self) -> String {
let mut s = String::new();
if let Some(ident) = &self.ident {
s.push_str(&ident);
s.push_str(": ");
}
s.push_str(&self.ty.to_string());
s
}
}
impl Parse for ParamItem {
    /// Parses one parameter: either `ident: Type` or a bare `Type`
    /// (e.g. a `self` receiver).
    ///
    /// NOTE(review): splitting on ":" assumes `split_tokens_by` does not
    /// split inside `::` path segments — TODO confirm, otherwise a path
    /// type would yield >2 chunks and be rejected.
    fn parse(input: TokenStream) -> Result<Self> {
        // handle empty input
        if input.is_empty() {
            return Err(Error::new("got empty input when parsing param item."));
        }
        // split ident and type by ':'
        let mut tokens = utils::split_tokens_by(input.clone(), ":");
        // validate: one chunk (bare type) or two (name and type)
        if ![1, 2].contains(&tokens.len()) {
            return Err(Error::new(format!("malformed field: {}.", input)));
        }
        // parse type — the last chunk is always the type
        let ty = match tokens.pop() {
            Some(token) => Type::parse(token)?,
            _ => return Err(Error::new(format!("malformed field: {}.", input)))
        };
        // parse name — when a chunk remains, it must start with an ident
        let ident = match tokens.pop() {
            Some(token) => match token.into_iter().next() {
                Some(TokenTree::Ident(ident)) => Some(ident.to_string()),
                Some(other) => return Err(Error::new(format!("field name should be an ident, got {}.", other))),
                // Empty name chunk: treated the same as no name at all.
                _ => None,
            }
            _ => None,
        };
        // return
        Ok(ParamItem { ident, ty })
    }
}
use bytes::{BufMut, Bytes, BytesMut};
use model::command::SmtpCommand;
use model::controll::*;
use tokio::io;
use tokio_codec::{Decoder, Encoder};
/// Frame codec that splits the inbound byte stream into `\n`-terminated
/// lines and writes client replies back out.
pub struct LineCodec {
    // Index at which the next scan for '\n' resumes, so bytes already
    // searched are not re-scanned on the next `decode` call.
    next_index: usize,
}
impl LineCodec {
pub fn new() -> Self {
LineCodec { next_index: 0 }
}
}
impl Decoder for LineCodec {
    type Item = ServerControll;
    type Error = io::Error;

    /// Extracts at most one `\n`-terminated line per call.
    ///
    /// A valid UTF-8 line becomes `ServerControll::Command(SmtpCommand::Unknown)`;
    /// invalid UTF-8 is surfaced as `ServerControll::Invalid` with the raw bytes.
    fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<ServerControll>, io::Error> {
        // Look for a byte with the value '\n' in buf. Start searching from the search start index.
        if let Some(newline_offset) = buf[self.next_index..].iter().position(|b| *b == b'\n') {
            // Found a '\n' in the string.
            // The index of the '\n' is at the sum of the start position + the offset found.
            let newline_index = newline_offset + self.next_index;
            // Split the buffer at the index of the '\n' + 1 to include the '\n'.
            // `split_to` returns a new buffer with the contents up to the index.
            // The buffer on which `split_to` is called will now start at this index.
            let bytes = buf.split_to(newline_index + 1);
            // Try to convert the bytes to a string. Invalid UTF-8 does NOT
            // panic — it is mapped to the `Invalid` controll below.
            let line = String::from_utf8(bytes.to_vec());
            // Set the search start index back to 0.
            self.next_index = 0;
            // Return Ok(Some(...)) to signal that a full frame has been produced.
            match line {
                Ok(line) => Ok(Some(ServerControll::Command(SmtpCommand::Unknown(line)))),
                Err(_) => Ok(Some(ServerControll::Invalid(Bytes::from(bytes)))),
            }
        } else {
            // '\n' not found in the string.
            // Tell the next call to start searching after the current length of the buffer
            // since all of it was scanned and no '\n' was found.
            self.next_index = buf.len();
            // Ok(None) signifies that more data is needed to produce a full frame.
            Ok(None)
        }
    }
}
impl Encoder for LineCodec {
    type Item = ClientControll;
    type Error = io::Error;

    /// Writes a `Reply` line to the output buffer; every other controll
    /// variant is a silent no-op.
    ///
    /// NOTE(review): one extra byte is reserved "for the '\n'" but no
    /// newline is ever appended here — presumably `line` already carries its
    /// terminator. TODO confirm with the reply formatter.
    fn encode(&mut self, item: Self::Item, buf: &mut BytesMut) -> Result<(), Self::Error> {
        let line = match item {
            ClientControll::Noop => return Ok(()),
            ClientControll::Shutdown => return Ok(()),
            ClientControll::AcceptData => return Ok(()),
            ClientControll::Reply(line) => line.to_string(),
        };
        // It's important to reserve the amount of space needed. The `bytes` API
        // does not grow the buffers implicitly.
        // Reserve the length of the string + 1 for the '\n'.
        buf.reserve(line.len() + 1);
        // String implements IntoBuf, a trait used by the `bytes` API to work with
        // types that can be expressed as a sequence of bytes.
        buf.put(line);
        // Return ok to signal that no error occurred.
        Ok(())
    }
}
|
use std::io;
use anyhow::Result;
use nexers::nexus::Event;
/// End-to-end parse of the bundled `tiny-file` fixture: expects exactly two
/// `Doc` events for group/artifact `yom` (versions 1.0-alpha-2 then
/// 1.0-alpha-1), each a classifier-less jar.
#[test]
fn load() -> Result<()> {
    let mut events = Vec::with_capacity(2);
    nexers::nexus::read(
        io::BufReader::new(io::Cursor::new(&include_bytes!("tiny-file")[..])),
        |ev| {
            events.push(ev);
            Ok(())
        },
    )?;
    assert_eq!(2, events.len());
    let d = match &events[0] {
        Event::Doc(d) => d,
        other => panic!("unexpected event: {other:?}"),
    };
    assert_eq!("yom", d.id.group);
    assert_eq!("yom", d.id.artifact);
    assert_eq!("1.0-alpha-2", d.id.version);
    assert_eq!(None, d.id.classifier);
    assert_eq!("jar", d.object_info.packaging);
    let d = match &events[1] {
        Event::Doc(d) => d,
        other => panic!("unexpected event: {other:?}"),
    };
    assert_eq!("yom", d.id.group);
    assert_eq!("yom", d.id.artifact);
    assert_eq!("1.0-alpha-1", d.id.version);
    assert_eq!(None, d.id.classifier);
    assert_eq!("jar", d.object_info.packaging);
    Ok(())
}
// Dead debugging helper: `cfg(never)` is never set, so this is compiled out.
// It references `errors`, `deletions` and `docs`, which are not in scope in
// this file — it would not compile if the cfg were ever enabled as-is.
#[cfg(never)]
fn print() {
    for (e, fields) in &errors {
        println!("Error in doc:");
        for (name, value) in fields {
            println!(" * {:?}: {:?}", name, value);
        }
        println!("{:?}", e);
        println!();
    }
    println!(
        "{} errors, {} deletions, {} docs",
        errors.len(),
        deletions.len(),
        docs.len()
    );
    docs.retain(|v| !deletions.contains(&v.id));
    println!("{} live docs", docs.len());
}
|
use crate::components::login::login_form::LoginForm;
use crate::components::shared::page::Page;
use yew::prelude::*;
/// Stateless page-level component that hosts the login form.
pub struct LoginPage;
impl Component for LoginPage {
    // No messages and no properties: this page is entirely static.
    type Message = ();
    type Properties = ();

    fn create(_props: Self::Properties, _link: ComponentLink<Self>) -> Self {
        Self {}
    }

    /// Never re-renders: no messages are ever dispatched to this component.
    fn update(&mut self, _msg: Self::Message) -> ShouldRender {
        false
    }

    fn change(&mut self, _props: Self::Properties) -> ShouldRender {
        // Should only return "true" if new properties are different to
        // previously received properties.
        // This component has no properties so we will always return "false".
        false
    }

    /// Renders the login form centered on a full-screen purple background.
    fn view(&self) -> Html {
        html! {
            <Page>
                <main class="min-w-full min-h-screen flex justify-center items-center bg-purple-500">
                    <LoginForm />
                </main>
            </Page>
        }
    }
}
|
use crate::{
parser::*,
tokenizer::{Interpol as TokenInterpol, Token, TokenKind, Trivia}
};
use std::fmt;
impl fmt::Display for Trivia {
    /// Prints trivia exactly as it appeared in the original source text.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Trivia::Newlines(count) => {
                for _ in 0..count {
                    f.write_str("\n")?;
                }
            },
            Trivia::Spaces(count) => {
                for _ in 0..count {
                    f.write_str(" ")?;
                }
            },
            Trivia::Tabs(count) => {
                for _ in 0..count {
                    f.write_str("\t")?;
                }
            },
            Trivia::Comment { span: _, multiline, ref content } => {
                // Multiline comments are /* ... */, single-line are # ...
                let (open, close) = if multiline { ("/*", Some("*/")) } else { ("#", None) };
                f.write_str(open)?;
                write!(f, "{}", content)?;
                if let Some(close) = close {
                    f.write_str(close)?;
                }
            },
        }
        Ok(())
    }
}
impl fmt::Display for TokenKind {
    /// Writes the literal source spelling of the token kind. Kinds that carry
    /// their own text (EOF, Ident, Interpol, Value) print as an empty string;
    /// their content is written by `Display for Token` instead.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", match self {
            TokenKind::Assert => "assert",
            TokenKind::Else => "else",
            TokenKind::If => "if",
            TokenKind::Import => "import",
            TokenKind::In => "in",
            TokenKind::Inherit => "inherit",
            TokenKind::Let => "let",
            TokenKind::Rec => "rec",
            TokenKind::Then => "then",
            TokenKind::With => "with",
            TokenKind::CurlyBOpen => "{",
            TokenKind::CurlyBClose => "}",
            TokenKind::SquareBOpen => "[",
            TokenKind::SquareBClose => "]",
            TokenKind::Assign => "=",
            TokenKind::At => "@",
            TokenKind::Colon => ":",
            TokenKind::Comma => ",",
            TokenKind::Dot => ".",
            TokenKind::Ellipsis => "...",
            TokenKind::Question => "?",
            TokenKind::Semicolon => ";",
            TokenKind::ParenOpen => "(",
            TokenKind::ParenClose => ")",
            TokenKind::Concat => "++",
            TokenKind::Invert => "!",
            TokenKind::Merge => "//",
            TokenKind::Add => "+",
            TokenKind::Sub => "-",
            TokenKind::Mul => "*",
            TokenKind::Div => "/",
            TokenKind::And => "&&",
            TokenKind::Equal => "==",
            TokenKind::Implication => "->",
            TokenKind::Less => "<",
            TokenKind::LessOrEq => "<=",
            TokenKind::More => ">",
            TokenKind::MoreOrEq => ">=",
            TokenKind::NotEqual => "!=",
            TokenKind::Or => "||",
            TokenKind::Dynamic => "${",
            TokenKind::EOF
            | TokenKind::Ident
            | TokenKind::Interpol
            | TokenKind::Value => ""
        })
    }
}
/// Writes every trivia item to the formatter, in order, stopping at the
/// first formatting error.
fn fmt_trivia(f: &mut fmt::Formatter, trivia: &[Trivia]) -> fmt::Result {
    trivia.iter().try_for_each(|item| write!(f, "{}", item))
}
impl fmt::Display for Token {
    /// Prints a token, including any attached leading/trailing trivia, back
    /// to its original source form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Writes leading trivia, then the formatted value, then trailing
        // trivia — used for every trivia-carrying sub-token below.
        macro_rules! fmt {
            ($meta:expr, $fmt:expr $(, $arg:expr)*) => {{
                fmt_trivia(f, &$meta.leading)?;
                write!(f, $fmt $(, $arg)*)?;
                fmt_trivia(f, &$meta.trailing)?;
            }};
        }
        match self {
            Token::Simple(kind) => write!(f, "{}", kind)?,
            Token::Dynamic(tokens, close) => {
                // Dynamic interpolation: ${ tokens... }
                write!(f, "${{")?;
                for (meta, token) in tokens {
                    fmt!(meta, "{}", token);
                }
                fmt!(close, "}}");
            },
            Token::Ident(name) => write!(f, "{}", name)?,
            Token::Interpol { multiline, parts } => {
                // Multiline strings are delimited by '', single-line by ".
                if *multiline {
                    write!(f, "''")?;
                } else {
                    write!(f, "\"")?;
                }
                for part in parts {
                    match part {
                        // Literal parts keep their original (unescaped) text.
                        TokenInterpol::Literal { original, content: _, span: _ } => write!(f, "{}", original)?,
                        TokenInterpol::Tokens(tokens, close) => {
                            write!(f, "${{")?;
                            for (meta, token) in tokens {
                                fmt!(meta, "{}", token);
                            }
                            fmt!(close, "}}");
                        }
                    }
                }
                if *multiline {
                    write!(f, "''")?;
                } else {
                    write!(f, "\"")?;
                }
            },
            Token::Value(val) => write!(f, "{}", val)?
        }
        Ok(())
    }
}
/// Recursively prints node `id` from `arena`: its own data first, then all
/// children, closing string-interpolation delimiters afterwards.
///
/// # Panics
/// Panics on a parse-error node — an AST containing errors cannot be printed
/// back to source.
fn fmt_node(f: &mut fmt::Formatter, arena: &Arena, id: NodeId) -> fmt::Result {
    let node = &arena[id];
    // Same trivia-wrapping helper as in `Display for Token`.
    macro_rules! fmt {
        ($meta:expr, $fmt:expr $(, $arg:expr)*) => {{
            fmt_trivia(f, &$meta.leading)?;
            write!(f, $fmt $(, $arg)*)?;
            fmt_trivia(f, &$meta.trailing)?;
        }};
    }
    match &node.data {
        Data::Error((_span, err)) => panic!("attempt to print out AST, but it has an error: {}", err),
        Data::Ident(meta, name) => fmt!(meta, "{}", name),
        Data::Interpol { meta, multiline } => {
            // Only the opening delimiter here; the matching closer (and the
            // trailing trivia) is written after the children, below.
            fmt_trivia(f, &meta.leading)?;
            write!(f, "{}", if *multiline { "''" } else { "\"" })?;
        },
        Data::InterpolLiteral { original, content: _ } => write!(f, "{}", original)?,
        Data::None => (),
        Data::Token(meta, token) => fmt!(meta, "{}", token),
        Data::Value(meta, value) => fmt!(meta, "{}", value)
    }
    if node.kind == ASTKind::InterpolAst {
        write!(f, "${{")?;
    }
    for child in node.children(arena) {
        fmt_node(f, arena, child)?;
    }
    if let Data::Interpol { meta, multiline } = &node.data {
        write!(f, "{}", if *multiline { "''" } else { "\"" })?;
        fmt_trivia(f, &meta.trailing)?;
    }
    Ok(())
}
impl<'a> fmt::Display for AST<'a> {
    /// Prints the whole tree back to source by walking from the root node.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt_node(f, &self.arena, self.root)
    }
}
|
#[cfg(feature = "build-native-harfbuzz")]
extern crate cmake;
#[cfg(feature = "build-native-harfbuzz")]
extern crate pkg_config;
#[cfg(feature = "build-native-harfbuzz")]
fn main() {
    use std::env;
    use std::process::Command;
    use std::path::PathBuf;

    println!("cargo:rerun-if-env-changed=HARFBUZZ_SYS_NO_PKG_CONFIG");
    // Prefer a system-wide HarfBuzz found via pkg-config unless the user
    // explicitly opted out. (Collapsed a needlessly nested `if`.)
    if env::var_os("HARFBUZZ_SYS_NO_PKG_CONFIG").is_none()
        && pkg_config::find_library("harfbuzz").is_ok()
    {
        return;
    }
    // On Windows, HarfBuzz configures atomics directly; otherwise,
    // it needs assistance from configure to do so. Just use the makefile
    // build for now elsewhere.
    let target = env::var("TARGET").unwrap();
    if target.contains("windows") {
        let dst = cmake::Config::new("harfbuzz").build();
        println!("cargo:rustc-link-search=native={}/lib", dst.display());
        println!("cargo:rustc-link-lib=static=harfbuzz");
        // MinGW targets also need the C++ runtime to satisfy HarfBuzz symbols.
        if target.contains("gnu") {
            println!("cargo:rustc-link-lib=stdc++");
        }
    } else {
        // Non-Windows: drive the bundled makefile; abort the build loudly if
        // make fails.
        assert!(
            Command::new("make")
                .env("MAKEFLAGS", env::var("CARGO_MAKEFLAGS").unwrap_or_default())
                .args(&["-R", "-f", "makefile.cargo"])
                .status()
                .unwrap()
                .success()
        );
        let out_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
        println!(
            "cargo:rustc-link-search=native={}",
            out_dir.join("lib").to_str().unwrap()
        );
        println!("cargo:rustc-link-lib=static=harfbuzz");
    }
    // Dependent crates that need to find hb.h can use DEP_HARFBUZZ_INCLUDE from their build.rs.
    println!(
        "cargo:include={}",
        env::current_dir().unwrap().join("harfbuzz/src").display()
    );
}
// Without the vendored-build feature there is nothing to do at build time.
#[cfg(not(feature = "build-native-harfbuzz"))]
fn main() {}
|
pub(crate) mod poll_fd;
// `types` is not provided on Windows; its definitions come from elsewhere there.
#[cfg(not(windows))]
pub(crate) mod types;
// On Windows the `syscalls` module is sourced from a dedicated file.
#[cfg_attr(windows, path = "windows_syscalls.rs")]
pub(crate) mod syscalls;
// epoll only exists on Linux-kernel targets.
#[cfg(linux_kernel)]
pub mod epoll;
|
use std::{fmt::Debug, rc::Rc};
use super::DynType;
/// A runtime value: shared, immutable content plus an optional source position.
#[derive(Clone)]
pub struct Value {
    /// The payload behind an `Rc`, so cloning a `Value` only bumps a refcount.
    pub content: Rc<DynType>,
    /// Source location as `(u32, u16)` — presumably (line, column);
    /// TODO confirm ordering/meaning against the tokenizer.
    pub position: Option<(u32, u16)>,
}
impl Value {
pub fn new(content: DynType, position: Option<(u32, u16)>) -> Self {
Self {
content: Rc::new(content),
position,
}
}
}
impl Debug for Value {
    /// Formats the content, followed by ` [line, col]` when a position is
    /// known and ` []` otherwise.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.content.fmt(f)?;
        match self.position {
            Some((line, column)) => write!(f, " [{}, {}]", line, column),
            None => write!(f, " []"),
        }
    }
}
|
use crate::server::LSPServer;
use crate::sources::LSPSupport;
use log::{debug, trace};
use ropey::{Rope, RopeSlice};
use std::time::Instant;
use tower_lsp::lsp_types::*;
pub mod keyword;
impl LSPServer {
    /// Computes completion items for the cursor position in `params`.
    ///
    /// Trigger handling:
    /// * `.`  — member ("dot") completions resolved through the scope tree
    /// * `$`  — system task completions
    /// * `` ` `` — compiler directive completions
    /// * invoked / no context — identifier completions plus matching keywords
    ///
    /// When the client supplies no `CompletionContext`, the trigger is
    /// inferred from the previous non-whitespace character. Returns `None`
    /// when the file is unknown or any lookup fails.
    pub fn completion(&self, params: CompletionParams) -> Option<CompletionResponse> {
        debug!("completion requested");
        trace!("{:#?}", &params);
        let now = Instant::now();
        let doc = params.text_document_position;
        let file_id = self.srcs.get_id(&doc.text_document.uri).to_owned();
        // Block until at least one parse of this file has completed.
        self.srcs.wait_parse_ready(file_id, false);
        trace!("comp wait parse: {}", now.elapsed().as_millis());
        let file = self.srcs.get_file(file_id)?;
        let file = file.read().ok()?;
        trace!("comp read: {}", now.elapsed().as_millis());
        // The partial identifier the user is in the middle of typing.
        let token = get_completion_token(
            &file.text,
            file.text.line(doc.position.line as usize),
            doc.position,
        );
        let response = match params.context {
            Some(context) => match context.trigger_kind {
                CompletionTriggerKind::TriggerCharacter => {
                    debug!(
                        "trigger char completion: {}",
                        context.trigger_character.clone()?.as_str()
                    );
                    match context.trigger_character?.as_str() {
                        "." => Some(self.srcs.get_dot_completions(
                            token.trim_end_matches('.'),
                            file.text.pos_to_byte(&doc.position),
                            &doc.text_document.uri,
                        )?),
                        "$" => Some(CompletionList {
                            is_incomplete: false,
                            items: self.sys_tasks.clone(),
                        }),
                        "`" => Some(CompletionList {
                            is_incomplete: false,
                            items: self.directives.clone(),
                        }),
                        _ => None,
                    }
                }
                CompletionTriggerKind::TriggerForIncompleteCompletions => None,
                CompletionTriggerKind::Invoked => {
                    debug!("Invoked Completion");
                    let mut comps = self.srcs.get_completions(
                        &token,
                        file.text.pos_to_byte(&doc.position),
                        &doc.text_document.uri,
                    )?;
                    // complete keywords
                    comps.items.extend::<Vec<CompletionItem>>(
                        self.key_comps
                            .iter()
                            .filter(|x| x.label.starts_with(&token))
                            .cloned()
                            .collect(),
                    );
                    Some(comps)
                }
            },
            None => {
                // No context from the client: infer the trigger character
                // from the last non-whitespace character before the cursor.
                let trigger = prev_char(&file.text, &doc.position);
                match trigger {
                    '.' => Some(self.srcs.get_dot_completions(
                        token.trim_end_matches('.'),
                        file.text.pos_to_byte(&doc.position),
                        &doc.text_document.uri,
                    )?),
                    '$' => Some(CompletionList {
                        is_incomplete: false,
                        items: self.sys_tasks.clone(),
                    }),
                    '`' => Some(CompletionList {
                        is_incomplete: false,
                        items: self.directives.clone(),
                    }),
                    _ => {
                        // Same identifier + keyword completion as `Invoked`.
                        let mut comps = self.srcs.get_completions(
                            &token,
                            file.text.pos_to_byte(&doc.position),
                            &doc.text_document.uri,
                        )?;
                        comps.items.extend::<Vec<CompletionItem>>(
                            self.key_comps
                                .iter()
                                .filter(|x| x.label.starts_with(&token))
                                .cloned()
                                .collect(),
                        );
                        Some(comps)
                    }
                }
            }
        };
        // eprintln!("comp response: {}", now.elapsed().as_millis());
        Some(CompletionResponse::List(response?))
    }
}
/// Get the previous non-whitespace character before `pos`, or a space when
/// there is none (e.g. at the start of the file).
fn prev_char(text: &Rope, pos: &Position) -> char {
    let char_idx = text.pos_to_char(pos);
    // Walk backwards from the cursor until a non-whitespace char is found.
    // An empty range (char_idx == 0) falls straight through to the default,
    // which also removes the redundant `char_idx > 0` guard of the original.
    (0..char_idx)
        .rev()
        .map(|i| text.char(i))
        .find(|c| !c.is_whitespace())
        .unwrap_or(' ')
}
/// attempt to get the token the user was trying to complete, by
/// filtering out characters unneeded for name resolution
/// attempt to get the token the user was trying to complete, by
/// filtering out characters unneeded for name resolution
fn get_completion_token(text: &Rope, line: RopeSlice, pos: Position) -> String {
    let mut token = String::new();
    let mut line_iter = line.chars();
    // Advance the char iterator to the cursor column (LSP positions are
    // UTF-16 code units; convert to a char index first).
    for _ in 0..(line.utf16_cu_to_char(pos.character as usize)) {
        line_iter.next();
    }
    let mut c = line_iter.prev();
    //TODO: make this a regex
    // Walk backwards collecting identifier-ish characters, including '.' for
    // hierarchical names and brackets for indexed names.
    while c.is_some()
        && (c.unwrap().is_alphanumeric()
            || c.unwrap() == '_'
            || c.unwrap() == '.'
            || c.unwrap() == '['
            || c.unwrap() == ']')
    {
        token.push(c.unwrap());
        c = line_iter.prev();
    }
    // Characters were collected in reverse order.
    let mut result: String = token.chars().rev().collect();
    if result.contains('[') {
        // Drop everything from the first '[' on — indices are irrelevant for
        // name resolution.
        let l_bracket_offset = result.find('[').unwrap_or_else(|| result.len());
        result.replace_range(l_bracket_offset.., "");
    }
    if &result == "." {
        // probably an instantiation; the token should be what we're instantiating
        let mut char_iter = text.chars();
        let mut token = String::new();
        for _ in 0..text.pos_to_char(&pos) {
            char_iter.next();
        }
        let mut c = char_iter.prev();
        // go back to the last semicolon
        while c.is_some() && (c.unwrap() != ';') {
            c = char_iter.prev();
        }
        // go to the start of the next symbol
        while c.is_some() && !(c.unwrap().is_alphanumeric() || c.unwrap() == '_') {
            c = char_iter.next();
        }
        // then extract the next symbol
        while c.is_some() && (c.unwrap().is_alphanumeric() || c.unwrap() == '_') {
            token.push(c.unwrap());
            c = char_iter.next();
        }
        token
    } else {
        result
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::definition::def_types::Scope;
use crate::definition::get_scopes;
use crate::sources::{parse, LSPSupport};
use crate::support::test_init;
use ropey::Rope;
/// Exercises token extraction at several cursor columns: a plain identifier,
/// a hierarchical `a.b` name, a mid-identifier cursor, and an indexed name
/// whose brackets are stripped.
#[test]
fn test_get_completion_token() {
    test_init();
    let text = Rope::from_str("abc abc.cba de_fg cde[4]");
    let mut result = get_completion_token(
        &text,
        text.line(0),
        Position {
            line: 0,
            character: 3,
        },
    );
    assert_eq!(&result, "abc");
    result = get_completion_token(
        &text,
        text.line(0),
        Position {
            line: 0,
            character: 11,
        },
    );
    assert_eq!(&result, "abc.cba");
    // Cursor in the middle of "de_fg": only the prefix is collected.
    result = get_completion_token(
        &text,
        text.line(0),
        Position {
            line: 0,
            character: 16,
        },
    );
    assert_eq!(&result, "de_f");
    // Index brackets are stripped from "cde[4]".
    result = get_completion_token(
        &text,
        text.line(0),
        Position {
            line: 0,
            character: 23,
        },
    );
    assert_eq!(&result, "cde");
}
/// Full-server completion flow: open a module with two logics, append a new
/// line typing "a", then request invoked completion and expect both
/// declarations ("abc", "abcd") among the items.
#[test]
fn test_completion() {
    test_init();
    let server = LSPServer::new(None);
    let uri = Url::parse("file:///test.sv").unwrap();
    let text = r#"module test;
logic abc;
logic abcd;
endmodule
"#;
    let open_params = DidOpenTextDocumentParams {
        text_document: TextDocumentItem {
            uri: uri.clone(),
            language_id: "systemverilog".to_owned(),
            version: 0,
            text: text.to_owned(),
        },
    };
    server.did_open(open_params);
    let fid = server.srcs.get_id(&uri);
    server.srcs.wait_parse_ready(fid, true);
    // Three incremental edits: newline, two spaces of indent, then "a".
    let change_params = DidChangeTextDocumentParams {
        text_document: VersionedTextDocumentIdentifier {
            uri: uri.clone(),
            version: 3,
        },
        content_changes: vec![
            TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 3,
                        character: 0,
                    },
                    end: Position {
                        line: 3,
                        character: 0,
                    },
                }),
                range_length: None,
                text: "\n".to_owned(),
            },
            TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 4,
                        character: 0,
                    },
                    end: Position {
                        line: 4,
                        character: 0,
                    },
                }),
                range_length: None,
                text: "  ".to_owned(),
            },
            TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 4,
                        character: 2,
                    },
                    end: Position {
                        line: 4,
                        character: 2,
                    },
                }),
                range_length: None,
                text: "a".to_owned(),
            },
        ],
    };
    server.did_change(change_params);
    server.srcs.wait_parse_ready(fid, true);
    // Request completion just after the typed "a".
    let completion_params = CompletionParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri },
            position: Position {
                line: 4,
                character: 3,
            },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: Some(CompletionContext {
            trigger_kind: CompletionTriggerKind::Invoked,
            trigger_character: None,
        }),
    };
    let response: CompletionResponse = server.completion(completion_params).unwrap();
    let item1 = CompletionItem {
        label: "abc".to_owned(),
        kind: Some(CompletionItemKind::Variable),
        detail: Some("logic".to_string()),
        ..CompletionItem::default()
    };
    let item2 = CompletionItem {
        label: "abcd".to_owned(),
        kind: Some(CompletionItemKind::Variable),
        detail: Some("logic".to_string()),
        ..CompletionItem::default()
    };
    if let CompletionResponse::List(item) = response {
        assert!(item.items.contains(&item1));
        assert!(item.items.contains(&item2));
    } else {
        panic!();
    }
}
/// Scope-aware completion: typing "a" inside `func1` must offer the local
/// `abc` and the enclosing module's `aouter`, but never `abcd`, which is
/// local to the sibling `func2`.
#[test]
fn test_nested_completion() {
    test_init();
    let server = LSPServer::new(None);
    let uri = Url::parse("file:///test.sv").unwrap();
    let text = r#"module test;
logic aouter;
function func1();
logic abc;
func1 = abc;
endfunction
function func2();
logic abcd;
func2 = abcd;
endfunction
endmodule
"#;
    let open_params = DidOpenTextDocumentParams {
        text_document: TextDocumentItem {
            uri: uri.clone(),
            language_id: "systemverilog".to_owned(),
            version: 0,
            text: text.to_owned(),
        },
    };
    server.did_open(open_params);
    let fid = server.srcs.get_id(&uri);
    server.srcs.wait_parse_ready(fid, true);
    // Insert a fresh indented line inside func1 and type "a".
    let change_params = DidChangeTextDocumentParams {
        text_document: VersionedTextDocumentIdentifier {
            uri: uri.clone(),
            version: 3,
        },
        content_changes: vec![
            TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 4,
                        character: 0,
                    },
                    end: Position {
                        line: 4,
                        character: 0,
                    },
                }),
                range_length: None,
                text: "\n".to_owned(),
            },
            TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 4,
                        character: 0,
                    },
                    end: Position {
                        line: 4,
                        character: 0,
                    },
                }),
                range_length: None,
                text: "  ".to_owned(),
            },
            TextDocumentContentChangeEvent {
                range: Some(Range {
                    start: Position {
                        line: 4,
                        character: 2,
                    },
                    end: Position {
                        line: 4,
                        character: 2,
                    },
                }),
                range_length: None,
                text: "a".to_owned(),
            },
        ],
    };
    server.did_change(change_params);
    server.srcs.wait_parse_ready(fid, true);
    let completion_params = CompletionParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri },
            position: Position {
                line: 4,
                character: 3,
            },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: Some(CompletionContext {
            trigger_kind: CompletionTriggerKind::Invoked,
            trigger_character: None,
        }),
    };
    let response: CompletionResponse = server.completion(completion_params).unwrap();
    let item1 = CompletionItem {
        label: "abc".to_owned(),
        kind: Some(CompletionItemKind::Variable),
        detail: Some("logic".to_string()),
        ..CompletionItem::default()
    };
    let item3 = CompletionItem {
        label: "aouter".to_owned(),
        kind: Some(CompletionItemKind::Variable),
        detail: Some("logic".to_string()),
        ..CompletionItem::default()
    };
    if let CompletionResponse::List(item) = response {
        eprintln!("{:#?}", item);
        assert!(item.items.contains(&item1));
        // `abcd` is scoped to func2 and must not leak into func1's scope.
        for comp in &item.items {
            assert!(comp.label != "abcd");
        }
        assert!(item.items.contains(&item3));
    } else {
        panic!();
    }
}
/// Dot completion with an explicit "." trigger context: both an interface
/// *instance* (`abc.`) and the interface *type* (`test_inter.`) must yield
/// exactly the interface's single member `abcd`.
#[test]
fn test_dot_completion() {
    test_init();
    let server = LSPServer::new(None);
    let uri = Url::parse("file:///test.sv").unwrap();
    let text = r#"interface test_inter;
wire abcd;
endinterface
module test(
test_inter abc
);
abc.
test_inter.
endmodule
"#;
    let open_params = DidOpenTextDocumentParams {
        text_document: TextDocumentItem {
            uri: uri.clone(),
            language_id: "systemverilog".to_owned(),
            version: 0,
            text: text.to_owned(),
        },
    };
    server.did_open(open_params);
    let fid = server.srcs.get_id(&uri);
    server.srcs.wait_parse_ready(fid, true);
    let file = server.srcs.get_file(fid).unwrap();
    let file = file.read().unwrap();
    eprintln!("{}", file.syntax_tree.as_ref().unwrap());
    eprintln!(
        "{:#?}",
        server.srcs.scope_tree.read().unwrap().as_ref().unwrap()
    );
    // Completion after `abc.` (instance member access).
    let completion_params = CompletionParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri: uri.clone() },
            position: Position {
                line: 6,
                character: 8,
            },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: Some(CompletionContext {
            trigger_kind: CompletionTriggerKind::TriggerCharacter,
            trigger_character: Some(".".to_string()),
        }),
    };
    let response: CompletionResponse = server.completion(completion_params).unwrap();
    dbg!(&response);
    let item1 = CompletionItem {
        label: "abcd".to_owned(),
        kind: Some(CompletionItemKind::Variable),
        detail: Some("wire".to_string()),
        ..CompletionItem::default()
    };
    if let CompletionResponse::List(item) = response {
        eprintln!("{:#?}", item);
        assert!(item.items.contains(&item1));
        assert!(item.items.len() == 1);
    } else {
        panic!();
    }
    // Completion after `test_inter.` (interface type member access).
    let completion_params = CompletionParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri },
            position: Position {
                line: 7,
                character: 14,
            },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: Some(CompletionContext {
            trigger_kind: CompletionTriggerKind::TriggerCharacter,
            trigger_character: Some(".".to_string()),
        }),
    };
    let response: CompletionResponse = server.completion(completion_params).unwrap();
    if let CompletionResponse::List(item) = response {
        eprintln!("{:#?}", item);
        assert!(item.items.contains(&item1));
        assert!(item.items.len() == 1);
    } else {
        panic!();
    }
}
/// Same fixture as `test_dot_completion`, but the first request carries no
/// `CompletionContext` — the server must infer the "." trigger from the
/// previous non-whitespace character and still return only `abcd`.
#[test]
fn test_trigger_dot_nocontext() {
    test_init();
    let server = LSPServer::new(None);
    let uri = Url::parse("file:///test.sv").unwrap();
    let text = r#"interface test_inter;
wire abcd;
endinterface
module test(
test_inter abc
);
abc.
test_inter.
endmodule
"#;
    let open_params = DidOpenTextDocumentParams {
        text_document: TextDocumentItem {
            uri: uri.clone(),
            language_id: "systemverilog".to_owned(),
            version: 0,
            text: text.to_owned(),
        },
    };
    server.did_open(open_params);
    let fid = server.srcs.get_id(&uri);
    server.srcs.wait_parse_ready(fid, true);
    let file = server.srcs.get_file(fid).unwrap();
    let file = file.read().unwrap();
    eprintln!("{}", file.syntax_tree.as_ref().unwrap());
    eprintln!(
        "{:#?}",
        server.srcs.scope_tree.read().unwrap().as_ref().unwrap()
    );
    // No context: the "." trigger must be inferred from the document text.
    let completion_params = CompletionParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri: uri.clone() },
            position: Position {
                line: 6,
                character: 8,
            },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: None,
    };
    let response: CompletionResponse = server.completion(completion_params).unwrap();
    dbg!(&response);
    let item1 = CompletionItem {
        label: "abcd".to_owned(),
        kind: Some(CompletionItemKind::Variable),
        detail: Some("wire".to_string()),
        ..CompletionItem::default()
    };
    if let CompletionResponse::List(item) = response {
        eprintln!("{:#?}", item);
        assert!(item.items.contains(&item1));
        assert!(item.items.len() == 1);
    } else {
        panic!();
    }
    // Second request uses an explicit trigger context, as a control.
    let completion_params = CompletionParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri },
            position: Position {
                line: 7,
                character: 14,
            },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: Some(CompletionContext {
            trigger_kind: CompletionTriggerKind::TriggerCharacter,
            trigger_character: Some(".".to_string()),
        }),
    };
    let response: CompletionResponse = server.completion(completion_params).unwrap();
    if let CompletionResponse::List(item) = response {
        eprintln!("{:#?}", item);
        assert!(item.items.contains(&item1));
        assert!(item.items.len() == 1);
    } else {
        panic!();
    }
}
#[test]
fn test_dot_completion_instantiation() {
    test_init();
    // `.` inside the port-connection list of `t` should complete ports of
    // the *instantiated* interface (`test_inter2`), not `test_inter`.
    let text = r#"interface test_inter;
wire wrong;
logic clk;
endinterface
module test;
logic clk;
test_inter2 t (
.clk(clk),
.
)
endmodule
interface test_inter2;
wire abcd;
logic clk;
endinterface
"#;
    // `text` is already a `&str`; the extra borrow in the original was
    // redundant (clippy::needless_borrow).
    let doc = Rope::from_str(text);
    let url = Url::parse("file:///test.sv").unwrap();
    let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
    let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
    let pos = Position::new(8, 9);
    let token = get_completion_token(&doc, doc.line(pos.line as usize), pos);
    let completions = scope_tree.get_dot_completion(
        token.trim_end_matches('.'),
        doc.pos_to_byte(&pos),
        &url,
        &scope_tree,
    );
    let labels: Vec<String> = completions.iter().map(|x| x.label.clone()).collect();
    assert_eq!(labels, vec!["abcd", "clk"]);
}
/*
#[test]
fn test_package_completion() {
test_init();
let text = r#"package p;
struct {int x;} s1;
struct {int x;} s2;
function void f();
int x;
endfunction
endpackage
module m;
import p::*;
if (1) begin : s1
initial begin
s1.x = 1;
f.x = 1;
end
int x;
end
endmodule
"#;
let doc = Rope::from_str(&text);
let url = Url::parse("file:///test.sv").unwrap();
let syntax_tree = parse(&doc, &url, &None, &Vec::new()).unwrap();
let scope_tree = get_scopes(&syntax_tree, &url).unwrap();
dbg!(&scope_tree);
/*
let pos = Position::new(8, 9);
let token = get_completion_token(&doc, doc.line(pos.line as usize), pos);
let completions = scope_tree.get_dot_completion(
token.trim_end_matches('.'),
doc.pos_to_byte(&pos),
&url,
&scope_tree,
);
let labels: Vec<String> = completions.iter().map(|x| x.label.clone()).collect();
assert_eq!(labels, vec!["abcd", "clk"]);
*/
panic!();
}
*/
#[test]
fn test_inter_file_completion() {
    test_init();
    let server = LSPServer::new(None);
    let uri = Url::parse("file:///test.sv").unwrap();
    let uri2 = Url::parse("file:///test2.sv").unwrap();
    // Completing `s` in test.sv should see `simple_bus`, declared in a
    // second, separately opened file.
    let text = r#"module test;
s
endmodule
"#;
    let text2 = r#"interface simple_bus;
logic clk;
endinterface"#;
    let open_params = DidOpenTextDocumentParams {
        text_document: TextDocumentItem {
            uri: uri.clone(),
            language_id: "systemverilog".to_owned(),
            version: 0,
            text: text.to_owned(),
        },
    };
    let open_params2 = DidOpenTextDocumentParams {
        text_document: TextDocumentItem {
            uri: uri2.clone(),
            language_id: "systemverilog".to_owned(),
            version: 0,
            text: text2.to_owned(),
        },
    };
    server.did_open(open_params);
    server.did_open(open_params2);
    let fid = server.srcs.get_id(&uri);
    let fid2 = server.srcs.get_id(&uri2);
    server.srcs.wait_parse_ready(fid, true);
    server.srcs.wait_parse_ready(fid2, true);
    let completion_params = CompletionParams {
        text_document_position: TextDocumentPositionParams {
            text_document: TextDocumentIdentifier { uri },
            position: Position {
                line: 1,
                character: 5,
            },
        },
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        context: Some(CompletionContext {
            trigger_kind: CompletionTriggerKind::Invoked,
            trigger_character: None,
        }),
    };
    let response: CompletionResponse = server.completion(completion_params).unwrap();
    let scope_tree = server.srcs.scope_tree.read().unwrap();
    dbg!(scope_tree.as_ref().unwrap());
    if let CompletionResponse::List(item) = response {
        // Compare labels directly; the original allocated a String and an
        // intermediate Vec<&String> just to call `contains`.
        assert!(item.items.iter().any(|x| x.label == "simple_bus"));
    } else {
        panic!();
    }
}
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::switchboard::base::*,
crate::switchboard::hanging_get_handler::{HangingGetHandler, Sender},
crate::switchboard::switchboard_impl::SwitchboardImpl,
fidl::endpoints::ServiceMarker,
fidl_fuchsia_media::AudioRenderUsage,
fidl_fuchsia_settings::*,
fuchsia_async as fasync,
futures::lock::Mutex,
futures::prelude::*,
parking_lot::RwLock,
std::sync::Arc,
};
/// Lets the hanging-get handler push an `AudioSettings` snapshot through a
/// parked FIDL `Watch` responder; send failures are logged, not propagated.
impl Sender<AudioSettings> for AudioWatchResponder {
    fn send_response(self, data: AudioSettings) {
        self.send(&mut Ok(data)).log_fidl_response_error(AudioMarker::DEBUG_NAME);
    }
}
impl From<SettingResponse> for AudioSettings {
fn from(response: SettingResponse) -> Self {
if let SettingResponse::Audio(info) = response {
let mut streams = Vec::new();
for stream in info.streams.iter() {
streams.push(AudioStreamSettings::from(stream.clone()));
}
let mut audio_input = AudioInput::empty();
audio_input.muted = Some(info.input.mic_mute);
let mut audio_settings = AudioSettings::empty();
audio_settings.streams = Some(streams);
audio_settings.input = Some(audio_input);
audio_settings
} else {
panic!("incorrect value sent to audio");
}
}
}
impl From<AudioStream> for AudioStreamSettings {
    // Project an internal stream into its FIDL table form: every field is
    // populated (wrapped in `Some`), including the nested volume table.
    fn from(stream: AudioStream) -> Self {
        AudioStreamSettings {
            stream: Some(AudioRenderUsage::from(stream.stream_type)),
            source: Some(AudioStreamSettingSource::from(stream.source)),
            user_volume: Some(Volume {
                level: Some(stream.user_volume_level),
                muted: Some(stream.user_volume_muted),
            }),
        }
    }
}
impl From<AudioRenderUsage> for AudioStreamType {
    // 1:1 mapping from the FIDL media usage to the internal stream type;
    // must stay the exact inverse of the `From<AudioStreamType>` impl below.
    fn from(usage: AudioRenderUsage) -> Self {
        match usage {
            AudioRenderUsage::Background => AudioStreamType::Background,
            AudioRenderUsage::Media => AudioStreamType::Media,
            AudioRenderUsage::Interruption => AudioStreamType::Interruption,
            AudioRenderUsage::SystemAgent => AudioStreamType::SystemAgent,
            AudioRenderUsage::Communication => AudioStreamType::Communication,
        }
    }
}
impl From<AudioStreamType> for AudioRenderUsage {
    // Inverse of the `From<AudioRenderUsage>` mapping above; keep the two
    // in sync when either enum grows a variant.
    fn from(usage: AudioStreamType) -> Self {
        match usage {
            AudioStreamType::Background => AudioRenderUsage::Background,
            AudioStreamType::Media => AudioRenderUsage::Media,
            AudioStreamType::Interruption => AudioRenderUsage::Interruption,
            AudioStreamType::SystemAgent => AudioRenderUsage::SystemAgent,
            AudioStreamType::Communication => AudioRenderUsage::Communication,
        }
    }
}
impl From<AudioStreamSettingSource> for AudioSettingSource {
    // 1:1 mapping of the FIDL setting source into the internal enum;
    // inverse of the impl directly below.
    fn from(source: AudioStreamSettingSource) -> Self {
        match source {
            AudioStreamSettingSource::Default => AudioSettingSource::Default,
            AudioStreamSettingSource::User => AudioSettingSource::User,
            AudioStreamSettingSource::System => AudioSettingSource::System,
        }
    }
}
impl From<AudioSettingSource> for AudioStreamSettingSource {
    // Inverse of the `From<AudioStreamSettingSource>` mapping above.
    fn from(source: AudioSettingSource) -> Self {
        match source {
            AudioSettingSource::Default => AudioStreamSettingSource::Default,
            AudioSettingSource::User => AudioStreamSettingSource::User,
            AudioSettingSource::System => AudioStreamSettingSource::System,
        }
    }
}
fn to_request(settings: AudioSettings) -> Option<SettingRequest> {
let mut request = None;
if let Some(streams_value) = settings.streams {
let mut streams = Vec::new();
for stream in streams_value {
let user_volume = stream.user_volume.unwrap();
streams.push(AudioStream {
stream_type: AudioStreamType::from(stream.stream.unwrap()),
source: AudioSettingSource::from(stream.source.unwrap()),
user_volume_level: user_volume.level.unwrap(),
user_volume_muted: user_volume.muted.unwrap(),
});
}
request = Some(SettingRequest::SetVolume(streams));
}
request
}
/// Serve the `fuchsia.settings.Audio` FIDL protocol on `stream`.
///
/// `Set` requests are converted into switchboard volume requests and
/// answered with the switchboard outcome; `Watch` requests are parked in a
/// hanging-get handler until the audio setting changes.
pub fn spawn_audio_fidl_handler(
    switchboard_handle: Arc<RwLock<SwitchboardImpl>>,
    mut stream: AudioRequestStream,
) {
    // Keep one handle for `set_volume`; the other is consumed by the
    // hanging-get handler.
    let switchboard_lock = switchboard_handle.clone();
    let hanging_get_handler: Arc<Mutex<HangingGetHandler<AudioSettings, AudioWatchResponder>>> =
        HangingGetHandler::create(switchboard_handle, SettingType::Audio);
    fasync::spawn(async move {
        while let Ok(Some(req)) = stream.try_next().await {
            // Support future expansion of FIDL
            #[allow(unreachable_patterns)]
            match req {
                AudioRequest::Set { settings, responder } => {
                    if let Some(request) = to_request(settings) {
                        set_volume(switchboard_lock.clone(), request, responder)
                    } else {
                        // Nothing translatable in the settings table.
                        responder
                            .send(&mut Err(Error::Unsupported))
                            .log_fidl_response_error(AudioMarker::DEBUG_NAME);
                    }
                }
                AudioRequest::Watch { responder } => {
                    let mut hanging_get_lock = hanging_get_handler.lock().await;
                    hanging_get_lock.watch(responder).await;
                }
                _ => {}
            }
        }
    });
}
/// Forward a volume `request` to the switchboard and acknowledge the FIDL
/// `responder` with the outcome.
///
/// Failure to enqueue the request is reported synchronously; otherwise a
/// spawned task waits for the switchboard's asynchronous result.
fn set_volume(
    switchboard: Arc<RwLock<dyn Switchboard + Send + Sync>>,
    request: SettingRequest,
    responder: AudioSetResponder,
) {
    let (response_tx, response_rx) = futures::channel::oneshot::channel::<SettingResponseResult>();
    if switchboard.write().request(SettingType::Audio, request, response_tx).is_ok() {
        fasync::spawn(async move {
            // Return success if we get a Ok result from the
            // switchboard.
            if let Ok(Ok(_)) = response_rx.await {
                responder.send(&mut Ok(())).log_fidl_response_error(AudioMarker::DEBUG_NAME);
            } else {
                // Channel dropped or the switchboard reported an error.
                responder
                    .send(&mut Err(fidl_fuchsia_settings::Error::Failed))
                    .log_fidl_response_error(AudioMarker::DEBUG_NAME);
            }
        });
    } else {
        // Request could not even be enqueued.
        responder
            .send(&mut Err(fidl_fuchsia_settings::Error::Failed))
            .log_fidl_response_error(AudioMarker::DEBUG_NAME);
    }
}
|
use procon_reader::ProconReader;
/// Contest solution: read n, m, k; print a permutation of 0..n whose
/// greedily-accumulated quantity stays within k, padded so the values
/// sum to m.
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    let n: usize = rd.get();
    let m: usize = rd.get();
    let k: usize = rd.get();
    let mut tail = Vec::new();
    // Running total bounded by k; each chosen x contributes x. Presumably
    // this counts inversions produced by emitting x early — confirm
    // against the task statement.
    let mut inv = 0;
    let mut a = Vec::new();
    // Walk n-1 down to 0: take x greedily while the budget allows,
    // otherwise defer it to `tail`.
    for x in (0..n).rev() {
        if inv + x <= k {
            inv += x;
            a.push(x);
        } else {
            tail.push(x);
        }
    }
    // `tail` was filled in descending order; append the leftovers in
    // ascending order so `a` is a permutation of 0..n.
    tail.reverse();
    a.extend(tail);
    let s = a.iter().copied().sum::<usize>();
    // Presumably the problem guarantees m >= 0 + 1 + ... + (n-1); this
    // would panic otherwise.
    assert!(s <= m);
    // Pad the entry holding the largest value (n-1) so the total equals m.
    let i = a.iter().position(|&x| x == n - 1).unwrap();
    a[i] += m - s;
    for x in a {
        println!("{}", x);
    }
}
|
//! Selection state.
crate::state_group! {
    // One-bit group occupying the top bit (mask 0x8000_0000) of the state
    // word: bit clear = unselected, bit set = selected.
    [SelectionStateGroup: 0x8000_0000] = {
        Unselected = 0,
        Selected = 0x8000_0000,
    }
}
|
//pub mod bounding;
pub mod collision;
pub mod geometry;
pub use geometry::{
Triangle, Tetrahedron, Pentachoron,
Measure, Degenerable, Decomposable
};
pub use collision::{
Collidable, CollisionDescribable
}; |
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[cfg(feature = "Devices_Sensors_Custom")]
pub mod Custom;
#[link(name = "windows")]
extern "system" {}
// Accelerometer runtime classes: opaque WinRT objects surfaced as raw
// pointers at this sys-binding layer.
pub type Accelerometer = *mut ::core::ffi::c_void;
pub type AccelerometerDataThreshold = *mut ::core::ffi::c_void;
pub type AccelerometerReading = *mut ::core::ffi::c_void;
pub type AccelerometerReadingChangedEventArgs = *mut ::core::ffi::c_void;
// WinRT `AccelerometerReadingType` enum as a transparent i32 newtype.
// Copy/Clone are derived; the derives expand to exactly the impls the
// generator previously spelled out by hand.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct AccelerometerReadingType(pub i32);
impl AccelerometerReadingType {
    pub const Standard: Self = Self(0i32);
    pub const Linear: Self = Self(1i32);
    pub const Gravity: Self = Self(2i32);
}
// Activity-sensor runtime classes: opaque WinRT objects as raw pointers.
pub type AccelerometerShakenEventArgs = *mut ::core::ffi::c_void;
pub type ActivitySensor = *mut ::core::ffi::c_void;
pub type ActivitySensorReading = *mut ::core::ffi::c_void;
pub type ActivitySensorReadingChangeReport = *mut ::core::ffi::c_void;
pub type ActivitySensorReadingChangedEventArgs = *mut ::core::ffi::c_void;
// WinRT `ActivitySensorReadingConfidence` enum as a transparent i32
// newtype; Copy/Clone derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct ActivitySensorReadingConfidence(pub i32);
impl ActivitySensorReadingConfidence {
    pub const High: Self = Self(0i32);
    pub const Low: Self = Self(1i32);
}
// Opaque WinRT class handle (raw pointer at the sys layer).
pub type ActivitySensorTriggerDetails = *mut ::core::ffi::c_void;
// WinRT `ActivityType` enum as a transparent i32 newtype; Copy/Clone
// derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct ActivityType(pub i32);
impl ActivityType {
    pub const Unknown: Self = Self(0i32);
    pub const Idle: Self = Self(1i32);
    pub const Stationary: Self = Self(2i32);
    pub const Fidgeting: Self = Self(3i32);
    pub const Walking: Self = Self(4i32);
    pub const Running: Self = Self(5i32);
    pub const InVehicle: Self = Self(6i32);
    pub const Biking: Self = Self(7i32);
}
// Altimeter / Barometer / Compass / Gyrometer / HingeAngle / Inclinometer
// / LightSensor / Magnetometer runtime classes: opaque WinRT objects
// surfaced as raw pointers at this sys-binding layer.
pub type Altimeter = *mut ::core::ffi::c_void;
pub type AltimeterReading = *mut ::core::ffi::c_void;
pub type AltimeterReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type Barometer = *mut ::core::ffi::c_void;
pub type BarometerDataThreshold = *mut ::core::ffi::c_void;
pub type BarometerReading = *mut ::core::ffi::c_void;
pub type BarometerReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type Compass = *mut ::core::ffi::c_void;
pub type CompassDataThreshold = *mut ::core::ffi::c_void;
pub type CompassReading = *mut ::core::ffi::c_void;
pub type CompassReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type Gyrometer = *mut ::core::ffi::c_void;
pub type GyrometerDataThreshold = *mut ::core::ffi::c_void;
pub type GyrometerReading = *mut ::core::ffi::c_void;
pub type GyrometerReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type HingeAngleReading = *mut ::core::ffi::c_void;
pub type HingeAngleSensor = *mut ::core::ffi::c_void;
pub type HingeAngleSensorReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type ISensorDataThreshold = *mut ::core::ffi::c_void;
pub type Inclinometer = *mut ::core::ffi::c_void;
pub type InclinometerDataThreshold = *mut ::core::ffi::c_void;
pub type InclinometerReading = *mut ::core::ffi::c_void;
pub type InclinometerReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type LightSensor = *mut ::core::ffi::c_void;
pub type LightSensorDataThreshold = *mut ::core::ffi::c_void;
pub type LightSensorReading = *mut ::core::ffi::c_void;
pub type LightSensorReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type Magnetometer = *mut ::core::ffi::c_void;
// WinRT `MagnetometerAccuracy` enum as a transparent i32 newtype;
// Copy/Clone derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct MagnetometerAccuracy(pub i32);
impl MagnetometerAccuracy {
    pub const Unknown: Self = Self(0i32);
    pub const Unreliable: Self = Self(1i32);
    pub const Approximate: Self = Self(2i32);
    pub const High: Self = Self(3i32);
}
// Magnetometer / OrientationSensor / Pedometer runtime classes: opaque
// WinRT objects as raw pointers.
pub type MagnetometerDataThreshold = *mut ::core::ffi::c_void;
pub type MagnetometerReading = *mut ::core::ffi::c_void;
pub type MagnetometerReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type OrientationSensor = *mut ::core::ffi::c_void;
pub type OrientationSensorReading = *mut ::core::ffi::c_void;
pub type OrientationSensorReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type Pedometer = *mut ::core::ffi::c_void;
pub type PedometerDataThreshold = *mut ::core::ffi::c_void;
pub type PedometerReading = *mut ::core::ffi::c_void;
pub type PedometerReadingChangedEventArgs = *mut ::core::ffi::c_void;
// WinRT `PedometerStepKind` enum as a transparent i32 newtype; Copy/Clone
// derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PedometerStepKind(pub i32);
impl PedometerStepKind {
    pub const Unknown: Self = Self(0i32);
    pub const Walking: Self = Self(1i32);
    pub const Running: Self = Self(2i32);
}
// Proximity-sensor runtime classes: opaque WinRT objects as raw pointers.
pub type ProximitySensor = *mut ::core::ffi::c_void;
pub type ProximitySensorDataThreshold = *mut ::core::ffi::c_void;
pub type ProximitySensorDisplayOnOffController = *mut ::core::ffi::c_void;
pub type ProximitySensorReading = *mut ::core::ffi::c_void;
pub type ProximitySensorReadingChangedEventArgs = *mut ::core::ffi::c_void;
pub type SensorDataThresholdTriggerDetails = *mut ::core::ffi::c_void;
// WinRT `SensorOptimizationGoal` enum as a transparent i32 newtype;
// Copy/Clone derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct SensorOptimizationGoal(pub i32);
impl SensorOptimizationGoal {
    pub const Precision: Self = Self(0i32);
    pub const PowerEfficiency: Self = Self(1i32);
}
// Opaque WinRT class handle (raw pointer at the sys layer).
pub type SensorQuaternion = *mut ::core::ffi::c_void;
// WinRT `SensorReadingType` enum as a transparent i32 newtype; Copy/Clone
// derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct SensorReadingType(pub i32);
impl SensorReadingType {
    pub const Absolute: Self = Self(0i32);
    pub const Relative: Self = Self(1i32);
}
// Opaque WinRT class handle (raw pointer at the sys layer).
pub type SensorRotationMatrix = *mut ::core::ffi::c_void;
// WinRT `SensorType` enum as a transparent i32 newtype; Copy/Clone
// derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct SensorType(pub i32);
impl SensorType {
    pub const Accelerometer: Self = Self(0i32);
    pub const ActivitySensor: Self = Self(1i32);
    pub const Barometer: Self = Self(2i32);
    pub const Compass: Self = Self(3i32);
    pub const CustomSensor: Self = Self(4i32);
    pub const Gyroscope: Self = Self(5i32);
    pub const ProximitySensor: Self = Self(6i32);
    pub const Inclinometer: Self = Self(7i32);
    pub const LightSensor: Self = Self(8i32);
    pub const OrientationSensor: Self = Self(9i32);
    pub const Pedometer: Self = Self(10i32);
    pub const RelativeInclinometer: Self = Self(11i32);
    pub const RelativeOrientationSensor: Self = Self(12i32);
    pub const SimpleOrientationSensor: Self = Self(13i32);
}
// WinRT `SimpleOrientation` enum as a transparent i32 newtype; Copy/Clone
// derived instead of hand-written.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct SimpleOrientation(pub i32);
impl SimpleOrientation {
    pub const NotRotated: Self = Self(0i32);
    pub const Rotated90DegreesCounterclockwise: Self = Self(1i32);
    pub const Rotated180DegreesCounterclockwise: Self = Self(2i32);
    pub const Rotated270DegreesCounterclockwise: Self = Self(3i32);
    pub const Faceup: Self = Self(4i32);
    pub const Facedown: Self = Self(5i32);
}
// Simple-orientation runtime classes: opaque WinRT objects as raw pointers.
pub type SimpleOrientationSensor = *mut ::core::ffi::c_void;
pub type SimpleOrientationSensorOrientationChangedEventArgs = *mut ::core::ffi::c_void;
|
use std::fs;
#[test]
fn validate_1_1() {
    // Known part-1 answer for the AoC 2020 day 1 sample input.
    assert_eq!(algorithm("src/day_1/input_test.txt", false).0, 514579);
}
/// Scan every unordered pair of numbers in `file_location` (one integer per
/// line) for a pair summing to 2020.
///
/// Returns `(product_of_matching_pair, pairs_examined)`; the product is 0
/// when no pair matches. Panics if the file cannot be read or a line is not
/// an `i32`, matching the original behavior.
fn algorithm(file_location: &str, print_results: bool) -> (i32, usize) {
    let contents = fs::read_to_string(file_location).unwrap();
    // Parse once up front. The original cloned the whole Vec on every outer
    // iteration just to `drain` a suffix, and re-parsed each `b` for every
    // pair — both redundant inside the O(n^2) loop.
    let values: Vec<i32> = contents.lines().map(|line| line.parse().unwrap()).collect();
    let mut count = 0;
    let mut result = 0;
    for (i, &a) in values.iter().enumerate() {
        // Only pairs after `i`, so each unordered pair is examined once.
        for &b in &values[i + 1..] {
            count += 1;
            if a + b == 2020 {
                result = a * b;
                if print_results {
                    println!("{} + {} = 2020", a, b);
                    println!("{} * {} = {}", a, b, result);
                }
            }
        }
    }
    (result, count)
}
/// Solve part 1 against the real input and report the result plus how many
/// pairs were examined.
pub fn run() {
    let (result, iterations) = algorithm("src/day_1/input.txt", true);
    println!("Result: {}. Amount of iterations: {}.", result, iterations);
}
|
use aoc2018::*;
/// One node of the AoC 2018 day 8 license tree: a list of metadata entries
/// plus zero or more child nodes.
#[derive(Default, Debug)]
struct Node {
    metadata: Vec<u32>,
    children: Vec<Node>,
}
impl Node {
    /// Sum of all metadata entries in this node and every descendant.
    fn part1sum(&self) -> u32 {
        let own: u32 = self.metadata.iter().sum();
        let below: u32 = self.children.iter().map(Node::part1sum).sum();
        own + below
    }
    /// "Value" of a node: metadata sum for leaves; otherwise metadata
    /// entries are 1-based child indices and the values of the referenced
    /// children are summed (indices past the child list contribute 0).
    fn part2sum(&self) -> u32 {
        if self.children.is_empty() {
            self.metadata.iter().sum()
        } else {
            self.metadata
                .iter()
                .map(|&m| {
                    self.children
                        .get(m as usize - 1)
                        .map_or(0, Node::part2sum)
                })
                .sum()
        }
    }
    /// Decode one node (header, children, metadata) from the flat number
    /// stream; `None` when the stream is already exhausted.
    fn decode(it: &mut impl Iterator<Item = u32>) -> Option<Node> {
        let child_count = it.next()?;
        let meta_count = it.next().expect("metadata");
        let mut node = Node::default();
        for _ in 0..child_count {
            node.children.extend(Self::decode(it));
        }
        for _ in 0..meta_count {
            node.metadata.push(it.next().expect("metadata value"));
        }
        Some(node)
    }
}
/// Parse day 8's whitespace-separated numbers, decode the license tree,
/// and check both puzzle answers against the known results.
fn main() -> Result<(), Error> {
    let input = columns!(input!("day8.txt"), char::is_whitespace, u32);
    let mut it = input.iter().cloned();
    let node = Node::decode(&mut it).expect("no nodes in input");
    assert_eq!(node.part1sum(), 47647);
    assert_eq!(node.part2sum(), 23636);
    Ok(())
}
|
#[path = "process_flag_2/with_atom_flag.rs"]
mod with_atom_flag;
// `without_atom_flag_errors_badarg` in unit tests
|
mod error;
use gstreamer::glib::SendValue;
use gstreamer::prelude::*;
use gstreamer::*;
use gstreamer_pbutils::prelude::*;
use gstreamer_pbutils::{
pb_utils_get_codec_description, Discoverer, DiscovererContainerInfo, DiscovererInfo,
DiscovererResult, DiscovererStreamInfo,
};
use std::env::args;
use std::iter::Iterator;
/* Print a tag in a human-readable format (name: value) */
fn print_tag_foreach(val: &SendValue, tag: &str, depth: usize) {
    // String values print directly; any other GValue is serialized to text.
    let str: String = if let Ok(str) = val.get::<&str>() {
        str.into()
    } else {
        val.serialize().unwrap().into()
    };
    // Two spaces per nesting level, produced via the pad width of the
    // leading space character.
    let indent = 2 * depth;
    println!("{:>indent$}{}: {}", ' ', tag_get_nick(tag), str);
}
/* Print information regarding a stream */
fn print_stream_info(info: &DiscovererStreamInfo, depth: usize) {
    // Describe the stream by codec when the caps are fixed; otherwise fall
    // back to the raw caps string. No caps at all => empty description.
    let desc = if let Some(caps) = info.caps() {
        if caps.is_fixed() {
            pb_utils_get_codec_description(&caps).into()
        } else {
            caps.to_string()
        }
    } else {
        "".into()
    };
    let indent = 2 * depth;
    println!("{:<indent$}{}: {}", ' ', info.stream_type_nick(), desc);
    // Explicit drop — presumably retained from the upstream C example,
    // which frees the description here; `desc` would drop at scope end
    // anyway.
    drop(desc);
    if let Some(tags) = info.tags() {
        let indent = 2 * (depth + 1);
        println!("{:<indent$}Tags:", ' ');
        for (tag, value) in tags.iter() {
            print_tag_foreach(&value, tag, depth + 2);
        }
    }
}
/* Print information regarding a stream and its substreams, if any */
fn print_topology(info: &DiscovererStreamInfo, depth: usize) {
    print_stream_info(info, depth);
    // Follow the linear chain first; only when the chain ends do we check
    // whether this node is a container with parallel substreams.
    let next = info.next();
    if let Some(next) = next {
        print_topology(&next, depth + 1);
    } else if let Some(info) = info.downcast_ref::<DiscovererContainerInfo>() {
        let streams = info.streams();
        for tmpinf in streams {
            print_topology(&tmpinf, depth + 1);
        }
    }
}
/* This function is called every time the discoverer has information regarding
 * one of the URIs we provided.*/
fn on_discovered_cb(info: &DiscovererInfo, err: Option<&glib::Error>) {
    let uri = info.uri();
    let result = info.result();
    // Report the discovery outcome; only `Ok` carries usable media info.
    match result {
        DiscovererResult::UriInvalid => {
            println!("Invalid URI '{}'", uri);
        }
        DiscovererResult::Error => {
            // `err` is only populated for the Error result.
            println!("Discoverer error: {}", err.unwrap().message());
        }
        DiscovererResult::Timeout => {
            println!("Timeout");
        }
        DiscovererResult::Busy => {
            println!("Busy");
        }
        DiscovererResult::MissingPlugins => {
            println!("Missing plugins: {}", info.misc().unwrap());
        }
        DiscovererResult::Ok => {
            println!("Discovered '{}'", uri);
        }
        // Catch-all required because the result enum can grow; treat any
        // unknown variant as a bug in this example.
        _ => panic!(),
    }
    if result != DiscovererResult::Ok {
        eprintln!("This URI cannot be played");
        return;
    }
    /* If we got no error, show the retrieved information */
    println!("\nDuration: {}", info.duration().unwrap());
    if let Some(tags) = info.tags() {
        println!("Tags:");
        for (tag, value) in tags.iter() {
            print_tag_foreach(&value, tag, 1);
        }
    }
    if info.is_seekable() {
        println!("Seekable: yes");
    } else {
        println!("Seekable: no");
    }
    println!("\n");
    let sinfo = info.stream_info();
    if sinfo.is_none() {
        return;
    }
    let sinfo = sinfo.unwrap();
    println!("Stream information:");
    print_topology(&sinfo, 1);
    println!();
}
/* This function is called when the discoverer has finished examining
 * all the URIs we provided.*/
fn on_finished_cb(main_loop: &glib::MainLoop) {
    println!("Finished discovering");
    // Quitting the loop lets `main` fall through to `discoverer.stop()`.
    main_loop.quit();
}
/// Discover and print media information for a URI (first CLI argument, or a
/// default sample video), driving a `Discoverer` from a GLib main loop.
fn main() -> Result<(), error::Error> {
    /* if a URI was provided, use it instead of the default one */
    // `args().nth(1)` avoids collecting every argument into a Vec first
    // (clippy::needless_collect).
    let uri = args().nth(1).unwrap_or_else(|| {
        "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm"
            .into()
    });
    /* Initialize GStreamer */
    gstreamer::init().unwrap();
    println!("Discovering '{}'", uri);
    /* Instantiate the Discoverer with a 5-second per-URI timeout */
    let discoverer = Discoverer::new(ClockTime::from_seconds(5))
        .map_err(|err| format!("Error creating discoverer instance: {}\n", err))
        .unwrap();
    /* Connect to the interesting signals */
    discoverer.connect_discovered(|_, info, err| {
        on_discovered_cb(info, err);
    });
    /* Create a GLib Main Loop and set it to run, so we can wait for the signals */
    let main_loop = glib::MainLoop::new(None, false);
    let main_loop_for_finished = main_loop.clone();
    discoverer.connect_finished(move |_| {
        on_finished_cb(&main_loop_for_finished);
    });
    /* Start the discoverer process (nothing to do yet) */
    discoverer.start();
    /* Add a request to process asynchronously the URI passed through the command line */
    // `is_err()` replaces the clippy-flagged `if let Err(_) = ...` pattern
    // (clippy::redundant_pattern_matching).
    if discoverer.discover_uri_async(&uri).is_err() {
        println!("Failed to start discovering URI '{}'", uri);
        panic!();
    }
    main_loop.run();
    /* Stop the discoverer process */
    discoverer.stop();
    /* Free resources */
    Ok(())
}
|
use migration::Migrator;
use sea_schema::migration::*;
// Binary entry point for the migration tool: delegate argument parsing and
// command execution to the migration CLI, using this crate's `Migrator`.
#[async_std::main]
async fn main() {
    cli::run_cli(Migrator).await;
}
|
use crate::{
demos::{Chunk, Demo},
types::HitableList,
Camera,
};
/// Demo that fills its chunk with a position-dependent color gradient; the
/// camera, world, and sample-count inputs are ignored.
pub struct SimpleRectangle;
impl Demo for SimpleRectangle {
    fn name(&self) -> &'static str {
        "simple_rectangle"
    }
    fn render_chunk(
        &self,
        chunk: &mut Chunk,
        _camera: Option<&Camera>,
        _world: Option<&HitableList>,
        _samples: u8,
    ) {
        // (x, y): full image dimensions; (nx, ny): chunk dimensions;
        // (start_x, start_y): chunk offset in the image — inferred from the
        // loop below; confirm against `Chunk`'s definition.
        let &mut Chunk {
            x,
            y,
            nx,
            ny,
            start_x,
            start_y,
            ref mut buffer,
        } = chunk;
        let mut offset = 0;
        for j in start_y..start_y + ny {
            for i in start_x..start_x + nx {
                // Red and green ramp with absolute pixel position; blue is
                // constant.
                let color = [i as f64 / x as f64, j as f64 / y as f64, 0.2];
                buffer[offset] = (255.99 * color[0]) as u8;
                buffer[offset + 1] = (255.99 * color[1]) as u8;
                buffer[offset + 2] = (255.99 * color[2]) as u8;
                // 4 bytes per pixel; the fourth byte is never written here —
                // presumably an alpha channel initialized by the caller.
                offset += 4;
            }
        }
    }
}
|
use std::collections::HashMap;
use petgraph::{Graph, Directed};
use petgraph::graph::NodeIndex;
use petgraph::visit::GetAdjacencyMatrix;
use fixedbitset::FixedBitSet;
/// For each node in layer `h2`, compute the average position (index in
/// `h1`) of its adjacent nodes in layer `h1`.
///
/// A node with no neighbor in `h1` yields `0.0 / 0.0 == NaN`, exactly as
/// the original sum/count division did.
fn bary_center<N, E>(
    graph: &Graph<N, E, Directed>,
    matrix: &FixedBitSet,
    h1: &Vec<NodeIndex>,
    h2: &Vec<NodeIndex>,
) -> HashMap<NodeIndex, f64> {
    let mut result = HashMap::new();
    for v in h2 {
        // Indices of every h1 node adjacent to `v`.
        let positions: Vec<usize> = h1
            .iter()
            .enumerate()
            .filter(|(_, u)| graph.is_adjacent(matrix, **u, *v))
            .map(|(i, _)| i)
            .collect();
        let center = positions.iter().sum::<usize>() as f64 / positions.len() as f64;
        result.insert(*v, center);
    }
    result
}
/// Reorder layer `h2` in place by the bary-center heuristic: nodes are
/// sorted by the average index of their neighbors in the fixed layer `h1`,
/// reducing edge crossings between the two layers.
pub fn crossing_reduction<N, E>(
    graph: &Graph<N, E, Directed>,
    matrix: &FixedBitSet,
    h1: &Vec<NodeIndex>,
    h2: &mut Vec<NodeIndex>,
) {
    let values = bary_center(graph, matrix, h1, h2);
    h2.sort_by(|u, v| {
        // bary_center inserts an entry for every member of h2, so the
        // lookups cannot fail.
        let cu = values.get(u).unwrap();
        let cv = values.get(v).unwrap();
        // A node with no neighbor in h1 gets NaN from bary_center; the
        // original `partial_cmp(..).unwrap()` panicked mid-sort in that
        // case. Treat incomparable centers as equal instead.
        cu.partial_cmp(cv).unwrap_or(std::cmp::Ordering::Equal)
    });
}
#[cfg(test)]
mod tests {
    use petgraph::Graph;
    use petgraph::visit::GetAdjacencyMatrix;
    use super::*;
    #[test]
    fn it_works() {
        // Two-layer bipartite graph: h1 (u1..u4) stays fixed while h2
        // (v1..v3) is reordered by average neighbor index in h1.
        let mut graph = Graph::<&str, &str>::new();
        let u1 = graph.add_node("u1");
        let u2 = graph.add_node("u2");
        let u3 = graph.add_node("u3");
        let u4 = graph.add_node("u4");
        let v1 = graph.add_node("v1");
        let v2 = graph.add_node("v2");
        let v3 = graph.add_node("v3");
        graph.add_edge(u1, v2, "");
        graph.add_edge(u2, v2, "");
        graph.add_edge(u2, v3, "");
        graph.add_edge(u3, v1, "");
        graph.add_edge(u3, v3, "");
        graph.add_edge(u4, v2, "");
        let h1 = vec![u1, u2, u3, u4];
        let mut h2 = vec![v1, v2, v3];
        let matrix = graph.adjacency_matrix();
        crossing_reduction(&graph, &matrix, &h1, &mut h2);
        // Bary-centers: v2 = (0+1+3)/3, v3 = (1+2)/2, v1 = 2.
        assert_eq!(h2, vec![v2, v3, v1]);
    }
}
|
use clap::{crate_description, crate_name, crate_version, App, Arg};
use log::*;
use morgan::clusterMessage::{Node, FULLNODE_PORT_RANGE};
use morgan::connectionInfo::ContactInfo;
use morgan::localVoteSignerService::LocalVoteSignerService;
use morgan::service::Service;
use morgan::socketaddr;
use morgan::verifier::{Validator, ValidatorConfig};
use morgan_netutil::parse_port_range;
use morgan_interface::signature::{read_keypair, Keypair, KeypairUtil};
use std::fs::File;
use std::net::SocketAddr;
use std::process::exit;
use std::sync::Arc;
use morgan_helper::logHelper::*;
/// clap argument validator: accept the value only if it parses as a
/// `MIN_PORT-MAX_PORT` range.
fn port_range_validator(port_range: String) -> Result<(), String> {
    match parse_port_range(&port_range) {
        Some(_) => Ok(()),
        None => Err("Invalid port range".to_string()),
    }
}
fn main() {
morgan_logger::setup();
morgan_metricbot::set_panic_hook("validator");
let default_dynamic_port_range =
&format!("{}-{}", FULLNODE_PORT_RANGE.0, FULLNODE_PORT_RANGE.1);
let matches = App::new(crate_name!()).about(crate_description!())
.version(crate_version!())
.arg(
Arg::with_name("blockstream")
.long("blockstream")
.takes_value(true)
.value_name("UNIX DOMAIN SOCKET")
.help("Open blockstream at this unix domain socket location")
)
.arg(
Arg::with_name("identity")
.short("i")
.long("identity")
.value_name("PATH")
.takes_value(true)
.help("File containing an identity (keypair)"),
)
.arg(
Arg::with_name("vote_account")
.long("vote-account")
.value_name("PUBKEY_BASE58_STR")
.takes_value(true)
.help("Public key of the vote account, where to send votes"),
)
.arg(
Arg::with_name("voting_keypair")
.long("voting-keypair")
.value_name("PATH")
.takes_value(true)
.help("File containing the authorized voting keypair"),
)
.arg(
Arg::with_name("storage_keypair")
.long("storage-keypair")
.value_name("PATH")
.takes_value(true)
.required(true)
.help("File containing the storage account keypair"),
)
.arg(
Arg::with_name("init_complete_file")
.long("init-complete-file")
.value_name("FILE")
.takes_value(true)
.help("Create this file, if it doesn't already exist, once node initialization is complete"),
)
.arg(
Arg::with_name("ledger")
.short("l")
.long("ledger")
.value_name("DIR")
.takes_value(true)
.required(true)
.help("Use DIR as persistent ledger location"),
)
.arg(
Arg::with_name("entrypoint")
.short("n")
.long("entrypoint")
.value_name("HOST:PORT")
.takes_value(true)
.help("Rendezvous with the cluster at this entry point"),
)
.arg(
Arg::with_name("no_voting")
.long("no-voting")
.takes_value(false)
.help("Launch node without voting"),
)
.arg(
Arg::with_name("no_sigverify")
.short("v")
.long("no-sigverify")
.takes_value(false)
.help("Run without signature verification"),
)
.arg(
Arg::with_name("rpc_port")
.long("rpc-port")
.value_name("PORT")
.takes_value(true)
.help("RPC port to use for this node"),
)
.arg(
Arg::with_name("enable_rpc_exit")
.long("enable-rpc-exit")
.takes_value(false)
.help("Enable the JSON RPC 'fullnodeExit' API. Only enable in a debug environment"),
)
.arg(
Arg::with_name("rpc_drone_address")
.long("rpc-drone-address")
.value_name("HOST:PORT")
.takes_value(true)
.help("Enable the JSON RPC 'requestAirdrop' API with this drone address."),
)
.arg(
Arg::with_name("signer")
.short("s")
.long("signer")
.value_name("HOST:PORT")
.takes_value(true)
.help("Rendezvous with the vote signer at this RPC end point"),
)
.arg(
Arg::with_name("accounts")
.short("a")
.long("accounts")
.value_name("PATHS")
.takes_value(true)
.help("Comma separated persistent accounts location"),
)
.arg(
clap::Arg::with_name("gossip_port")
.long("gossip-port")
.value_name("HOST:PORT")
.takes_value(true)
.help("Gossip port number for the node"),
)
.arg(
clap::Arg::with_name("dynamic_port_range")
.long("dynamic-port-range")
.value_name("MIN_PORT-MAX_PORT")
.takes_value(true)
.default_value(default_dynamic_port_range)
.validator(port_range_validator)
.help("Range to use for dynamically assigned ports"),
)
.get_matches();
let mut validator_config = ValidatorConfig::default();
let keypair = if let Some(identity) = matches.value_of("identity") {
read_keypair(identity).unwrap_or_else(|err| {
eprintln!("{}: Unable to open keypair file: {}", err, identity);
exit(1);
})
} else {
Keypair::new()
};
let voting_keypair = if let Some(identity) = matches.value_of("voting_keypair") {
read_keypair(identity).unwrap_or_else(|err| {
eprintln!("{}: Unable to open keypair file: {}", err, identity);
exit(1);
})
} else {
Keypair::new()
};
let storage_keypair = if let Some(storage_keypair) = matches.value_of("storage_keypair") {
read_keypair(storage_keypair).unwrap_or_else(|err| {
eprintln!("{}: Unable to open keypair file: {}", err, storage_keypair);
exit(1);
})
} else {
Keypair::new()
};
let staking_account = matches
.value_of("staking_account")
.map_or(voting_keypair.pubkey(), |pubkey| {
pubkey.parse().expect("failed to parse staking_account")
});
let ledger_path = matches.value_of("ledger").unwrap();
validator_config.sigverify_disabled = matches.is_present("no_sigverify");
validator_config.voting_disabled = matches.is_present("no_voting");
if matches.is_present("enable_rpc_exit") {
validator_config.rpc_config.enable_fullnode_exit = true;
}
validator_config.rpc_config.drone_addr = matches.value_of("rpc_drone_address").map(|address| {
morgan_netutil::parse_host_port(address).expect("failed to parse drone address")
});
let dynamic_port_range = parse_port_range(matches.value_of("dynamic_port_range").unwrap())
.expect("invalid dynamic_port_range");
let mut gossip_addr = morgan_netutil::parse_port_or_addr(
matches.value_of("gossip_port"),
socketaddr!(
[127, 0, 0, 1],
morgan_netutil::find_available_port_in_range(dynamic_port_range)
.expect("unable to find an available gossip port")
),
);
if let Some(paths) = matches.value_of("accounts") {
validator_config.account_paths = Some(paths.to_string());
} else {
validator_config.account_paths = None;
}
let cluster_entrypoint = matches.value_of("entrypoint").map(|entrypoint| {
let entrypoint_addr = morgan_netutil::parse_host_port(entrypoint)
.expect("failed to parse entrypoint address");
gossip_addr.set_ip(morgan_netutil::get_public_ip_addr(&entrypoint_addr).unwrap());
ContactInfo::new_gossip_entry_point(&entrypoint_addr)
});
let (_signer_service, _signer_addr) = if let Some(signer_addr) = matches.value_of("signer") {
(
None,
signer_addr.to_string().parse().expect("Signer IP Address"),
)
} else {
// Run a local vote signer if a vote signer service address was not provided
let (signer_service, signer_addr) = LocalVoteSignerService::new(dynamic_port_range);
(Some(signer_service), signer_addr)
};
let init_complete_file = matches.value_of("init_complete_file");
validator_config.blockstream = matches.value_of("blockstream").map(ToString::to_string);
let keypair = Arc::new(keypair);
let mut node = Node::new_with_external_ip(&keypair.pubkey(), &gossip_addr, dynamic_port_range);
if let Some(port) = matches.value_of("rpc_port") {
let port_number = port.to_string().parse().expect("integer");
if port_number == 0 {
eprintln!("Invalid RPC port requested: {:?}", port);
exit(1);
}
node.info.rpc = SocketAddr::new(gossip_addr.ip(), port_number);
node.info.rpc_pubsub = SocketAddr::new(gossip_addr.ip(), port_number + 1);
};
let validator = Validator::new(
node,
&keypair,
ledger_path,
&staking_account,
&Arc::new(voting_keypair),
&Arc::new(storage_keypair),
cluster_entrypoint.as_ref(),
&validator_config,
);
if let Some(filename) = init_complete_file {
File::create(filename).unwrap_or_else(|_| panic!("Unable to create: {}", filename));
}
// info!("{}", Info(format!("Validator initialized").to_string()));
println!("{}",
printLn(
format!("Verifier initialized").to_string(),
module_path!().to_string()
)
);
validator.join().expect("Verifier exit");
// info!("{}", Info(format!("Validator exiting..").to_string()));
println!("{}",
printLn(
format!("Verifier exiting..").to_string(),
module_path!().to_string()
)
);
}
|
// Module manifest: submodule declarations plus the public re-export surface.
// (rustfmt: no space before `;`.)
pub mod pos;
// `map` is private; its items are re-exported below so callers use them
// directly from this crate root.
mod map;
pub mod error;
pub use map::*;
|
use std::{
any::Any,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
};
use async_trait::async_trait;
use futures::{
future::{BoxFuture, Shared},
FutureExt, TryFutureExt,
};
use tokio::sync::mpsc;
use tokio::task::JoinError;
use observability_deps::tracing::{error, info, warn};
use trace::{span::Span, TraceCollector};
/// Size of the exporter buffer, in spans: the capacity of the queue between
/// `AsyncExporter` and its background worker.
const CHANNEL_SIZE: usize = 100_000;
/// An `AsyncExport` is a batched async version of `trace::TraceCollector`
#[async_trait]
pub trait AsyncExport: Send + 'static {
    /// Sink the given batch of spans; invoked from the background worker
    /// (currently one span per batch — see note on `AsyncExporter`).
    async fn export(&mut self, span: Vec<Span>);
}
/// `AsyncExporter` wraps a `AsyncExport` and sinks spans to it
///
/// In order to do this it spawns a background worker that pulls messages
/// off a queue and writes them to the `AsyncExport`.
///
/// If this worker cannot keep up, and this queue fills up, spans will
/// be dropped and warnings logged
///
/// Note: Currently this does not batch spans (#2392)
#[derive(Debug)]
pub struct AsyncExporter {
    /// Handle to the background worker task; `Shared` so `drain` can await a
    /// clone of it without consuming the future.
    join: Shared<BoxFuture<'static, Result<(), Arc<JoinError>>>>,
    /// Communication queue with the background worker
    ///
    /// Sending None triggers termination
    sender: tokio::sync::mpsc::Sender<Option<Span>>,
    /// Flags if we already warned about a saturated channel.
    warned_sender_full: AtomicBool,
}
impl AsyncExporter {
    /// Builds an `AsyncExporter` around `collector`, spawning the background
    /// worker task that drains the span queue into it.
    pub fn new<T: AsyncExport>(collector: T) -> Self {
        let (tx, rx) = mpsc::channel(CHANNEL_SIZE);
        let worker = tokio::spawn(background_worker(collector, rx));
        Self {
            // `Shared` lets `drain` await the join handle more than once.
            join: worker.map_err(Arc::new).boxed().shared(),
            sender: tx,
            warned_sender_full: AtomicBool::new(false),
        }
    }
    /// Triggers shutdown of this `AsyncExporter` and waits until all in-flight
    /// spans have been published to the `AsyncExport`
    pub async fn drain(&self) -> Result<(), Arc<JoinError>> {
        info!("batched exporter shutting down");
        // A failed send means the worker already exited; the join below
        // surfaces that outcome, so the error here can be ignored.
        let _ = self.sender.send(None).await;
        self.join.clone().await
    }
}
impl TraceCollector for AsyncExporter {
    fn export(&self, span: Span) {
        use mpsc::error::TrySendError;
        match self.sender.try_send(Some(span)) {
            Ok(()) => {
                // The channel accepted the span again, so re-arm the
                // "queue full" warning for the next saturation streak.
                self.warned_sender_full.store(false, Ordering::SeqCst);
                //TODO: Increment some metric (#2613)
            }
            Err(TrySendError::Full(_)) => {
                // Warn only once per saturation streak to avoid spamming the
                // log system (there might be thousands of traces incoming).
                let already_warned = self.warned_sender_full.swap(true, Ordering::SeqCst);
                if !already_warned {
                    warn!("exporter cannot keep up, dropping spans");
                }
            }
            Err(TrySendError::Closed(_)) => warn!("background worker shutdown"),
        }
    }
    fn as_any(&self) -> &dyn Any {
        self
    }
}
/// Pulls spans off `receiver` and forwards them to `exporter` one at a time,
/// stopping on an explicit `None` shutdown message or when all senders drop.
async fn background_worker<T: AsyncExport>(
    mut exporter: T,
    mut receiver: mpsc::Receiver<Option<Span>>,
) {
    while let Some(message) = receiver.recv().await {
        match message {
            Some(span) => exporter.export(vec![span]).await,
            None => {
                // Explicit shutdown requested via `AsyncExporter::drain`.
                info!("async exporter shut down");
                return;
            }
        }
    }
    // recv() returned None: every sender was dropped without sending the
    // shutdown message first.
    error!("sender-side of async exporter dropped without waiting for shut down");
}
/// An `AsyncExporter` that sinks writes to a tokio mpsc channel.
///
/// Intended for testing ONLY
///
#[derive(Debug)]
pub struct TestAsyncExporter {
    /// Sending half; the test holds the receiver and inspects forwarded spans.
    channel: mpsc::Sender<Span>,
}
impl TestAsyncExporter {
    /// Wraps the sending half of the channel the test will read from.
    pub fn new(channel: mpsc::Sender<Span>) -> Self {
        Self { channel }
    }
}
#[async_trait]
impl AsyncExport for TestAsyncExporter {
    /// Forwards every span in `batch` to the underlying channel, panicking
    /// if the receiving side has been dropped.
    async fn export(&mut self, batch: Vec<Span>) {
        for item in batch.into_iter() {
            self.channel
                .send(item)
                .await
                .expect("channel closed");
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use trace::ctx::SpanContext;
    #[tokio::test]
    async fn test_exporter() {
        // The test holds the receiver side to observe forwarded spans.
        let (sender, mut receiver) = mpsc::channel(10);
        let exporter = AsyncExporter::new(TestAsyncExporter::new(sender));
        let root = SpanContext::new(Arc::new(trace::LogTraceCollector::new()));
        let s1 = root.child("foo");
        let s2 = root.child("bar");
        // Publish three spans; they should arrive in FIFO order below.
        exporter.export(s1.clone());
        exporter.export(s2.clone());
        exporter.export(s2.clone());
        // Drain should wait for all published spans to be flushed
        exporter.drain().await.unwrap();
        let r1 = receiver.recv().await.unwrap();
        let r2 = receiver.recv().await.unwrap();
        let r3 = receiver.recv().await.unwrap();
        // Should not be fatal despite exporter having been shutdown
        exporter.export(s2.clone());
        // r1 corresponds to s1; r2 and r3 to the two s2 publishes. Each
        // received span must carry the root as parent and its source's ids.
        assert_eq!(root.span_id.get(), r1.ctx.parent_span_id.unwrap().get());
        assert_eq!(s1.ctx.span_id.get(), r1.ctx.span_id.get());
        assert_eq!(s1.ctx.trace_id.get(), r1.ctx.trace_id.get());
        assert_eq!(root.span_id.get(), r2.ctx.parent_span_id.unwrap().get());
        assert_eq!(s2.ctx.span_id.get(), r2.ctx.span_id.get());
        assert_eq!(s2.ctx.trace_id.get(), r2.ctx.trace_id.get());
        assert_eq!(root.span_id.get(), r3.ctx.parent_span_id.unwrap().get());
        assert_eq!(s2.ctx.span_id.get(), r3.ctx.span_id.get());
        assert_eq!(s2.ctx.trace_id.get(), r3.ctx.trace_id.get());
    }
}
|
use near_sdk::json_types::ValidAccountId;
use near_sdk::{
borsh::{self, BorshDeserialize, BorshSerialize},
env,
};
use std::convert::TryInto;
/// Newtype wrapping a fixed 32-byte hash value.
///
/// The byte/string `From` impls below populate it with the SHA-256 digest
/// computed by the NEAR host function `env::sha256`.
#[derive(
    BorshDeserialize,
    BorshSerialize,
    Clone,
    Copy,
    PartialEq,
    Eq,
    Hash,
    Debug,
    Ord,
    PartialOrd,
    Default,
)]
pub struct Hash([u8; Hash::LENGTH]);
impl Hash {
    /// Digest size in bytes (SHA-256 produces 32 bytes).
    const LENGTH: usize = 32;
}
impl From<[u8; Hash::LENGTH]> for Hash {
fn from(value: [u8; Hash::LENGTH]) -> Self {
Self(value)
}
}
impl From<&[u8]> for Hash {
    /// Hashes `value` with the NEAR runtime's SHA-256 host function.
    ///
    /// # Panics
    /// Panics if `value` is empty (message relied upon by the
    /// `#[should_panic]` tests below).
    fn from(value: &[u8]) -> Self {
        // `!is_empty()` is the idiomatic emptiness check (clippy::len_zero).
        assert!(!value.is_empty(), "value cannot be empty");
        let hash = env::sha256(value);
        // SHA-256 always yields exactly `Hash::LENGTH` (32) bytes, so the
        // conversion cannot fail.
        Self(hash.try_into().unwrap())
    }
}
impl From<&str> for Hash {
    /// Hashes the string's UTF-8 bytes with the NEAR runtime's SHA-256
    /// host function.
    ///
    /// # Panics
    /// Panics if `value` is empty (message relied upon by the
    /// `#[should_panic]` tests below).
    fn from(value: &str) -> Self {
        // `!is_empty()` is the idiomatic emptiness check (clippy::len_zero).
        assert!(!value.is_empty(), "value cannot be empty");
        let hash = env::sha256(value.as_bytes());
        // SHA-256 always yields exactly `Hash::LENGTH` (32) bytes, so the
        // conversion cannot fail.
        Self(hash.try_into().unwrap())
    }
}
impl From<&String> for Hash {
fn from(value: &String) -> Self {
value.as_str().into()
}
}
impl From<ValidAccountId> for Hash {
    /// Hashes the account id's textual form by delegating through `as_ref()`
    /// to a string-hashing `From` impl above.
    /// NOTE(review): which impl is selected depends on `ValidAccountId`'s
    /// `AsRef` target type — confirm against near_sdk before refactoring.
    fn from(account_id: ValidAccountId) -> Self {
        Hash::from(account_id.as_ref())
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::test_utils::*;
    use near_sdk::{testing_env, MockedBlockchain};
    #[test]
    fn hash_from_string() {
        // A mocked blockchain context is required because hashing goes
        // through the `env::sha256` host function.
        let account_id = "alfio-zappala.near".to_string();
        let context = new_context(&account_id);
        testing_env!(context);
        // Hashing the same input twice must be deterministic.
        let data = "Alfio Zappala";
        let hash = Hash::from(data);
        let hash2 = Hash::from(data);
        assert_eq!(hash, hash2);
    }
    #[test]
    #[should_panic(expected = "value cannot be empty")]
    fn hash_from_empty_string() {
        let account_id = "alfio-zappala.near".to_string();
        let context = new_context(&account_id);
        testing_env!(context);
        // Empty input is rejected by the assert in `From<&str>`.
        Hash::from("");
    }
    #[test]
    fn hash_from_bytes() {
        let account_id = "alfio-zappala.near".to_string();
        let context = new_context(&account_id);
        testing_env!(context);
        // Same determinism check as above, but via the byte-slice impl.
        let data = "Alfio Zappala II";
        let hash = Hash::from(data.as_bytes());
        let hash2 = Hash::from(data.as_bytes());
        assert_eq!(hash, hash2);
    }
    #[test]
    #[should_panic(expected = "value cannot be empty")]
    fn hash_from_empty_bytes() {
        let account_id = "alfio-zappala.near".to_string();
        let context = new_context(&account_id);
        testing_env!(context);
        // Empty input is rejected by the assert in `From<&[u8]>`.
        Hash::from("".as_bytes());
    }
}
|
// NOTE(review): machine-generated windows-sys style FFI declarations; the
// names match the Windows Tablet PC ink-recognition C API (handle types
// HRECOGNIZER / HRECOCONTEXT / HRECOWORDLIST) — do not hand-edit.
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {
    #[cfg(feature = "Win32_Graphics_Gdi")]
    pub fn AddStroke(hrc: HRECOCONTEXT, ppacketdesc: *const PACKET_DESCRIPTION, cbpacket: u32, ppacket: *const u8, pxform: *const super::super::Graphics::Gdi::XFORM) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn AddWordsToWordList(hwl: HRECOWORDLIST, pwcwords: super::super::Foundation::PWSTR) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn AdviseInkChange(hrc: HRECOCONTEXT, bnewstroke: super::super::Foundation::BOOL) -> ::windows_sys::core::HRESULT;
    pub fn CreateContext(hrec: HRECOGNIZER, phrc: *mut HRECOCONTEXT) -> ::windows_sys::core::HRESULT;
    pub fn CreateRecognizer(pclsid: *mut ::windows_sys::core::GUID, phrec: *mut HRECOGNIZER) -> ::windows_sys::core::HRESULT;
    pub fn DestroyContext(hrc: HRECOCONTEXT) -> ::windows_sys::core::HRESULT;
    pub fn DestroyRecognizer(hrec: HRECOGNIZER) -> ::windows_sys::core::HRESULT;
    pub fn DestroyWordList(hwl: HRECOWORDLIST) -> ::windows_sys::core::HRESULT;
    pub fn EndInkInput(hrc: HRECOCONTEXT) -> ::windows_sys::core::HRESULT;
    pub fn GetAllRecognizers(recognizerclsids: *mut *mut ::windows_sys::core::GUID, count: *mut u32) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn GetBestResultString(hrc: HRECOCONTEXT, pcsize: *mut u32, pwcbestresult: super::super::Foundation::PWSTR) -> ::windows_sys::core::HRESULT;
    pub fn GetLatticePtr(hrc: HRECOCONTEXT, pplattice: *mut *mut RECO_LATTICE) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn GetLeftSeparator(hrc: HRECOCONTEXT, pcsize: *mut u32, pwcleftseparator: super::super::Foundation::PWSTR) -> ::windows_sys::core::HRESULT;
    pub fn GetRecoAttributes(hrec: HRECOGNIZER, precoattrs: *mut RECO_ATTRS) -> ::windows_sys::core::HRESULT;
    pub fn GetResultPropertyList(hrec: HRECOGNIZER, ppropertycount: *mut u32, ppropertyguid: *mut ::windows_sys::core::GUID) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn GetRightSeparator(hrc: HRECOCONTEXT, pcsize: *mut u32, pwcrightseparator: super::super::Foundation::PWSTR) -> ::windows_sys::core::HRESULT;
    pub fn GetUnicodeRanges(hrec: HRECOGNIZER, pcranges: *mut u32, pcr: *mut CHARACTER_RANGE) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn IsStringSupported(hrc: HRECOCONTEXT, wcstring: u32, pwcstring: super::super::Foundation::PWSTR) -> ::windows_sys::core::HRESULT;
    pub fn LoadCachedAttributes(clsid: ::windows_sys::core::GUID, precoattributes: *mut RECO_ATTRS) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn MakeWordList(hrec: HRECOGNIZER, pbuffer: super::super::Foundation::PWSTR, phwl: *mut HRECOWORDLIST) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn Process(hrc: HRECOCONTEXT, pbpartialprocessing: *mut super::super::Foundation::BOOL) -> ::windows_sys::core::HRESULT;
    pub fn SetEnabledUnicodeRanges(hrc: HRECOCONTEXT, cranges: u32, pcr: *mut CHARACTER_RANGE) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn SetFactoid(hrc: HRECOCONTEXT, cwcfactoid: u32, pwcfactoid: super::super::Foundation::PWSTR) -> ::windows_sys::core::HRESULT;
    pub fn SetFlags(hrc: HRECOCONTEXT, dwflags: u32) -> ::windows_sys::core::HRESULT;
    pub fn SetGuide(hrc: HRECOCONTEXT, pguide: *const RECO_GUIDE, iindex: u32) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_Foundation")]
    pub fn SetTextContext(hrc: HRECOCONTEXT, cwcbefore: u32, pwcbefore: super::super::Foundation::PWSTR, cwcafter: u32, pwcafter: super::super::Foundation::PWSTR) -> ::windows_sys::core::HRESULT;
    pub fn SetWordList(hrc: HRECOCONTEXT, hwl: HRECOWORDLIST) -> ::windows_sys::core::HRESULT;
}
// Alternate-break mode values.
pub type ALT_BREAKS = i32;
pub const ALT_BREAKS_SAME: ALT_BREAKS = 0i32;
pub const ALT_BREAKS_UNIQUE: ALT_BREAKS = 1i32;
pub const ALT_BREAKS_FULL: ALT_BREAKS = 2i32;
// ASYNC_RECO_* are power-of-two bit flags reporting which asynchronous
// recognition step failed.
pub const ASYNC_RECO_ADDSTROKE_FAILED: u32 = 4u32;
pub const ASYNC_RECO_INTERRUPTED: u32 = 1u32;
pub const ASYNC_RECO_PROCESS_FAILED: u32 = 2u32;
pub const ASYNC_RECO_RESETCONTEXT_FAILED: u32 = 16u32;
pub const ASYNC_RECO_SETCACMODE_FAILED: u32 = 8u32;
pub const ASYNC_RECO_SETFACTOID_FAILED: u32 = 128u32;
pub const ASYNC_RECO_SETFLAGS_FAILED: u32 = 64u32;
pub const ASYNC_RECO_SETGUIDE_FAILED: u32 = 32u32;
pub const ASYNC_RECO_SETTEXTCONTEXT_FAILED: u32 = 256u32;
pub const ASYNC_RECO_SETWORDLIST_FAILED: u32 = 512u32;
// Appearance constants (flat vs. 3D).
pub type AppearanceConstants = i32;
pub const rtfFlat: AppearanceConstants = 0i32;
pub const rtfThreeD: AppearanceConstants = 1i32;
pub const BEST_COMPLETE: u32 = 2u32;
// Border style constants.
pub type BorderStyleConstants = i32;
pub const rtfNoBorder: BorderStyleConstants = 0i32;
pub const rtfFixedSingle: BorderStyleConstants = 1i32;
// Character auto-completion (CAC) modes (see SetCACMode / 
// DISPID_IRecoCtx_CharacterAutoCompletionMode below).
pub const CAC_FULL: u32 = 0u32;
pub const CAC_PREFIX: u32 = 1u32;
pub const CAC_RANDOM: u32 = 2u32;
/// A contiguous range of UTF-16 code units: `cChars` characters starting at
/// `wcLow`. Used by `GetUnicodeRanges` / `SetEnabledUnicodeRanges` above.
#[repr(C)]
pub struct CHARACTER_RANGE {
    pub wcLow: u16,
    pub cChars: u16,
}
impl ::core::marker::Copy for CHARACTER_RANGE {}
impl ::core::clone::Clone for CHARACTER_RANGE {
    // Plain bitwise copy; the type is `Copy`.
    fn clone(&self) -> Self {
        *self
    }
}
// Recognition confidence levels.
pub type CONFIDENCE_LEVEL = i32;
pub const CFL_STRONG: CONFIDENCE_LEVEL = 0i32;
pub const CFL_INTERMEDIATE: CONFIDENCE_LEVEL = 1i32;
pub const CFL_POOR: CONFIDENCE_LEVEL = 2i32;
// Correction UI visibility/mode values.
pub type CorrectionMode = i32;
pub const CorrectionMode_NotVisible: CorrectionMode = 0i32;
pub const CorrectionMode_PreInsertion: CorrectionMode = 1i32;
pub const CorrectionMode_PostInsertionCollapsed: CorrectionMode = 2i32;
pub const CorrectionMode_PostInsertionExpanded: CorrectionMode = 3i32;
// Correction UI placement values.
pub type CorrectionPosition = i32;
pub const CorrectionPosition_Auto: CorrectionPosition = 0i32;
pub const CorrectionPosition_Bottom: CorrectionPosition = 1i32;
pub const CorrectionPosition_Top: CorrectionPosition = 2i32;
// Dispatch IDs (DISPIDs) for the Ink automation object.
pub type DISPID_Ink = i32;
pub const DISPID_IStrokes: DISPID_Ink = 1i32;
pub const DISPID_IExtendedProperties: DISPID_Ink = 2i32;
pub const DISPID_IGetBoundingBox: DISPID_Ink = 3i32;
pub const DISPID_IDeleteStrokes: DISPID_Ink = 4i32;
pub const DISPID_IDeleteStroke: DISPID_Ink = 5i32;
pub const DISPID_IExtractStrokes: DISPID_Ink = 6i32;
pub const DISPID_IExtractWithRectangle: DISPID_Ink = 7i32;
pub const DISPID_IDirty: DISPID_Ink = 8i32;
pub const DISPID_ICustomStrokes: DISPID_Ink = 9i32;
pub const DISPID_IClone: DISPID_Ink = 10i32;
pub const DISPID_IHitTestCircle: DISPID_Ink = 11i32;
pub const DISPID_IHitTestWithRectangle: DISPID_Ink = 12i32;
pub const DISPID_IHitTestWithLasso: DISPID_Ink = 13i32;
pub const DISPID_INearestPoint: DISPID_Ink = 14i32;
pub const DISPID_ICreateStrokes: DISPID_Ink = 15i32;
pub const DISPID_ICreateStroke: DISPID_Ink = 16i32;
pub const DISPID_IAddStrokesAtRectangle: DISPID_Ink = 17i32;
pub const DISPID_IClip: DISPID_Ink = 18i32;
pub const DISPID_ISave: DISPID_Ink = 19i32;
pub const DISPID_ILoad: DISPID_Ink = 20i32;
pub const DISPID_ICreateStrokeFromPoints: DISPID_Ink = 21i32;
pub const DISPID_IClipboardCopyWithRectangle: DISPID_Ink = 22i32;
pub const DISPID_IClipboardCopy: DISPID_Ink = 23i32;
pub const DISPID_ICanPaste: DISPID_Ink = 24i32;
pub const DISPID_IClipboardPaste: DISPID_Ink = 25i32;
// Dispatch IDs (DISPIDs) for the InkCollector automation object.
pub type DISPID_InkCollector = i32;
pub const DISPID_ICEnabled: DISPID_InkCollector = 1i32;
pub const DISPID_ICHwnd: DISPID_InkCollector = 2i32;
pub const DISPID_ICPaint: DISPID_InkCollector = 3i32;
pub const DISPID_ICText: DISPID_InkCollector = 4i32;
pub const DISPID_ICDefaultDrawingAttributes: DISPID_InkCollector = 5i32;
pub const DISPID_ICRenderer: DISPID_InkCollector = 6i32;
pub const DISPID_ICInk: DISPID_InkCollector = 7i32;
pub const DISPID_ICAutoRedraw: DISPID_InkCollector = 8i32;
pub const DISPID_ICCollectingInk: DISPID_InkCollector = 9i32;
pub const DISPID_ICSetEventInterest: DISPID_InkCollector = 10i32;
pub const DISPID_ICGetEventInterest: DISPID_InkCollector = 11i32;
pub const DISPID_IOEditingMode: DISPID_InkCollector = 12i32;
pub const DISPID_IOSelection: DISPID_InkCollector = 13i32;
pub const DISPID_IOAttachMode: DISPID_InkCollector = 14i32;
pub const DISPID_IOHitTestSelection: DISPID_InkCollector = 15i32;
pub const DISPID_IODraw: DISPID_InkCollector = 16i32;
pub const DISPID_IPPicture: DISPID_InkCollector = 17i32;
pub const DISPID_IPSizeMode: DISPID_InkCollector = 18i32;
pub const DISPID_IPBackColor: DISPID_InkCollector = 19i32;
pub const DISPID_ICCursors: DISPID_InkCollector = 20i32;
pub const DISPID_ICMarginX: DISPID_InkCollector = 21i32;
pub const DISPID_ICMarginY: DISPID_InkCollector = 22i32;
pub const DISPID_ICSetWindowInputRectangle: DISPID_InkCollector = 23i32;
pub const DISPID_ICGetWindowInputRectangle: DISPID_InkCollector = 24i32;
pub const DISPID_ICTablet: DISPID_InkCollector = 25i32;
pub const DISPID_ICSetAllTabletsMode: DISPID_InkCollector = 26i32;
pub const DISPID_ICSetSingleTabletIntegratedMode: DISPID_InkCollector = 27i32;
pub const DISPID_ICCollectionMode: DISPID_InkCollector = 28i32;
pub const DISPID_ICSetGestureStatus: DISPID_InkCollector = 29i32;
pub const DISPID_ICGetGestureStatus: DISPID_InkCollector = 30i32;
pub const DISPID_ICDynamicRendering: DISPID_InkCollector = 31i32;
pub const DISPID_ICDesiredPacketDescription: DISPID_InkCollector = 32i32;
pub const DISPID_IOEraserMode: DISPID_InkCollector = 33i32;
pub const DISPID_IOEraserWidth: DISPID_InkCollector = 34i32;
pub const DISPID_ICMouseIcon: DISPID_InkCollector = 35i32;
pub const DISPID_ICMousePointer: DISPID_InkCollector = 36i32;
pub const DISPID_IPInkEnabled: DISPID_InkCollector = 37i32;
pub const DISPID_ICSupportHighContrastInk: DISPID_InkCollector = 38i32;
pub const DISPID_IOSupportHighContrastSelectionUI: DISPID_InkCollector = 39i32;
// Dispatch IDs (DISPIDs) for InkCollector event sinks.
pub type DISPID_InkCollectorEvent = i32;
pub const DISPID_ICEStroke: DISPID_InkCollectorEvent = 1i32;
pub const DISPID_ICECursorDown: DISPID_InkCollectorEvent = 2i32;
pub const DISPID_ICENewPackets: DISPID_InkCollectorEvent = 3i32;
pub const DISPID_ICENewInAirPackets: DISPID_InkCollectorEvent = 4i32;
pub const DISPID_ICECursorButtonDown: DISPID_InkCollectorEvent = 5i32;
pub const DISPID_ICECursorButtonUp: DISPID_InkCollectorEvent = 6i32;
pub const DISPID_ICECursorInRange: DISPID_InkCollectorEvent = 7i32;
pub const DISPID_ICECursorOutOfRange: DISPID_InkCollectorEvent = 8i32;
pub const DISPID_ICESystemGesture: DISPID_InkCollectorEvent = 9i32;
pub const DISPID_ICEGesture: DISPID_InkCollectorEvent = 10i32;
pub const DISPID_ICETabletAdded: DISPID_InkCollectorEvent = 11i32;
pub const DISPID_ICETabletRemoved: DISPID_InkCollectorEvent = 12i32;
pub const DISPID_IOEPainting: DISPID_InkCollectorEvent = 13i32;
pub const DISPID_IOEPainted: DISPID_InkCollectorEvent = 14i32;
pub const DISPID_IOESelectionChanging: DISPID_InkCollectorEvent = 15i32;
pub const DISPID_IOESelectionChanged: DISPID_InkCollectorEvent = 16i32;
pub const DISPID_IOESelectionMoving: DISPID_InkCollectorEvent = 17i32;
pub const DISPID_IOESelectionMoved: DISPID_InkCollectorEvent = 18i32;
pub const DISPID_IOESelectionResizing: DISPID_InkCollectorEvent = 19i32;
pub const DISPID_IOESelectionResized: DISPID_InkCollectorEvent = 20i32;
pub const DISPID_IOEStrokesDeleting: DISPID_InkCollectorEvent = 21i32;
pub const DISPID_IOEStrokesDeleted: DISPID_InkCollectorEvent = 22i32;
pub const DISPID_IPEChangeUICues: DISPID_InkCollectorEvent = 23i32;
pub const DISPID_IPEClick: DISPID_InkCollectorEvent = 24i32;
pub const DISPID_IPEDblClick: DISPID_InkCollectorEvent = 25i32;
pub const DISPID_IPEInvalidated: DISPID_InkCollectorEvent = 26i32;
pub const DISPID_IPEMouseDown: DISPID_InkCollectorEvent = 27i32;
pub const DISPID_IPEMouseEnter: DISPID_InkCollectorEvent = 28i32;
pub const DISPID_IPEMouseHover: DISPID_InkCollectorEvent = 29i32;
pub const DISPID_IPEMouseLeave: DISPID_InkCollectorEvent = 30i32;
pub const DISPID_IPEMouseMove: DISPID_InkCollectorEvent = 31i32;
pub const DISPID_IPEMouseUp: DISPID_InkCollectorEvent = 32i32;
pub const DISPID_IPEMouseWheel: DISPID_InkCollectorEvent = 33i32;
pub const DISPID_IPESizeModeChanged: DISPID_InkCollectorEvent = 34i32;
pub const DISPID_IPEStyleChanged: DISPID_InkCollectorEvent = 35i32;
pub const DISPID_IPESystemColorsChanged: DISPID_InkCollectorEvent = 36i32;
pub const DISPID_IPEKeyDown: DISPID_InkCollectorEvent = 37i32;
pub const DISPID_IPEKeyPress: DISPID_InkCollectorEvent = 38i32;
pub const DISPID_IPEKeyUp: DISPID_InkCollectorEvent = 39i32;
pub const DISPID_IPEResize: DISPID_InkCollectorEvent = 40i32;
pub const DISPID_IPESizeChanged: DISPID_InkCollectorEvent = 41i32;
// Dispatch IDs (DISPIDs) for the InkCursor automation object.
pub type DISPID_InkCursor = i32;
pub const DISPID_ICsrName: DISPID_InkCursor = 0i32;
pub const DISPID_ICsrId: DISPID_InkCursor = 1i32;
pub const DISPID_ICsrDrawingAttributes: DISPID_InkCursor = 2i32;
pub const DISPID_ICsrButtons: DISPID_InkCursor = 3i32;
pub const DISPID_ICsrInverted: DISPID_InkCursor = 4i32;
pub const DISPID_ICsrTablet: DISPID_InkCursor = 5i32;
// Dispatch IDs for a single cursor button.
pub type DISPID_InkCursorButton = i32;
pub const DISPID_ICBName: DISPID_InkCursorButton = 0i32;
pub const DISPID_ICBId: DISPID_InkCursorButton = 1i32;
pub const DISPID_ICBState: DISPID_InkCursorButton = 2i32;
// Dispatch IDs for the cursor-button collection (-4 is the standard
// DISPID_NEWENUM used by COM collections).
pub type DISPID_InkCursorButtons = i32;
pub const DISPID_ICBs_NewEnum: DISPID_InkCursorButtons = -4i32;
pub const DISPID_ICBsItem: DISPID_InkCursorButtons = 0i32;
pub const DISPID_ICBsCount: DISPID_InkCursorButtons = 1i32;
// Dispatch IDs for the cursor collection.
pub type DISPID_InkCursors = i32;
pub const DISPID_ICs_NewEnum: DISPID_InkCursors = -4i32;
pub const DISPID_ICsItem: DISPID_InkCursors = 0i32;
pub const DISPID_ICsCount: DISPID_InkCursors = 1i32;
// Dispatch IDs for the custom-strokes collection.
pub type DISPID_InkCustomStrokes = i32;
pub const DISPID_ICSs_NewEnum: DISPID_InkCustomStrokes = -4i32;
pub const DISPID_ICSsItem: DISPID_InkCustomStrokes = 0i32;
pub const DISPID_ICSsCount: DISPID_InkCustomStrokes = 1i32;
pub const DISPID_ICSsAdd: DISPID_InkCustomStrokes = 2i32;
pub const DISPID_ICSsRemove: DISPID_InkCustomStrokes = 3i32;
pub const DISPID_ICSsClear: DISPID_InkCustomStrokes = 4i32;
// Dispatch IDs (DISPIDs) for the InkDivider automation object.
pub type DISPID_InkDivider = i32;
pub const DISPID_IInkDivider_Strokes: DISPID_InkDivider = 1i32;
pub const DISPID_IInkDivider_RecognizerContext: DISPID_InkDivider = 2i32;
pub const DISPID_IInkDivider_LineHeight: DISPID_InkDivider = 3i32;
pub const DISPID_IInkDivider_Divide: DISPID_InkDivider = 4i32;
// Dispatch IDs for division results.
pub type DISPID_InkDivisionResult = i32;
pub const DISPID_IInkDivisionResult_Strokes: DISPID_InkDivisionResult = 1i32;
pub const DISPID_IInkDivisionResult_ResultByType: DISPID_InkDivisionResult = 2i32;
// Dispatch IDs for a single division unit.
pub type DISPID_InkDivisionUnit = i32;
pub const DISPID_IInkDivisionUnit_Strokes: DISPID_InkDivisionUnit = 1i32;
pub const DISPID_IInkDivisionUnit_DivisionType: DISPID_InkDivisionUnit = 2i32;
pub const DISPID_IInkDivisionUnit_RecognizedString: DISPID_InkDivisionUnit = 3i32;
pub const DISPID_IInkDivisionUnit_RotationTransform: DISPID_InkDivisionUnit = 4i32;
// Dispatch IDs for the division-unit collection.
pub type DISPID_InkDivisionUnits = i32;
pub const DISPID_IInkDivisionUnits_NewEnum: DISPID_InkDivisionUnits = -4i32;
pub const DISPID_IInkDivisionUnits_Item: DISPID_InkDivisionUnits = 0i32;
pub const DISPID_IInkDivisionUnits_Count: DISPID_InkDivisionUnits = 1i32;
// Dispatch IDs for ink drawing attributes.
pub type DISPID_InkDrawingAttributes = i32;
pub const DISPID_DAHeight: DISPID_InkDrawingAttributes = 1i32;
pub const DISPID_DAColor: DISPID_InkDrawingAttributes = 2i32;
pub const DISPID_DAWidth: DISPID_InkDrawingAttributes = 3i32;
pub const DISPID_DAFitToCurve: DISPID_InkDrawingAttributes = 4i32;
pub const DISPID_DAIgnorePressure: DISPID_InkDrawingAttributes = 5i32;
pub const DISPID_DAAntiAliased: DISPID_InkDrawingAttributes = 6i32;
pub const DISPID_DATransparency: DISPID_InkDrawingAttributes = 7i32;
pub const DISPID_DARasterOperation: DISPID_InkDrawingAttributes = 8i32;
pub const DISPID_DAPenTip: DISPID_InkDrawingAttributes = 9i32;
pub const DISPID_DAClone: DISPID_InkDrawingAttributes = 10i32;
pub const DISPID_DAExtendedProperties: DISPID_InkDrawingAttributes = 11i32;
// Dispatch IDs (DISPIDs) for the InkEdit control.
pub type DISPID_InkEdit = i32;
pub const DISPID_Text: DISPID_InkEdit = 0i32;
pub const DISPID_TextRTF: DISPID_InkEdit = 1i32;
pub const DISPID_Hwnd: DISPID_InkEdit = 2i32;
pub const DISPID_DisableNoScroll: DISPID_InkEdit = 3i32;
pub const DISPID_Locked: DISPID_InkEdit = 4i32;
pub const DISPID_Enabled: DISPID_InkEdit = 5i32;
pub const DISPID_MaxLength: DISPID_InkEdit = 6i32;
pub const DISPID_MultiLine: DISPID_InkEdit = 7i32;
pub const DISPID_ScrollBars: DISPID_InkEdit = 8i32;
pub const DISPID_RTSelStart: DISPID_InkEdit = 9i32;
pub const DISPID_RTSelLength: DISPID_InkEdit = 10i32;
pub const DISPID_RTSelText: DISPID_InkEdit = 11i32;
pub const DISPID_SelAlignment: DISPID_InkEdit = 12i32;
pub const DISPID_SelBold: DISPID_InkEdit = 13i32;
pub const DISPID_SelCharOffset: DISPID_InkEdit = 14i32;
pub const DISPID_SelColor: DISPID_InkEdit = 15i32;
pub const DISPID_SelFontName: DISPID_InkEdit = 16i32;
pub const DISPID_SelFontSize: DISPID_InkEdit = 17i32;
pub const DISPID_SelItalic: DISPID_InkEdit = 18i32;
pub const DISPID_SelRTF: DISPID_InkEdit = 19i32;
pub const DISPID_SelUnderline: DISPID_InkEdit = 20i32;
pub const DISPID_DragIcon: DISPID_InkEdit = 21i32;
pub const DISPID_Status: DISPID_InkEdit = 22i32;
pub const DISPID_UseMouseForInput: DISPID_InkEdit = 23i32;
pub const DISPID_InkMode: DISPID_InkEdit = 24i32;
pub const DISPID_InkInsertMode: DISPID_InkEdit = 25i32;
pub const DISPID_RecoTimeout: DISPID_InkEdit = 26i32;
pub const DISPID_DrawAttr: DISPID_InkEdit = 27i32;
pub const DISPID_Recognizer: DISPID_InkEdit = 28i32;
pub const DISPID_Factoid: DISPID_InkEdit = 29i32;
pub const DISPID_SelInk: DISPID_InkEdit = 30i32;
pub const DISPID_SelInksDisplayMode: DISPID_InkEdit = 31i32;
pub const DISPID_Recognize: DISPID_InkEdit = 32i32;
pub const DISPID_GetGestStatus: DISPID_InkEdit = 33i32;
pub const DISPID_SetGestStatus: DISPID_InkEdit = 34i32;
pub const DISPID_Refresh: DISPID_InkEdit = 35i32;
// Dispatch IDs for InkEdit event sinks.
pub type DISPID_InkEditEvents = i32;
pub const DISPID_IeeChange: DISPID_InkEditEvents = 1i32;
pub const DISPID_IeeSelChange: DISPID_InkEditEvents = 2i32;
pub const DISPID_IeeKeyDown: DISPID_InkEditEvents = 3i32;
pub const DISPID_IeeKeyUp: DISPID_InkEditEvents = 4i32;
pub const DISPID_IeeMouseUp: DISPID_InkEditEvents = 5i32;
pub const DISPID_IeeMouseDown: DISPID_InkEditEvents = 6i32;
pub const DISPID_IeeKeyPress: DISPID_InkEditEvents = 7i32;
pub const DISPID_IeeDblClick: DISPID_InkEditEvents = 8i32;
pub const DISPID_IeeClick: DISPID_InkEditEvents = 9i32;
pub const DISPID_IeeMouseMove: DISPID_InkEditEvents = 10i32;
pub const DISPID_IeeCursorDown: DISPID_InkEditEvents = 21i32;
pub const DISPID_IeeStroke: DISPID_InkEditEvents = 22i32;
pub const DISPID_IeeGesture: DISPID_InkEditEvents = 23i32;
pub const DISPID_IeeRecognitionResult: DISPID_InkEditEvents = 24i32;
// Dispatch IDs (DISPIDs) for Ink object events.
pub type DISPID_InkEvent = i32;
pub const DISPID_IEInkAdded: DISPID_InkEvent = 1i32;
pub const DISPID_IEInkDeleted: DISPID_InkEvent = 2i32;
// Dispatch IDs for the extended-property collection.
pub type DISPID_InkExtendedProperties = i32;
pub const DISPID_IEPs_NewEnum: DISPID_InkExtendedProperties = -4i32;
pub const DISPID_IEPsItem: DISPID_InkExtendedProperties = 0i32;
pub const DISPID_IEPsCount: DISPID_InkExtendedProperties = 1i32;
pub const DISPID_IEPsAdd: DISPID_InkExtendedProperties = 2i32;
pub const DISPID_IEPsRemove: DISPID_InkExtendedProperties = 3i32;
pub const DISPID_IEPsClear: DISPID_InkExtendedProperties = 4i32;
pub const DISPID_IEPsDoesPropertyExist: DISPID_InkExtendedProperties = 5i32;
// Dispatch IDs for a single extended property.
pub type DISPID_InkExtendedProperty = i32;
pub const DISPID_IEPGuid: DISPID_InkExtendedProperty = 1i32;
pub const DISPID_IEPData: DISPID_InkExtendedProperty = 2i32;
// Dispatch IDs for the InkGesture object.
pub type DISPID_InkGesture = i32;
pub const DISPID_IGId: DISPID_InkGesture = 0i32;
pub const DISPID_IGGetHotPoint: DISPID_InkGesture = 1i32;
pub const DISPID_IGConfidence: DISPID_InkGesture = 2i32;
// Dispatch IDs for a recognition alternate.
pub type DISPID_InkRecoAlternate = i32;
pub const DISPID_InkRecoAlternate_String: DISPID_InkRecoAlternate = 1i32;
pub const DISPID_InkRecoAlternate_LineNumber: DISPID_InkRecoAlternate = 2i32;
pub const DISPID_InkRecoAlternate_Baseline: DISPID_InkRecoAlternate = 3i32;
pub const DISPID_InkRecoAlternate_Midline: DISPID_InkRecoAlternate = 4i32;
pub const DISPID_InkRecoAlternate_Ascender: DISPID_InkRecoAlternate = 5i32;
pub const DISPID_InkRecoAlternate_Descender: DISPID_InkRecoAlternate = 6i32;
pub const DISPID_InkRecoAlternate_Confidence: DISPID_InkRecoAlternate = 7i32;
pub const DISPID_InkRecoAlternate_Strokes: DISPID_InkRecoAlternate = 8i32;
pub const DISPID_InkRecoAlternate_GetStrokesFromStrokeRanges: DISPID_InkRecoAlternate = 9i32;
pub const DISPID_InkRecoAlternate_GetStrokesFromTextRange: DISPID_InkRecoAlternate = 10i32;
pub const DISPID_InkRecoAlternate_GetTextRangeFromStrokes: DISPID_InkRecoAlternate = 11i32;
pub const DISPID_InkRecoAlternate_GetPropertyValue: DISPID_InkRecoAlternate = 12i32;
pub const DISPID_InkRecoAlternate_LineAlternates: DISPID_InkRecoAlternate = 13i32;
pub const DISPID_InkRecoAlternate_ConfidenceAlternates: DISPID_InkRecoAlternate = 14i32;
pub const DISPID_InkRecoAlternate_AlternatesWithConstantPropertyValues: DISPID_InkRecoAlternate = 15i32;
// Dispatch IDs (DISPIDs) for the recognizer context automation object.
pub type DISPID_InkRecoContext = i32;
pub const DISPID_IRecoCtx_Strokes: DISPID_InkRecoContext = 1i32;
pub const DISPID_IRecoCtx_CharacterAutoCompletionMode: DISPID_InkRecoContext = 2i32;
pub const DISPID_IRecoCtx_Factoid: DISPID_InkRecoContext = 3i32;
pub const DISPID_IRecoCtx_WordList: DISPID_InkRecoContext = 4i32;
pub const DISPID_IRecoCtx_Recognizer: DISPID_InkRecoContext = 5i32;
pub const DISPID_IRecoCtx_Guide: DISPID_InkRecoContext = 6i32;
pub const DISPID_IRecoCtx_Flags: DISPID_InkRecoContext = 7i32;
pub const DISPID_IRecoCtx_PrefixText: DISPID_InkRecoContext = 8i32;
pub const DISPID_IRecoCtx_SuffixText: DISPID_InkRecoContext = 9i32;
pub const DISPID_IRecoCtx_StopRecognition: DISPID_InkRecoContext = 10i32;
pub const DISPID_IRecoCtx_Clone: DISPID_InkRecoContext = 11i32;
pub const DISPID_IRecoCtx_Recognize: DISPID_InkRecoContext = 12i32;
pub const DISPID_IRecoCtx_StopBackgroundRecognition: DISPID_InkRecoContext = 13i32;
pub const DISPID_IRecoCtx_EndInkInput: DISPID_InkRecoContext = 14i32;
pub const DISPID_IRecoCtx_BackgroundRecognize: DISPID_InkRecoContext = 15i32;
pub const DISPID_IRecoCtx_BackgroundRecognizeWithAlternates: DISPID_InkRecoContext = 16i32;
pub const DISPID_IRecoCtx_IsStringSupported: DISPID_InkRecoContext = 17i32;
// Dispatch IDs for the v2 recognizer context interface.
pub type DISPID_InkRecoContext2 = i32;
pub const DISPID_IRecoCtx2_EnabledUnicodeRanges: DISPID_InkRecoContext2 = 0i32;
// Dispatch IDs for the recognition-alternates collection.
pub type DISPID_InkRecognitionAlternates = i32;
pub const DISPID_InkRecognitionAlternates_NewEnum: DISPID_InkRecognitionAlternates = -4i32;
pub const DISPID_InkRecognitionAlternates_Item: DISPID_InkRecognitionAlternates = 0i32;
pub const DISPID_InkRecognitionAlternates_Count: DISPID_InkRecognitionAlternates = 1i32;
pub const DISPID_InkRecognitionAlternates_Strokes: DISPID_InkRecognitionAlternates = 2i32;
// Dispatch IDs for recognition events.
pub type DISPID_InkRecognitionEvent = i32;
pub const DISPID_IRERecognitionWithAlternates: DISPID_InkRecognitionEvent = 1i32;
pub const DISPID_IRERecognition: DISPID_InkRecognitionEvent = 2i32;
// Dispatch IDs for a recognition result.
pub type DISPID_InkRecognitionResult = i32;
pub const DISPID_InkRecognitionResult_TopString: DISPID_InkRecognitionResult = 1i32;
pub const DISPID_InkRecognitionResult_TopAlternate: DISPID_InkRecognitionResult = 2i32;
pub const DISPID_InkRecognitionResult_Strokes: DISPID_InkRecognitionResult = 3i32;
pub const DISPID_InkRecognitionResult_TopConfidence: DISPID_InkRecognitionResult = 4i32;
pub const DISPID_InkRecognitionResult_AlternatesFromSelection: DISPID_InkRecognitionResult = 5i32;
pub const DISPID_InkRecognitionResult_ModifyTopAlternate: DISPID_InkRecognitionResult = 6i32;
pub const DISPID_InkRecognitionResult_SetResultOnStrokes: DISPID_InkRecognitionResult = 7i32;
pub type DISPID_InkRecognizer = i32;
pub const DISPID_RecoClsid: DISPID_InkRecognizer = 1i32;
pub const DISPID_RecoName: DISPID_InkRecognizer = 2i32;
pub const DISPID_RecoVendor: DISPID_InkRecognizer = 3i32;
pub const DISPID_RecoCapabilities: DISPID_InkRecognizer = 4i32;
pub const DISPID_RecoLanguageID: DISPID_InkRecognizer = 5i32;
pub const DISPID_RecoPreferredPacketDescription: DISPID_InkRecognizer = 6i32;
pub const DISPID_RecoCreateRecognizerContext: DISPID_InkRecognizer = 7i32;
pub const DISPID_RecoSupportedProperties: DISPID_InkRecognizer = 8i32;
pub type DISPID_InkRecognizer2 = i32;
pub const DISPID_RecoId: DISPID_InkRecognizer2 = 0i32;
pub const DISPID_RecoUnicodeRanges: DISPID_InkRecognizer2 = 1i32;
pub type DISPID_InkRecognizerGuide = i32;
pub const DISPID_IRGWritingBox: DISPID_InkRecognizerGuide = 1i32;
pub const DISPID_IRGDrawnBox: DISPID_InkRecognizerGuide = 2i32;
pub const DISPID_IRGRows: DISPID_InkRecognizerGuide = 3i32;
pub const DISPID_IRGColumns: DISPID_InkRecognizerGuide = 4i32;
pub const DISPID_IRGMidline: DISPID_InkRecognizerGuide = 5i32;
pub const DISPID_IRGGuideData: DISPID_InkRecognizerGuide = 6i32;
pub type DISPID_InkRecognizers = i32;
pub const DISPID_IRecos_NewEnum: DISPID_InkRecognizers = -4i32;
pub const DISPID_IRecosItem: DISPID_InkRecognizers = 0i32;
pub const DISPID_IRecosCount: DISPID_InkRecognizers = 1i32;
pub const DISPID_IRecosGetDefaultRecognizer: DISPID_InkRecognizers = 2i32;
pub type DISPID_InkRectangle = i32;
pub const DISPID_IRTop: DISPID_InkRectangle = 1i32;
pub const DISPID_IRLeft: DISPID_InkRectangle = 2i32;
pub const DISPID_IRBottom: DISPID_InkRectangle = 3i32;
pub const DISPID_IRRight: DISPID_InkRectangle = 4i32;
pub const DISPID_IRGetRectangle: DISPID_InkRectangle = 5i32;
pub const DISPID_IRSetRectangle: DISPID_InkRectangle = 6i32;
pub const DISPID_IRData: DISPID_InkRectangle = 7i32;
pub type DISPID_InkRenderer = i32;
pub const DISPID_IRGetViewTransform: DISPID_InkRenderer = 1i32;
pub const DISPID_IRSetViewTransform: DISPID_InkRenderer = 2i32;
pub const DISPID_IRGetObjectTransform: DISPID_InkRenderer = 3i32;
pub const DISPID_IRSetObjectTransform: DISPID_InkRenderer = 4i32;
pub const DISPID_IRDraw: DISPID_InkRenderer = 5i32;
pub const DISPID_IRDrawStroke: DISPID_InkRenderer = 6i32;
pub const DISPID_IRPixelToInkSpace: DISPID_InkRenderer = 7i32;
pub const DISPID_IRInkSpaceToPixel: DISPID_InkRenderer = 8i32;
pub const DISPID_IRPixelToInkSpaceFromPoints: DISPID_InkRenderer = 9i32;
pub const DISPID_IRInkSpaceToPixelFromPoints: DISPID_InkRenderer = 10i32;
pub const DISPID_IRMeasure: DISPID_InkRenderer = 11i32;
pub const DISPID_IRMeasureStroke: DISPID_InkRenderer = 12i32;
pub const DISPID_IRMove: DISPID_InkRenderer = 13i32;
pub const DISPID_IRRotate: DISPID_InkRenderer = 14i32;
pub const DISPID_IRScale: DISPID_InkRenderer = 15i32;
pub type DISPID_InkStrokeDisp = i32;
pub const DISPID_ISDInkIndex: DISPID_InkStrokeDisp = 1i32;
pub const DISPID_ISDID: DISPID_InkStrokeDisp = 2i32;
pub const DISPID_ISDGetBoundingBox: DISPID_InkStrokeDisp = 3i32;
pub const DISPID_ISDDrawingAttributes: DISPID_InkStrokeDisp = 4i32;
pub const DISPID_ISDFindIntersections: DISPID_InkStrokeDisp = 5i32;
pub const DISPID_ISDGetRectangleIntersections: DISPID_InkStrokeDisp = 6i32;
pub const DISPID_ISDClip: DISPID_InkStrokeDisp = 7i32;
pub const DISPID_ISDHitTestCircle: DISPID_InkStrokeDisp = 8i32;
pub const DISPID_ISDNearestPoint: DISPID_InkStrokeDisp = 9i32;
pub const DISPID_ISDSplit: DISPID_InkStrokeDisp = 10i32;
pub const DISPID_ISDExtendedProperties: DISPID_InkStrokeDisp = 11i32;
pub const DISPID_ISDInk: DISPID_InkStrokeDisp = 12i32;
pub const DISPID_ISDBezierPoints: DISPID_InkStrokeDisp = 13i32;
pub const DISPID_ISDPolylineCusps: DISPID_InkStrokeDisp = 14i32;
pub const DISPID_ISDBezierCusps: DISPID_InkStrokeDisp = 15i32;
pub const DISPID_ISDSelfIntersections: DISPID_InkStrokeDisp = 16i32;
pub const DISPID_ISDPacketCount: DISPID_InkStrokeDisp = 17i32;
pub const DISPID_ISDPacketSize: DISPID_InkStrokeDisp = 18i32;
pub const DISPID_ISDPacketDescription: DISPID_InkStrokeDisp = 19i32;
pub const DISPID_ISDDeleted: DISPID_InkStrokeDisp = 20i32;
pub const DISPID_ISDGetPacketDescriptionPropertyMetrics: DISPID_InkStrokeDisp = 21i32;
pub const DISPID_ISDGetPoints: DISPID_InkStrokeDisp = 22i32;
pub const DISPID_ISDSetPoints: DISPID_InkStrokeDisp = 23i32;
pub const DISPID_ISDGetPacketData: DISPID_InkStrokeDisp = 24i32;
pub const DISPID_ISDGetPacketValuesByProperty: DISPID_InkStrokeDisp = 25i32;
pub const DISPID_ISDSetPacketValuesByProperty: DISPID_InkStrokeDisp = 26i32;
pub const DISPID_ISDGetFlattenedBezierPoints: DISPID_InkStrokeDisp = 27i32;
pub const DISPID_ISDScaleToRectangle: DISPID_InkStrokeDisp = 28i32;
pub const DISPID_ISDTransform: DISPID_InkStrokeDisp = 29i32;
pub const DISPID_ISDMove: DISPID_InkStrokeDisp = 30i32;
pub const DISPID_ISDRotate: DISPID_InkStrokeDisp = 31i32;
pub const DISPID_ISDShear: DISPID_InkStrokeDisp = 32i32;
pub const DISPID_ISDScale: DISPID_InkStrokeDisp = 33i32;
pub type DISPID_InkStrokes = i32;
pub const DISPID_ISs_NewEnum: DISPID_InkStrokes = -4i32;
pub const DISPID_ISsItem: DISPID_InkStrokes = 0i32;
pub const DISPID_ISsCount: DISPID_InkStrokes = 1i32;
pub const DISPID_ISsValid: DISPID_InkStrokes = 2i32;
pub const DISPID_ISsInk: DISPID_InkStrokes = 3i32;
pub const DISPID_ISsAdd: DISPID_InkStrokes = 4i32;
pub const DISPID_ISsAddStrokes: DISPID_InkStrokes = 5i32;
pub const DISPID_ISsRemove: DISPID_InkStrokes = 6i32;
pub const DISPID_ISsRemoveStrokes: DISPID_InkStrokes = 7i32;
pub const DISPID_ISsToString: DISPID_InkStrokes = 8i32;
pub const DISPID_ISsModifyDrawingAttributes: DISPID_InkStrokes = 9i32;
pub const DISPID_ISsGetBoundingBox: DISPID_InkStrokes = 10i32;
pub const DISPID_ISsScaleToRectangle: DISPID_InkStrokes = 11i32;
pub const DISPID_ISsTransform: DISPID_InkStrokes = 12i32;
pub const DISPID_ISsMove: DISPID_InkStrokes = 13i32;
pub const DISPID_ISsRotate: DISPID_InkStrokes = 14i32;
pub const DISPID_ISsShear: DISPID_InkStrokes = 15i32;
pub const DISPID_ISsScale: DISPID_InkStrokes = 16i32;
pub const DISPID_ISsClip: DISPID_InkStrokes = 17i32;
pub const DISPID_ISsRecognitionResult: DISPID_InkStrokes = 18i32;
pub const DISPID_ISsRemoveRecognitionResult: DISPID_InkStrokes = 19i32;
pub type DISPID_InkTablet = i32;
pub const DISPID_ITName: DISPID_InkTablet = 0i32;
pub const DISPID_ITPlugAndPlayId: DISPID_InkTablet = 1i32;
pub const DISPID_ITPropertyMetrics: DISPID_InkTablet = 2i32;
pub const DISPID_ITIsPacketPropertySupported: DISPID_InkTablet = 3i32;
pub const DISPID_ITMaximumInputRectangle: DISPID_InkTablet = 4i32;
pub const DISPID_ITHardwareCapabilities: DISPID_InkTablet = 5i32;
pub type DISPID_InkTablet2 = i32;
pub const DISPID_IT2DeviceKind: DISPID_InkTablet2 = 0i32;
pub type DISPID_InkTablet3 = i32;
pub const DISPID_IT3IsMultiTouch: DISPID_InkTablet3 = 0i32;
pub const DISPID_IT3MaximumCursors: DISPID_InkTablet3 = 1i32;
pub type DISPID_InkTablets = i32;
pub const DISPID_ITs_NewEnum: DISPID_InkTablets = -4i32;
pub const DISPID_ITsItem: DISPID_InkTablets = 0i32;
pub const DISPID_ITsDefaultTablet: DISPID_InkTablets = 1i32;
pub const DISPID_ITsCount: DISPID_InkTablets = 2i32;
pub const DISPID_ITsIsPacketPropertySupported: DISPID_InkTablets = 3i32;
pub type DISPID_InkTransform = i32;
pub const DISPID_ITReset: DISPID_InkTransform = 1i32;
pub const DISPID_ITTranslate: DISPID_InkTransform = 2i32;
pub const DISPID_ITRotate: DISPID_InkTransform = 3i32;
pub const DISPID_ITReflect: DISPID_InkTransform = 4i32;
pub const DISPID_ITShear: DISPID_InkTransform = 5i32;
pub const DISPID_ITScale: DISPID_InkTransform = 6i32;
pub const DISPID_ITeM11: DISPID_InkTransform = 7i32;
pub const DISPID_ITeM12: DISPID_InkTransform = 8i32;
pub const DISPID_ITeM21: DISPID_InkTransform = 9i32;
pub const DISPID_ITeM22: DISPID_InkTransform = 10i32;
pub const DISPID_ITeDx: DISPID_InkTransform = 11i32;
pub const DISPID_ITeDy: DISPID_InkTransform = 12i32;
pub const DISPID_ITGetTransform: DISPID_InkTransform = 13i32;
pub const DISPID_ITSetTransform: DISPID_InkTransform = 14i32;
pub const DISPID_ITData: DISPID_InkTransform = 15i32;
pub type DISPID_InkWordList = i32;
pub const DISPID_InkWordList_AddWord: DISPID_InkWordList = 0i32;
pub const DISPID_InkWordList_RemoveWord: DISPID_InkWordList = 1i32;
pub const DISPID_InkWordList_Merge: DISPID_InkWordList = 2i32;
pub type DISPID_InkWordList2 = i32;
pub const DISPID_InkWordList2_AddWords: DISPID_InkWordList2 = 3i32;
pub type DISPID_MathInputControlEvents = i32;
pub const DISPID_MICInsert: DISPID_MathInputControlEvents = 0i32;
pub const DISPID_MICClose: DISPID_MathInputControlEvents = 1i32;
pub const DISPID_MICPaint: DISPID_MathInputControlEvents = 2i32;
pub const DISPID_MICClear: DISPID_MathInputControlEvents = 3i32;
pub type DISPID_PenInputPanel = i32;
pub const DISPID_PIPAttachedEditWindow: DISPID_PenInputPanel = 0i32;
pub const DISPID_PIPFactoid: DISPID_PenInputPanel = 1i32;
pub const DISPID_PIPCurrentPanel: DISPID_PenInputPanel = 2i32;
pub const DISPID_PIPDefaultPanel: DISPID_PenInputPanel = 3i32;
pub const DISPID_PIPVisible: DISPID_PenInputPanel = 4i32;
pub const DISPID_PIPTop: DISPID_PenInputPanel = 5i32;
pub const DISPID_PIPLeft: DISPID_PenInputPanel = 6i32;
pub const DISPID_PIPWidth: DISPID_PenInputPanel = 7i32;
pub const DISPID_PIPHeight: DISPID_PenInputPanel = 8i32;
pub const DISPID_PIPMoveTo: DISPID_PenInputPanel = 9i32;
pub const DISPID_PIPCommitPendingInput: DISPID_PenInputPanel = 10i32;
pub const DISPID_PIPRefresh: DISPID_PenInputPanel = 11i32;
pub const DISPID_PIPBusy: DISPID_PenInputPanel = 12i32;
pub const DISPID_PIPVerticalOffset: DISPID_PenInputPanel = 13i32;
pub const DISPID_PIPHorizontalOffset: DISPID_PenInputPanel = 14i32;
pub const DISPID_PIPEnableTsf: DISPID_PenInputPanel = 15i32;
pub const DISPID_PIPAutoShow: DISPID_PenInputPanel = 16i32;
pub type DISPID_PenInputPanelEvents = i32;
pub const DISPID_PIPEVisibleChanged: DISPID_PenInputPanelEvents = 0i32;
pub const DISPID_PIPEPanelChanged: DISPID_PenInputPanelEvents = 1i32;
pub const DISPID_PIPEInputFailed: DISPID_PenInputPanelEvents = 2i32;
pub const DISPID_PIPEPanelMoving: DISPID_PenInputPanelEvents = 3i32;
pub type DISPID_StrokeEvent = i32;
pub const DISPID_SEStrokesAdded: DISPID_StrokeEvent = 1i32;
pub const DISPID_SEStrokesRemoved: DISPID_StrokeEvent = 2i32;
#[repr(C)]
pub struct DYNAMIC_RENDERER_CACHED_DATA {
pub strokeId: i32,
pub dynamicRenderer: IDynamicRenderer,
}
impl ::core::marker::Copy for DYNAMIC_RENDERER_CACHED_DATA {}
impl ::core::clone::Clone for DYNAMIC_RENDERER_CACHED_DATA {
fn clone(&self) -> Self {
*self
}
}
// Class identifier (CLSID) of the DynamicRenderer COM object.
pub const DynamicRenderer: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 3973262058, data2: 29807, data3: 19915, data4: [191, 104, 8, 39, 87, 250, 255, 24] };
// Ink-related edit-control (EM_*) window-message ids; most come in get/set pairs.
pub const EM_GETDRAWATTR: u32 = 1541u32;
pub const EM_GETFACTOID: u32 = 1549u32;
pub const EM_GETGESTURESTATUS: u32 = 1545u32;
pub const EM_GETINKINSERTMODE: u32 = 1539u32;
pub const EM_GETINKMODE: u32 = 1537u32;
pub const EM_GETMOUSEICON: u32 = 1553u32;
pub const EM_GETMOUSEPOINTER: u32 = 1555u32;
pub const EM_GETRECOGNIZER: u32 = 1547u32;
pub const EM_GETRECOTIMEOUT: u32 = 1543u32;
pub const EM_GETSELINK: u32 = 1551u32;
pub const EM_GETSELINKDISPLAYMODE: u32 = 1562u32;
pub const EM_GETSTATUS: u32 = 1557u32;
pub const EM_GETUSEMOUSEFORINPUT: u32 = 1559u32;
pub const EM_RECOGNIZE: u32 = 1558u32;
pub const EM_SETDRAWATTR: u32 = 1542u32;
pub const EM_SETFACTOID: u32 = 1550u32;
pub const EM_SETGESTURESTATUS: u32 = 1546u32;
pub const EM_SETINKINSERTMODE: u32 = 1540u32;
pub const EM_SETINKMODE: u32 = 1538u32;
pub const EM_SETMOUSEICON: u32 = 1554u32;
pub const EM_SETMOUSEPOINTER: u32 = 1556u32;
pub const EM_SETRECOGNIZER: u32 = 1548u32;
pub const EM_SETRECOTIMEOUT: u32 = 1544u32;
pub const EM_SETSELINK: u32 = 1552u32;
pub const EM_SETSELINKDISPLAYMODE: u32 = 1561u32;
pub const EM_SETUSEMOUSEFORINPUT: u32 = 1560u32;
// Event-mask bit flags; `EventMask_All` (4095) is the OR of all twelve bits.
pub type EventMask = i32;
pub const EventMask_InPlaceStateChanging: EventMask = 1i32;
pub const EventMask_InPlaceStateChanged: EventMask = 2i32;
pub const EventMask_InPlaceSizeChanging: EventMask = 4i32;
pub const EventMask_InPlaceSizeChanged: EventMask = 8i32;
pub const EventMask_InputAreaChanging: EventMask = 16i32;
pub const EventMask_InputAreaChanged: EventMask = 32i32;
pub const EventMask_CorrectionModeChanging: EventMask = 64i32;
pub const EventMask_CorrectionModeChanged: EventMask = 128i32;
pub const EventMask_InPlaceVisibilityChanging: EventMask = 256i32;
pub const EventMask_InPlaceVisibilityChanged: EventMask = 512i32;
pub const EventMask_TextInserting: EventMask = 1024i32;
pub const EventMask_TextInserted: EventMask = 2048i32;
pub const EventMask_All: EventMask = 4095i32;
// HRESULT facility code for ink errors.
pub const FACILITY_INK: u32 = 40u32;
// Command categories a pen flick can be mapped to.
pub type FLICKACTION_COMMANDCODE = i32;
pub const FLICKACTION_COMMANDCODE_NULL: FLICKACTION_COMMANDCODE = 0i32;
pub const FLICKACTION_COMMANDCODE_SCROLL: FLICKACTION_COMMANDCODE = 1i32;
pub const FLICKACTION_COMMANDCODE_APPCOMMAND: FLICKACTION_COMMANDCODE = 2i32;
pub const FLICKACTION_COMMANDCODE_CUSTOMKEY: FLICKACTION_COMMANDCODE = 3i32;
pub const FLICKACTION_COMMANDCODE_KEYMODIFIER: FLICKACTION_COMMANDCODE = 4i32;
// Flick directions, counter-clockwise from RIGHT (0); MIN aliases RIGHT.
pub type FLICKDIRECTION = i32;
pub const FLICKDIRECTION_MIN: FLICKDIRECTION = 0i32;
pub const FLICKDIRECTION_RIGHT: FLICKDIRECTION = 0i32;
pub const FLICKDIRECTION_UPRIGHT: FLICKDIRECTION = 1i32;
pub const FLICKDIRECTION_UP: FLICKDIRECTION = 2i32;
pub const FLICKDIRECTION_UPLEFT: FLICKDIRECTION = 3i32;
pub const FLICKDIRECTION_LEFT: FLICKDIRECTION = 4i32;
pub const FLICKDIRECTION_DOWNLEFT: FLICKDIRECTION = 5i32;
pub const FLICKDIRECTION_DOWN: FLICKDIRECTION = 6i32;
pub const FLICKDIRECTION_DOWNRIGHT: FLICKDIRECTION = 7i32;
pub const FLICKDIRECTION_INVALID: FLICKDIRECTION = 8i32;
// Flick feature modes; MIN aliases OFF, MAX aliases LEARNING, DEFAULT is ON.
pub type FLICKMODE = i32;
pub const FLICKMODE_MIN: FLICKMODE = 0i32;
pub const FLICKMODE_OFF: FLICKMODE = 0i32;
pub const FLICKMODE_ON: FLICKMODE = 1i32;
pub const FLICKMODE_LEARNING: FLICKMODE = 2i32;
pub const FLICKMODE_MAX: FLICKMODE = 2i32;
pub const FLICKMODE_DEFAULT: FLICKMODE = 1i32;
/// Pen-flick data. The C declaration packs its fields into bitfields, which
/// Rust cannot express directly, so they are exposed as a single raw
/// `_bitfield` word for the caller to mask out.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct FLICK_DATA {
    pub _bitfield: i32,
}
/// Screen location of a pen flick. As with `FLICK_DATA`, the C declaration
/// uses bitfields, so the coordinates are exposed as one raw `_bitfield`
/// word for the caller to mask out.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct FLICK_POINT {
    pub _bitfield: i32,
}
// Mask on the flick window-message result indicating the flick was handled.
pub const FLICK_WM_HANDLED_MASK: u32 = 1u32;
// Application-gesture ids (first run; base id GESTURE_NULL = 61440 appears
// further below). The same values reappear as `InkApplicationGesture`.
pub const GESTURE_ARROW_DOWN: u32 = 61497u32;
pub const GESTURE_ARROW_LEFT: u32 = 61498u32;
pub const GESTURE_ARROW_RIGHT: u32 = 61499u32;
pub const GESTURE_ARROW_UP: u32 = 61496u32;
pub const GESTURE_ASTERISK: u32 = 61608u32;
pub const GESTURE_BRACE_LEFT: u32 = 61674u32;
pub const GESTURE_BRACE_OVER: u32 = 61672u32;
pub const GESTURE_BRACE_RIGHT: u32 = 61675u32;
pub const GESTURE_BRACE_UNDER: u32 = 61673u32;
pub const GESTURE_BRACKET_LEFT: u32 = 61670u32;
pub const GESTURE_BRACKET_OVER: u32 = 61668u32;
pub const GESTURE_BRACKET_RIGHT: u32 = 61671u32;
pub const GESTURE_BRACKET_UNDER: u32 = 61669u32;
pub const GESTURE_BULLET: u32 = 61450u32;
pub const GESTURE_BULLET_CROSS: u32 = 61451u32;
pub const GESTURE_CHECK: u32 = 61445u32;
pub const GESTURE_CHEVRON_DOWN: u32 = 61489u32;
pub const GESTURE_CHEVRON_LEFT: u32 = 61490u32;
pub const GESTURE_CHEVRON_RIGHT: u32 = 61491u32;
pub const GESTURE_CHEVRON_UP: u32 = 61488u32;
pub const GESTURE_CIRCLE: u32 = 61472u32;
pub const GESTURE_CIRCLE_CIRCLE: u32 = 61475u32;
pub const GESTURE_CIRCLE_CROSS: u32 = 61477u32;
pub const GESTURE_CIRCLE_LINE_HORZ: u32 = 61479u32;
pub const GESTURE_CIRCLE_LINE_VERT: u32 = 61478u32;
pub const GESTURE_CIRCLE_TAP: u32 = 61474u32;
pub const GESTURE_CLOSEUP: u32 = 61455u32;
pub const GESTURE_CROSS: u32 = 61447u32;
pub const GESTURE_CURLICUE: u32 = 61456u32;
/// One gesture-recognition result: the recognized gesture id (presumably a
/// `GESTURE_*` value — confirm against the GestureRecognizer docs), the
/// recognizer's confidence, and the number of strokes that formed it.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct GESTURE_DATA {
    pub gestureId: i32,
    pub recoConfidence: i32,
    pub strokeCount: i32,
}
// Application-gesture ids, continued (GESTURE_NULL = 61440 is the base id).
pub const GESTURE_DIAGONAL_LEFTDOWN: u32 = 61534u32;
pub const GESTURE_DIAGONAL_LEFTUP: u32 = 61532u32;
pub const GESTURE_DIAGONAL_RIGHTDOWN: u32 = 61535u32;
pub const GESTURE_DIAGONAL_RIGHTUP: u32 = 61533u32;
pub const GESTURE_DIGIT_0: u32 = 61594u32;
pub const GESTURE_DIGIT_1: u32 = 61595u32;
pub const GESTURE_DIGIT_2: u32 = 61596u32;
pub const GESTURE_DIGIT_3: u32 = 61597u32;
pub const GESTURE_DIGIT_4: u32 = 61598u32;
pub const GESTURE_DIGIT_5: u32 = 61599u32;
pub const GESTURE_DIGIT_6: u32 = 61600u32;
pub const GESTURE_DIGIT_7: u32 = 61601u32;
pub const GESTURE_DIGIT_8: u32 = 61602u32;
pub const GESTURE_DIGIT_9: u32 = 61603u32;
pub const GESTURE_DOLLAR: u32 = 61607u32;
pub const GESTURE_DOUBLE_ARROW_DOWN: u32 = 61501u32;
pub const GESTURE_DOUBLE_ARROW_LEFT: u32 = 61502u32;
pub const GESTURE_DOUBLE_ARROW_RIGHT: u32 = 61503u32;
pub const GESTURE_DOUBLE_ARROW_UP: u32 = 61500u32;
pub const GESTURE_DOUBLE_CIRCLE: u32 = 61473u32;
pub const GESTURE_DOUBLE_CURLICUE: u32 = 61457u32;
pub const GESTURE_DOUBLE_DOWN: u32 = 61625u32;
pub const GESTURE_DOUBLE_LEFT: u32 = 61626u32;
pub const GESTURE_DOUBLE_RIGHT: u32 = 61627u32;
pub const GESTURE_DOUBLE_TAP: u32 = 61681u32;
pub const GESTURE_DOUBLE_UP: u32 = 61624u32;
pub const GESTURE_DOWN: u32 = 61529u32;
pub const GESTURE_DOWN_ARROW_LEFT: u32 = 61506u32;
pub const GESTURE_DOWN_ARROW_RIGHT: u32 = 61507u32;
pub const GESTURE_DOWN_LEFT: u32 = 61546u32;
pub const GESTURE_DOWN_LEFT_LONG: u32 = 61542u32;
pub const GESTURE_DOWN_RIGHT: u32 = 61547u32;
pub const GESTURE_DOWN_RIGHT_LONG: u32 = 61543u32;
pub const GESTURE_DOWN_UP: u32 = 61537u32;
pub const GESTURE_EXCLAMATION: u32 = 61604u32;
pub const GESTURE_INFINITY: u32 = 61446u32;
pub const GESTURE_LEFT: u32 = 61530u32;
pub const GESTURE_LEFT_ARROW_DOWN: u32 = 61509u32;
pub const GESTURE_LEFT_ARROW_UP: u32 = 61508u32;
pub const GESTURE_LEFT_DOWN: u32 = 61549u32;
pub const GESTURE_LEFT_RIGHT: u32 = 61538u32;
pub const GESTURE_LEFT_UP: u32 = 61548u32;
pub const GESTURE_LETTER_A: u32 = 61568u32;
pub const GESTURE_LETTER_B: u32 = 61569u32;
pub const GESTURE_LETTER_C: u32 = 61570u32;
pub const GESTURE_LETTER_D: u32 = 61571u32;
pub const GESTURE_LETTER_E: u32 = 61572u32;
pub const GESTURE_LETTER_F: u32 = 61573u32;
pub const GESTURE_LETTER_G: u32 = 61574u32;
pub const GESTURE_LETTER_H: u32 = 61575u32;
pub const GESTURE_LETTER_I: u32 = 61576u32;
pub const GESTURE_LETTER_J: u32 = 61577u32;
pub const GESTURE_LETTER_K: u32 = 61578u32;
pub const GESTURE_LETTER_L: u32 = 61579u32;
pub const GESTURE_LETTER_M: u32 = 61580u32;
pub const GESTURE_LETTER_N: u32 = 61581u32;
pub const GESTURE_LETTER_O: u32 = 61582u32;
pub const GESTURE_LETTER_P: u32 = 61583u32;
pub const GESTURE_LETTER_Q: u32 = 61584u32;
pub const GESTURE_LETTER_R: u32 = 61585u32;
pub const GESTURE_LETTER_S: u32 = 61586u32;
pub const GESTURE_LETTER_T: u32 = 61587u32;
pub const GESTURE_LETTER_U: u32 = 61588u32;
pub const GESTURE_LETTER_V: u32 = 61589u32;
pub const GESTURE_LETTER_W: u32 = 61590u32;
pub const GESTURE_LETTER_X: u32 = 61591u32;
pub const GESTURE_LETTER_Y: u32 = 61592u32;
pub const GESTURE_LETTER_Z: u32 = 61593u32;
pub const GESTURE_NULL: u32 = 61440u32;
pub const GESTURE_OPENUP: u32 = 61454u32;
pub const GESTURE_PARAGRAPH: u32 = 61448u32;
pub const GESTURE_PLUS: u32 = 61609u32;
pub const GESTURE_QUAD_TAP: u32 = 61683u32;
pub const GESTURE_QUESTION: u32 = 61605u32;
pub const GESTURE_RECTANGLE: u32 = 61458u32;
pub const GESTURE_RIGHT: u32 = 61531u32;
pub const GESTURE_RIGHT_ARROW_DOWN: u32 = 61511u32;
pub const GESTURE_RIGHT_ARROW_UP: u32 = 61510u32;
pub const GESTURE_RIGHT_DOWN: u32 = 61551u32;
pub const GESTURE_RIGHT_LEFT: u32 = 61539u32;
pub const GESTURE_RIGHT_UP: u32 = 61550u32;
pub const GESTURE_SCRATCHOUT: u32 = 61441u32;
pub const GESTURE_SECTION: u32 = 61449u32;
pub const GESTURE_SEMICIRCLE_LEFT: u32 = 61480u32;
pub const GESTURE_SEMICIRCLE_RIGHT: u32 = 61481u32;
pub const GESTURE_SHARP: u32 = 61606u32;
pub const GESTURE_SQUARE: u32 = 61443u32;
pub const GESTURE_SQUIGGLE: u32 = 61452u32;
pub const GESTURE_STAR: u32 = 61444u32;
pub const GESTURE_SWAP: u32 = 61453u32;
pub const GESTURE_TAP: u32 = 61680u32;
pub const GESTURE_TRIANGLE: u32 = 61442u32;
pub const GESTURE_TRIPLE_DOWN: u32 = 61629u32;
pub const GESTURE_TRIPLE_LEFT: u32 = 61630u32;
pub const GESTURE_TRIPLE_RIGHT: u32 = 61631u32;
pub const GESTURE_TRIPLE_TAP: u32 = 61682u32;
pub const GESTURE_TRIPLE_UP: u32 = 61628u32;
pub const GESTURE_UP: u32 = 61528u32;
pub const GESTURE_UP_ARROW_LEFT: u32 = 61504u32;
pub const GESTURE_UP_ARROW_RIGHT: u32 = 61505u32;
pub const GESTURE_UP_DOWN: u32 = 61536u32;
pub const GESTURE_UP_LEFT: u32 = 61544u32;
pub const GESTURE_UP_LEFT_LONG: u32 = 61540u32;
pub const GESTURE_UP_RIGHT: u32 = 61545u32;
pub const GESTURE_UP_RIGHT_LONG: u32 = 61541u32;
// GUIDs tagging the cached-data payloads declared above.
pub const GUID_DYNAMIC_RENDERER_CACHED_DATA: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 3209894802, data2: 9663, data3: 19093, data4: [137, 173, 14, 71, 107, 52, 180, 245] };
pub const GUID_GESTURE_DATA: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1105521679,
data2: 9898,
data3: 17754,
data4: [154, 165, 44, 211, 108, 246, 63, 185],
};
// GUIDs identifying the per-packet stylus properties (pressure, tilt,
// coordinates, etc.) in a tablet packet description.
pub const GUID_PACKETPROPERTY_GUID_ALTITUDE_ORIENTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2195637703,
data2: 63162,
data3: 18694,
data4: [137, 79, 102, 214, 141, 252, 69, 108],
};
pub const GUID_PACKETPROPERTY_GUID_AZIMUTH_ORIENTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 43066292, data2: 34856, data3: 16651, data4: [178, 80, 160, 83, 101, 149, 229, 220] };
pub const GUID_PACKETPROPERTY_GUID_BUTTON_PRESSURE: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2340417476,
data2: 38570,
data3: 19454,
data4: [172, 38, 138, 95, 11, 224, 123, 245],
};
pub const GUID_PACKETPROPERTY_GUID_DEVICE_CONTACT_ID: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 39345041, data2: 1179, data3: 18256, data4: [150, 21, 223, 137, 72, 171, 60, 156] };
pub const GUID_PACKETPROPERTY_GUID_FINGERCONTACTCONFIDENCE: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3875981316,
data2: 22512,
data3: 20224,
data4: [138, 12, 133, 61, 87, 120, 155, 233],
};
pub const GUID_PACKETPROPERTY_GUID_HEIGHT: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 3860355282, data2: 58439, data3: 16920, data4: [157, 63, 24, 134, 92, 32, 61, 244] };
pub const GUID_PACKETPROPERTY_GUID_NORMAL_PRESSURE: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1929859117,
data2: 63988,
data3: 19992,
data4: [179, 242, 44, 225, 177, 163, 97, 12],
};
pub const GUID_PACKETPROPERTY_GUID_PACKET_STATUS: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1846413247,
data2: 45031,
data3: 19703,
data4: [135, 209, 175, 100, 70, 32, 132, 24],
};
pub const GUID_PACKETPROPERTY_GUID_PITCH_ROTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 2138986423, data2: 48695, data3: 19425, data4: [163, 86, 122, 132, 22, 14, 24, 147] };
pub const GUID_PACKETPROPERTY_GUID_ROLL_ROTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1566400086,
data2: 27561,
data3: 19547,
data4: [159, 176, 133, 28, 145, 113, 78, 86],
};
pub const GUID_PACKETPROPERTY_GUID_SERIAL_NUMBER: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 2024282966, data2: 2357, data3: 17555, data4: [186, 174, 0, 84, 26, 138, 22, 196] };
pub const GUID_PACKETPROPERTY_GUID_TANGENT_PRESSURE: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1839483019, data2: 21060, data3: 16876, data4: [144, 91, 50, 216, 154, 184, 8, 9] };
pub const GUID_PACKETPROPERTY_GUID_TIMER_TICK: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1130696901,
data2: 65235,
data3: 17873,
data4: [139, 118, 113, 211, 234, 122, 130, 157],
};
pub const GUID_PACKETPROPERTY_GUID_TWIST_ORIENTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 221399392, data2: 5042, data3: 16868, data4: [172, 230, 122, 233, 212, 61, 45, 59] };
pub const GUID_PACKETPROPERTY_GUID_WIDTH: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3131828557,
data2: 10002,
data3: 18677,
data4: [190, 157, 143, 139, 94, 160, 113, 26],
};
pub const GUID_PACKETPROPERTY_GUID_X: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1502243471,
data2: 21184,
data3: 19360,
data4: [147, 175, 175, 53, 116, 17, 165, 97],
};
pub const GUID_PACKETPROPERTY_GUID_X_TILT_ORIENTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2832235322,
data2: 35824,
data3: 16560,
data4: [149, 169, 184, 10, 107, 183, 135, 191],
};
pub const GUID_PACKETPROPERTY_GUID_Y: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 3040845685, data2: 1248, data3: 17560, data4: [167, 238, 195, 13, 187, 90, 144, 17] };
pub const GUID_PACKETPROPERTY_GUID_YAW_ROTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1787074944,
data2: 31802,
data3: 17847,
data4: [170, 130, 144, 162, 98, 149, 14, 137],
};
pub const GUID_PACKETPROPERTY_GUID_Y_TILT_ORIENTATION: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 244523913, data2: 7543, data3: 17327, data4: [172, 0, 91, 149, 13, 109, 75, 45] };
pub const GUID_PACKETPROPERTY_GUID_Z: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1935334192, data2: 3771, data3: 18312, data4: [160, 228, 15, 49, 100, 144, 5, 93] };
// Class identifier (CLSID) of the GestureRecognizer COM object.
pub const GestureRecognizer: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3929065044,
data2: 50732,
data3: 17439,
data4: [172, 0, 149, 249, 161, 150, 120, 44],
};
// Pointer-sized opaque handles used by the recognition C API.
pub type HRECOALT = isize;
pub type HRECOCONTEXT = isize;
pub type HRECOGNIZER = isize;
pub type HRECOLATTICE = isize;
pub type HRECOWORDLIST = isize;
// Class identifier (CLSID) of the HandwrittenTextInsertion COM object.
pub const HandwrittenTextInsertion: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 2668056290, data2: 59113, data3: 19850, data4: [160, 71, 235, 91, 92, 60, 85, 218] };
// COM interface pointer, exposed as a raw pointer in this -sys crate.
pub type IDynamicRenderer = *mut ::core::ffi::c_void;
// Ink-edit notification codes (offsets from IECN__BASE = 2048).
pub const IECN_GESTURE: u32 = 2050u32;
pub const IECN_RECOGNITIONRESULT: u32 = 2051u32;
pub const IECN_STROKE: u32 = 2049u32;
pub const IECN__BASE: u32 = 2048u32;
/// Notification payload carrying a gesture's cursor, strokes, and candidate
/// gestures. NOTE(review): presumably delivered with the `IECN_GESTURE`
/// notification below — confirm against the InkEdit documentation.
#[repr(C)]
#[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_Com", feature = "Win32_System_Ole", feature = "Win32_UI_Controls"))]
#[derive(Clone, Copy)]
pub struct IEC_GESTUREINFO {
    pub nmhdr: super::Controls::NMHDR,
    pub Cursor: IInkCursor,
    pub Strokes: IInkStrokes,
    pub Gestures: super::super::System::Com::VARIANT,
}
/// Notification payload carrying a recognition result.
/// NOTE(review): presumably delivered with the `IECN_RECOGNITIONRESULT`
/// notification below — confirm against the InkEdit documentation.
#[repr(C)]
#[cfg(all(feature = "Win32_Foundation", feature = "Win32_UI_Controls"))]
#[derive(Clone, Copy)]
pub struct IEC_RECOGNITIONRESULTINFO {
    pub nmhdr: super::Controls::NMHDR,
    pub RecognitionResult: IInkRecognitionResult,
}
/// Notification payload carrying the cursor and stroke of a stroke event.
/// NOTE(review): presumably delivered with the `IECN_STROKE` notification
/// below — confirm against the InkEdit documentation.
#[repr(C)]
#[cfg(all(feature = "Win32_Foundation", feature = "Win32_UI_Controls"))]
#[derive(Clone, Copy)]
pub struct IEC_STROKEINFO {
    pub nmhdr: super::Controls::NMHDR,
    pub Cursor: IInkCursor,
    pub Stroke: IInkStrokeDisp,
}
// Base value for the IEC_* ink-edit control message/notification range.
pub const IEC__BASE: u32 = 1536u32;
// COM interface pointers, exposed as raw void pointers in this -sys crate;
// callers go through the interface vtables defined elsewhere.
pub type IGestureRecognizer = *mut ::core::ffi::c_void;
pub type IHandwrittenTextInsertion = *mut ::core::ffi::c_void;
pub type IInk = *mut ::core::ffi::c_void;
pub type IInkCollector = *mut ::core::ffi::c_void;
pub type IInkCursor = *mut ::core::ffi::c_void;
pub type IInkCursorButton = *mut ::core::ffi::c_void;
pub type IInkCursorButtons = *mut ::core::ffi::c_void;
pub type IInkCursors = *mut ::core::ffi::c_void;
pub type IInkCustomStrokes = *mut ::core::ffi::c_void;
pub type IInkDisp = *mut ::core::ffi::c_void;
pub type IInkDivider = *mut ::core::ffi::c_void;
pub type IInkDivisionResult = *mut ::core::ffi::c_void;
pub type IInkDivisionUnit = *mut ::core::ffi::c_void;
pub type IInkDivisionUnits = *mut ::core::ffi::c_void;
pub type IInkDrawingAttributes = *mut ::core::ffi::c_void;
pub type IInkEdit = *mut ::core::ffi::c_void;
pub type IInkExtendedProperties = *mut ::core::ffi::c_void;
pub type IInkExtendedProperty = *mut ::core::ffi::c_void;
pub type IInkGesture = *mut ::core::ffi::c_void;
pub type IInkLineInfo = *mut ::core::ffi::c_void;
pub type IInkOverlay = *mut ::core::ffi::c_void;
pub type IInkPicture = *mut ::core::ffi::c_void;
pub type IInkRecognitionAlternate = *mut ::core::ffi::c_void;
pub type IInkRecognitionAlternates = *mut ::core::ffi::c_void;
pub type IInkRecognitionResult = *mut ::core::ffi::c_void;
pub type IInkRecognizer = *mut ::core::ffi::c_void;
pub type IInkRecognizer2 = *mut ::core::ffi::c_void;
pub type IInkRecognizerContext = *mut ::core::ffi::c_void;
pub type IInkRecognizerContext2 = *mut ::core::ffi::c_void;
pub type IInkRecognizerGuide = *mut ::core::ffi::c_void;
pub type IInkRecognizers = *mut ::core::ffi::c_void;
pub type IInkRectangle = *mut ::core::ffi::c_void;
pub type IInkRenderer = *mut ::core::ffi::c_void;
pub type IInkStrokeDisp = *mut ::core::ffi::c_void;
pub type IInkStrokes = *mut ::core::ffi::c_void;
pub type IInkTablet = *mut ::core::ffi::c_void;
pub type IInkTablet2 = *mut ::core::ffi::c_void;
pub type IInkTablet3 = *mut ::core::ffi::c_void;
pub type IInkTablets = *mut ::core::ffi::c_void;
pub type IInkTransform = *mut ::core::ffi::c_void;
pub type IInkWordList = *mut ::core::ffi::c_void;
pub type IInkWordList2 = *mut ::core::ffi::c_void;
pub type IInputPanelWindowHandle = *mut ::core::ffi::c_void;
pub type IMathInputControl = *mut ::core::ffi::c_void;
/// Per-line ink metrics: overall height, font ascent/descent, flag bits and a
/// color value. NOTE(review): the exact flag and color semantics are not
/// visible in this file — the `IP_*` constants below look related; confirm
/// against the InkEdit documentation.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct INKMETRIC {
    pub iHeight: i32,
    pub iFontAscent: i32,
    pub iFontDescent: i32,
    pub dwFlags: u32,
    pub color: u32,
}
// Generated constant tables. Pattern throughout: each `pub type X = i32;`
// alias models a C enum, followed by its named values; `GUID` consts are
// class identifiers for the corresponding COM coclasses (presumably CLSIDs —
// values come from the Windows SDK metadata, do not edit by hand).
// IP_*: ink-picture cursor state bit flags.
pub const IP_CURSOR_DOWN: u32 = 1u32;
pub const IP_INVERTED: u32 = 2u32;
pub const IP_MARGIN: u32 = 4u32;
pub type IPenInputPanel = *mut ::core::ffi::c_void;
pub type IRealTimeStylus = *mut ::core::ffi::c_void;
pub type IRealTimeStylus2 = *mut ::core::ffi::c_void;
pub type IRealTimeStylus3 = *mut ::core::ffi::c_void;
pub type IRealTimeStylusSynchronization = *mut ::core::ffi::c_void;
pub type ISketchInk = *mut ::core::ffi::c_void;
pub type IStrokeBuilder = *mut ::core::ffi::c_void;
pub type IStylusAsyncPlugin = *mut ::core::ffi::c_void;
pub type IStylusPlugin = *mut ::core::ffi::c_void;
pub type IStylusSyncPlugin = *mut ::core::ffi::c_void;
pub type ITextInputPanel = *mut ::core::ffi::c_void;
pub type ITextInputPanelEventSink = *mut ::core::ffi::c_void;
pub type ITextInputPanelRunInfo = *mut ::core::ffi::c_void;
pub type ITipAutoCompleteClient = *mut ::core::ffi::c_void;
pub type ITipAutoCompleteProvider = *mut ::core::ffi::c_void;
pub type InPlaceDirection = i32;
pub const InPlaceDirection_Auto: InPlaceDirection = 0i32;
pub const InPlaceDirection_Bottom: InPlaceDirection = 1i32;
pub const InPlaceDirection_Top: InPlaceDirection = 2i32;
pub type InPlaceState = i32;
pub const InPlaceState_Auto: InPlaceState = 0i32;
pub const InPlaceState_HoverTarget: InPlaceState = 1i32;
pub const InPlaceState_Expanded: InPlaceState = 2i32;
pub const Ink: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 333335106,
data2: 36129,
data3: 19598,
data4: [191, 156, 143, 105, 203, 6, 143, 202],
};
// Application gestures recognizable by the ink collector (IAG_*).
pub type InkApplicationGesture = i32;
pub const IAG_AllGestures: InkApplicationGesture = 0i32;
pub const IAG_NoGesture: InkApplicationGesture = 61440i32;
pub const IAG_Scratchout: InkApplicationGesture = 61441i32;
pub const IAG_Triangle: InkApplicationGesture = 61442i32;
pub const IAG_Square: InkApplicationGesture = 61443i32;
pub const IAG_Star: InkApplicationGesture = 61444i32;
pub const IAG_Check: InkApplicationGesture = 61445i32;
pub const IAG_Curlicue: InkApplicationGesture = 61456i32;
pub const IAG_DoubleCurlicue: InkApplicationGesture = 61457i32;
pub const IAG_Circle: InkApplicationGesture = 61472i32;
pub const IAG_DoubleCircle: InkApplicationGesture = 61473i32;
pub const IAG_SemiCircleLeft: InkApplicationGesture = 61480i32;
pub const IAG_SemiCircleRight: InkApplicationGesture = 61481i32;
pub const IAG_ChevronUp: InkApplicationGesture = 61488i32;
pub const IAG_ChevronDown: InkApplicationGesture = 61489i32;
pub const IAG_ChevronLeft: InkApplicationGesture = 61490i32;
pub const IAG_ChevronRight: InkApplicationGesture = 61491i32;
pub const IAG_ArrowUp: InkApplicationGesture = 61496i32;
pub const IAG_ArrowDown: InkApplicationGesture = 61497i32;
pub const IAG_ArrowLeft: InkApplicationGesture = 61498i32;
pub const IAG_ArrowRight: InkApplicationGesture = 61499i32;
pub const IAG_Up: InkApplicationGesture = 61528i32;
pub const IAG_Down: InkApplicationGesture = 61529i32;
pub const IAG_Left: InkApplicationGesture = 61530i32;
pub const IAG_Right: InkApplicationGesture = 61531i32;
pub const IAG_UpDown: InkApplicationGesture = 61536i32;
pub const IAG_DownUp: InkApplicationGesture = 61537i32;
pub const IAG_LeftRight: InkApplicationGesture = 61538i32;
pub const IAG_RightLeft: InkApplicationGesture = 61539i32;
pub const IAG_UpLeftLong: InkApplicationGesture = 61540i32;
pub const IAG_UpRightLong: InkApplicationGesture = 61541i32;
pub const IAG_DownLeftLong: InkApplicationGesture = 61542i32;
pub const IAG_DownRightLong: InkApplicationGesture = 61543i32;
pub const IAG_UpLeft: InkApplicationGesture = 61544i32;
pub const IAG_UpRight: InkApplicationGesture = 61545i32;
pub const IAG_DownLeft: InkApplicationGesture = 61546i32;
pub const IAG_DownRight: InkApplicationGesture = 61547i32;
pub const IAG_LeftUp: InkApplicationGesture = 61548i32;
pub const IAG_LeftDown: InkApplicationGesture = 61549i32;
pub const IAG_RightUp: InkApplicationGesture = 61550i32;
pub const IAG_RightDown: InkApplicationGesture = 61551i32;
pub const IAG_Exclamation: InkApplicationGesture = 61604i32;
pub const IAG_Tap: InkApplicationGesture = 61680i32;
pub const IAG_DoubleTap: InkApplicationGesture = 61681i32;
pub type InkBoundingBoxMode = i32;
pub const IBBM_Default: InkBoundingBoxMode = 0i32;
pub const IBBM_NoCurveFit: InkBoundingBoxMode = 1i32;
pub const IBBM_CurveFit: InkBoundingBoxMode = 2i32;
pub const IBBM_PointsOnly: InkBoundingBoxMode = 3i32;
pub const IBBM_Union: InkBoundingBoxMode = 4i32;
// Clipboard formats (ICF_*) are combinable bit flags; note the *Mask values.
pub type InkClipboardFormats = i32;
pub const ICF_None: InkClipboardFormats = 0i32;
pub const ICF_InkSerializedFormat: InkClipboardFormats = 1i32;
pub const ICF_SketchInk: InkClipboardFormats = 2i32;
pub const ICF_TextInk: InkClipboardFormats = 6i32;
pub const ICF_EnhancedMetafile: InkClipboardFormats = 8i32;
pub const ICF_Metafile: InkClipboardFormats = 32i32;
pub const ICF_Bitmap: InkClipboardFormats = 64i32;
pub const ICF_PasteMask: InkClipboardFormats = 7i32;
pub const ICF_CopyMask: InkClipboardFormats = 127i32;
pub const ICF_Default: InkClipboardFormats = 127i32;
pub type InkClipboardModes = i32;
pub const ICB_Copy: InkClipboardModes = 0i32;
pub const ICB_Cut: InkClipboardModes = 1i32;
pub const ICB_ExtractOnly: InkClipboardModes = 48i32;
pub const ICB_DelayedCopy: InkClipboardModes = 32i32;
pub const ICB_Default: InkClipboardModes = 0i32;
pub type InkCollectionMode = i32;
pub const ICM_InkOnly: InkCollectionMode = 0i32;
pub const ICM_GestureOnly: InkCollectionMode = 1i32;
pub const ICM_InkAndGesture: InkCollectionMode = 2i32;
pub const InkCollector: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1140528467,
data2: 44404,
data3: 20200,
data4: [136, 228, 62, 109, 170, 201, 21, 219],
};
pub const InkCollectorClipInkToMargin: i32 = 0i32;
pub const InkCollectorDefaultMargin: i32 = -2147483648i32;
// Event-interest indices for the ink collector (ICEI_*); -1 = defaults.
pub type InkCollectorEventInterest = i32;
pub const ICEI_DefaultEvents: InkCollectorEventInterest = -1i32;
pub const ICEI_CursorDown: InkCollectorEventInterest = 0i32;
pub const ICEI_Stroke: InkCollectorEventInterest = 1i32;
pub const ICEI_NewPackets: InkCollectorEventInterest = 2i32;
pub const ICEI_NewInAirPackets: InkCollectorEventInterest = 3i32;
pub const ICEI_CursorButtonDown: InkCollectorEventInterest = 4i32;
pub const ICEI_CursorButtonUp: InkCollectorEventInterest = 5i32;
pub const ICEI_CursorInRange: InkCollectorEventInterest = 6i32;
pub const ICEI_CursorOutOfRange: InkCollectorEventInterest = 7i32;
pub const ICEI_SystemGesture: InkCollectorEventInterest = 8i32;
pub const ICEI_TabletAdded: InkCollectorEventInterest = 9i32;
pub const ICEI_TabletRemoved: InkCollectorEventInterest = 10i32;
pub const ICEI_MouseDown: InkCollectorEventInterest = 11i32;
pub const ICEI_MouseMove: InkCollectorEventInterest = 12i32;
pub const ICEI_MouseUp: InkCollectorEventInterest = 13i32;
pub const ICEI_MouseWheel: InkCollectorEventInterest = 14i32;
pub const ICEI_DblClick: InkCollectorEventInterest = 15i32;
pub const ICEI_AllEvents: InkCollectorEventInterest = 16i32;
pub type InkCursorButtonState = i32;
pub const ICBS_Unavailable: InkCursorButtonState = 0i32;
pub const ICBS_Up: InkCursorButtonState = 1i32;
pub const ICBS_Down: InkCursorButtonState = 2i32;
pub const InkDisp: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2474383924,
data2: 5405,
data3: 17936,
data4: [156, 166, 168, 204, 155, 219, 93, 131],
};
pub type InkDisplayMode = i32;
pub const IDM_Ink: InkDisplayMode = 0i32;
pub const IDM_Text: InkDisplayMode = 1i32;
pub const InkDivider: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2287269536,
data2: 18051,
data3: 19175,
data4: [145, 145, 117, 47, 230, 70, 18, 195],
};
pub type InkDivisionType = i32;
pub const IDT_Segment: InkDivisionType = 0i32;
pub const IDT_Line: InkDivisionType = 1i32;
pub const IDT_Paragraph: InkDivisionType = 2i32;
pub const IDT_Drawing: InkDivisionType = 3i32;
pub const InkDrawingAttributes: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3636408994,
data2: 1445,
data3: 17603,
data4: [179, 170, 94, 128, 172, 125, 37, 118],
};
pub const InkEdit: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3855243765,
data2: 22468,
data3: 19928,
data4: [155, 214, 29, 238, 237, 210, 122, 244],
};
pub type InkEditStatus = i32;
pub const IES_Idle: InkEditStatus = 0i32;
pub const IES_Collecting: InkEditStatus = 1i32;
pub const IES_Recognizing: InkEditStatus = 2i32;
pub type InkExtractFlags = i32;
pub const IEF_CopyFromOriginal: InkExtractFlags = 0i32;
pub const IEF_RemoveFromOriginal: InkExtractFlags = 1i32;
pub const IEF_Default: InkExtractFlags = 1i32;
pub type InkInsertMode = i32;
pub const IEM_InsertText: InkInsertMode = 0i32;
pub const IEM_InsertInk: InkInsertMode = 1i32;
pub const InkMaxTransparencyValue: i32 = 255i32;
pub const InkMinTransparencyValue: i32 = 0i32;
pub type InkMode = i32;
pub const IEM_Disabled: InkMode = 0i32;
pub const IEM_Ink: InkMode = 1i32;
pub const IEM_InkAndGesture: InkMode = 2i32;
pub type InkMouseButton = i32;
pub const IMF_Left: InkMouseButton = 1i32;
pub const IMF_Right: InkMouseButton = 2i32;
pub const IMF_Middle: InkMouseButton = 4i32;
pub type InkMousePointer = i32;
pub const IMP_Default: InkMousePointer = 0i32;
pub const IMP_Arrow: InkMousePointer = 1i32;
pub const IMP_Crosshair: InkMousePointer = 2i32;
pub const IMP_Ibeam: InkMousePointer = 3i32;
pub const IMP_SizeNESW: InkMousePointer = 4i32;
pub const IMP_SizeNS: InkMousePointer = 5i32;
pub const IMP_SizeNWSE: InkMousePointer = 6i32;
pub const IMP_SizeWE: InkMousePointer = 7i32;
pub const IMP_UpArrow: InkMousePointer = 8i32;
pub const IMP_Hourglass: InkMousePointer = 9i32;
pub const IMP_NoDrop: InkMousePointer = 10i32;
pub const IMP_ArrowHourglass: InkMousePointer = 11i32;
pub const IMP_ArrowQuestion: InkMousePointer = 12i32;
pub const IMP_SizeAll: InkMousePointer = 13i32;
pub const IMP_Hand: InkMousePointer = 14i32;
pub const IMP_Custom: InkMousePointer = 99i32;
pub const InkOverlay: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 1708131910,
data2: 52707,
data3: 19080,
data4: [145, 99, 103, 105, 240, 241, 169, 125],
};
pub type InkOverlayAttachMode = i32;
pub const IOAM_Behind: InkOverlayAttachMode = 0i32;
pub const IOAM_InFront: InkOverlayAttachMode = 1i32;
pub type InkOverlayEditingMode = i32;
pub const IOEM_Ink: InkOverlayEditingMode = 0i32;
pub const IOEM_Delete: InkOverlayEditingMode = 1i32;
pub const IOEM_Select: InkOverlayEditingMode = 2i32;
pub type InkOverlayEraserMode = i32;
pub const IOERM_StrokeErase: InkOverlayEraserMode = 0i32;
pub const IOERM_PointErase: InkOverlayEraserMode = 1i32;
pub type InkPenTip = i32;
pub const IPT_Ball: InkPenTip = 0i32;
pub const IPT_Rectangle: InkPenTip = 1i32;
pub type InkPersistenceCompressionMode = i32;
pub const IPCM_Default: InkPersistenceCompressionMode = 0i32;
pub const IPCM_MaximumCompression: InkPersistenceCompressionMode = 1i32;
pub const IPCM_NoCompression: InkPersistenceCompressionMode = 2i32;
pub type InkPersistenceFormat = i32;
pub const IPF_InkSerializedFormat: InkPersistenceFormat = 0i32;
pub const IPF_Base64InkSerializedFormat: InkPersistenceFormat = 1i32;
pub const IPF_GIF: InkPersistenceFormat = 2i32;
pub const IPF_Base64GIF: InkPersistenceFormat = 3i32;
pub const InkPicture: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 77718867, data2: 65078, data3: 20446, data4: [134, 94, 52, 65, 148, 230, 148, 36] };
pub type InkPictureSizeMode = i32;
pub const IPSM_AutoSize: InkPictureSizeMode = 0i32;
pub const IPSM_CenterImage: InkPictureSizeMode = 1i32;
pub const IPSM_Normal: InkPictureSizeMode = 2i32;
pub const IPSM_StretchImage: InkPictureSizeMode = 3i32;
// Raster operations (IRO_*) mirror the GDI ROP2 codes 1..16.
pub type InkRasterOperation = i32;
pub const IRO_Black: InkRasterOperation = 1i32;
pub const IRO_NotMergePen: InkRasterOperation = 2i32;
pub const IRO_MaskNotPen: InkRasterOperation = 3i32;
pub const IRO_NotCopyPen: InkRasterOperation = 4i32;
pub const IRO_MaskPenNot: InkRasterOperation = 5i32;
pub const IRO_Not: InkRasterOperation = 6i32;
pub const IRO_XOrPen: InkRasterOperation = 7i32;
pub const IRO_NotMaskPen: InkRasterOperation = 8i32;
pub const IRO_MaskPen: InkRasterOperation = 9i32;
pub const IRO_NotXOrPen: InkRasterOperation = 10i32;
pub const IRO_NoOperation: InkRasterOperation = 11i32;
pub const IRO_MergeNotPen: InkRasterOperation = 12i32;
pub const IRO_CopyPen: InkRasterOperation = 13i32;
pub const IRO_MergePenNot: InkRasterOperation = 14i32;
pub const IRO_MergePen: InkRasterOperation = 15i32;
pub const IRO_White: InkRasterOperation = 16i32;
/// C-layout recognizer guide: writing/drawn boxes plus row/column counts
/// and a midline value. Only available with the `Win32_Foundation`
/// feature because it embeds `RECT`. Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct InkRecoGuide {
    pub rectWritingBox: super::super::Foundation::RECT,
    pub rectDrawnBox: super::super::Foundation::RECT,
    pub cRows: i32,
    pub cColumns: i32,
    pub midline: i32,
}
// Recognition-related enum constants and coclass GUIDs (generated; values
// come from the Windows SDK metadata).
pub type InkRecognitionAlternatesSelection = i32;
pub const IRAS_Start: InkRecognitionAlternatesSelection = 0i32;
pub const IRAS_DefaultCount: InkRecognitionAlternatesSelection = 10i32;
pub const IRAS_All: InkRecognitionAlternatesSelection = -1i32;
pub type InkRecognitionConfidence = i32;
pub const IRC_Strong: InkRecognitionConfidence = 0i32;
pub const IRC_Intermediate: InkRecognitionConfidence = 1i32;
pub const IRC_Poor: InkRecognitionConfidence = 2i32;
// IRM_*: recognition mode bit flags.
pub type InkRecognitionModes = i32;
pub const IRM_None: InkRecognitionModes = 0i32;
pub const IRM_WordModeOnly: InkRecognitionModes = 1i32;
pub const IRM_Coerce: InkRecognitionModes = 2i32;
pub const IRM_TopInkBreaksOnly: InkRecognitionModes = 4i32;
pub const IRM_PrefixOk: InkRecognitionModes = 8i32;
pub const IRM_LineMode: InkRecognitionModes = 16i32;
pub const IRM_DisablePersonalization: InkRecognitionModes = 32i32;
pub const IRM_AutoSpace: InkRecognitionModes = 64i32;
pub const IRM_Max: InkRecognitionModes = 128i32;
// IRS_*: recognition status bit flags (0 = no error).
pub type InkRecognitionStatus = i32;
pub const IRS_NoError: InkRecognitionStatus = 0i32;
pub const IRS_Interrupted: InkRecognitionStatus = 1i32;
pub const IRS_ProcessFailed: InkRecognitionStatus = 2i32;
pub const IRS_InkAddedFailed: InkRecognitionStatus = 4i32;
pub const IRS_SetAutoCompletionModeFailed: InkRecognitionStatus = 8i32;
pub const IRS_SetStrokesFailed: InkRecognitionStatus = 16i32;
pub const IRS_SetGuideFailed: InkRecognitionStatus = 32i32;
pub const IRS_SetFlagsFailed: InkRecognitionStatus = 64i32;
pub const IRS_SetFactoidFailed: InkRecognitionStatus = 128i32;
pub const IRS_SetPrefixSuffixFailed: InkRecognitionStatus = 256i32;
pub const IRS_SetWordListFailed: InkRecognitionStatus = 512i32;
// IRC_* (capabilities): recognizer capability bit flags. NOTE(review):
// the IRC_ prefix is shared with InkRecognitionConfidence above; the
// names do not collide but read carefully.
pub type InkRecognizerCapabilities = i32;
pub const IRC_DontCare: InkRecognizerCapabilities = 1i32;
pub const IRC_Object: InkRecognizerCapabilities = 2i32;
pub const IRC_FreeInput: InkRecognizerCapabilities = 4i32;
pub const IRC_LinedInput: InkRecognizerCapabilities = 8i32;
pub const IRC_BoxedInput: InkRecognizerCapabilities = 16i32;
pub const IRC_CharacterAutoCompletionInput: InkRecognizerCapabilities = 32i32;
pub const IRC_RightAndDown: InkRecognizerCapabilities = 64i32;
pub const IRC_LeftAndDown: InkRecognizerCapabilities = 128i32;
pub const IRC_DownAndLeft: InkRecognizerCapabilities = 256i32;
pub const IRC_DownAndRight: InkRecognizerCapabilities = 512i32;
pub const IRC_ArbitraryAngle: InkRecognizerCapabilities = 1024i32;
pub const IRC_Lattice: InkRecognizerCapabilities = 2048i32;
pub const IRC_AdviseInkChange: InkRecognizerCapabilities = 4096i32;
pub const IRC_StrokeReorder: InkRecognizerCapabilities = 8192i32;
pub const IRC_Personalizable: InkRecognizerCapabilities = 16384i32;
pub const IRC_PrefersArbitraryAngle: InkRecognizerCapabilities = 32768i32;
pub const IRC_PrefersParagraphBreaking: InkRecognizerCapabilities = 65536i32;
pub const IRC_PrefersSegmentation: InkRecognizerCapabilities = 131072i32;
pub const IRC_Cursive: InkRecognizerCapabilities = 262144i32;
pub const IRC_TextPrediction: InkRecognizerCapabilities = 524288i32;
pub const IRC_Alpha: InkRecognizerCapabilities = 1048576i32;
pub const IRC_Beta: InkRecognizerCapabilities = 2097152i32;
pub type InkRecognizerCharacterAutoCompletionMode = i32;
pub const IRCACM_Full: InkRecognizerCharacterAutoCompletionMode = 0i32;
pub const IRCACM_Prefix: InkRecognizerCharacterAutoCompletionMode = 1i32;
pub const IRCACM_Random: InkRecognizerCharacterAutoCompletionMode = 2i32;
pub const InkRecognizerContext: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2864998967,
data2: 37417,
data3: 20416,
data4: [140, 206, 68, 151, 86, 155, 244, 209],
};
pub const InkRecognizerGuide: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2272319809,
data2: 42554,
data3: 18033,
data4: [163, 117, 40, 85, 161, 142, 186, 115],
};
pub const InkRecognizers: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 2681530376, data2: 63206, data3: 20069, data4: [152, 211, 170, 57, 5, 76, 18, 85] };
pub const InkRectangle: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1135637286, data2: 43744, data3: 19298, data4: [168, 61, 95, 215, 104, 183, 53, 60] };
pub const InkRenderer: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2619131620,
data2: 55275,
data3: 20203,
data4: [144, 145, 21, 167, 200, 121, 30, 217],
};
pub type InkSelectionConstants = i32;
pub const ISC_FirstElement: InkSelectionConstants = 0i32;
pub const ISC_AllElements: InkSelectionConstants = -1i32;
pub type InkShiftKeyModifierFlags = i32;
pub const IKM_Shift: InkShiftKeyModifierFlags = 1i32;
pub const IKM_Control: InkShiftKeyModifierFlags = 2i32;
pub const IKM_Alt: InkShiftKeyModifierFlags = 4i32;
pub const InkStrokes: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1223987644, data2: 9230, data3: 18528, data4: [176, 121, 161, 233, 77, 61, 44, 134] };
pub type InkSystemGesture = i32;
pub const ISG_Tap: InkSystemGesture = 16i32;
pub const ISG_DoubleTap: InkSystemGesture = 17i32;
pub const ISG_RightTap: InkSystemGesture = 18i32;
pub const ISG_Drag: InkSystemGesture = 19i32;
pub const ISG_RightDrag: InkSystemGesture = 20i32;
pub const ISG_HoldEnter: InkSystemGesture = 21i32;
pub const ISG_HoldLeave: InkSystemGesture = 22i32;
pub const ISG_HoverEnter: InkSystemGesture = 23i32;
pub const ISG_HoverLeave: InkSystemGesture = 24i32;
pub const ISG_Flick: InkSystemGesture = 31i32;
pub const InkTablets: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1850723090, data2: 20746, data3: 19776, data4: [147, 4, 29, 161, 10, 233, 20, 124] };
pub const InkTransform: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 3822442812, data2: 5731, data3: 19064, data4: [161, 167, 34, 55, 93, 254, 186, 238] };
pub const InkWordList: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2649247892,
data2: 63263,
data3: 17649,
data4: [132, 113, 21, 162, 250, 118, 252, 243],
};
pub type InteractionMode = i32;
pub const InteractionMode_InPlace: InteractionMode = 0i32;
pub const InteractionMode_Floating: InteractionMode = 1i32;
pub const InteractionMode_DockedTop: InteractionMode = 2i32;
pub const InteractionMode_DockedBottom: InteractionMode = 3i32;
// Keyboard modifier bit flags.
pub type KEYMODIFIER = i32;
pub const KEYMODIFIER_CONTROL: KEYMODIFIER = 1i32;
pub const KEYMODIFIER_MENU: KEYMODIFIER = 2i32;
pub const KEYMODIFIER_SHIFT: KEYMODIFIER = 4i32;
pub const KEYMODIFIER_WIN: KEYMODIFIER = 8i32;
pub const KEYMODIFIER_ALTGR: KEYMODIFIER = 16i32;
pub const KEYMODIFIER_EXT: KEYMODIFIER = 32i32;
/// C-layout lattice metrics: a baseline segment plus a midline offset.
/// Requires `Win32_Foundation` because `LINE_SEGMENT` embeds `POINT`.
/// Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct LATTICE_METRICS {
    pub lsBaseline: LINE_SEGMENT,
    pub iMidlineOffset: i16,
}
// Line-metric selector enum: which reference line of a text row is meant.
pub type LINE_METRICS = i32;
pub const LM_BASELINE: LINE_METRICS = 0i32;
pub const LM_MIDLINE: LINE_METRICS = 1i32;
pub const LM_ASCENDER: LINE_METRICS = 2i32;
pub const LM_DESCENDER: LINE_METRICS = 3i32;
/// C-layout line segment given by its two endpoints.
/// Requires `Win32_Foundation` for `POINT`. Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct LINE_SEGMENT {
    pub PtA: super::super::Foundation::POINT,
    pub PtB: super::super::Foundation::POINT,
}
// Buffer-size limits used by the RECO_ATTRS arrays below, TIP window
// property strings, and math-input-control UI enums (generated).
pub const MAX_FRIENDLYNAME: u32 = 64u32;
pub const MAX_LANGUAGES: u32 = 64u32;
pub const MAX_PACKET_BUTTON_COUNT: u32 = 32u32;
pub const MAX_PACKET_PROPERTY_COUNT: u32 = 32u32;
pub const MAX_VENDORNAME: u32 = 32u32;
pub const MICROSOFT_TIP_COMBOBOXLIST_PROPERTY: &'static str = "Microsoft TIP ComboBox List Window Identifier";
pub const MICROSOFT_TIP_NO_INSERT_BUTTON_PROPERTY: &'static str = "Microsoft TIP No Insert Option";
pub const MICROSOFT_TIP_OPENING_MSG: &'static str = "TabletInputPanelOpening";
pub const MICROSOFT_URL_EXPERIENCE_PROPERTY: &'static str = "Microsoft TIP URL Experience";
// MICUIELEMENT: math-input-control UI element bit flags.
pub type MICUIELEMENT = i32;
pub const MICUIELEMENT_BUTTON_WRITE: MICUIELEMENT = 1i32;
pub const MICUIELEMENT_BUTTON_ERASE: MICUIELEMENT = 2i32;
pub const MICUIELEMENT_BUTTON_CORRECT: MICUIELEMENT = 4i32;
pub const MICUIELEMENT_BUTTON_CLEAR: MICUIELEMENT = 8i32;
pub const MICUIELEMENT_BUTTON_UNDO: MICUIELEMENT = 16i32;
pub const MICUIELEMENT_BUTTON_REDO: MICUIELEMENT = 32i32;
pub const MICUIELEMENT_BUTTON_INSERT: MICUIELEMENT = 64i32;
pub const MICUIELEMENT_BUTTON_CANCEL: MICUIELEMENT = 128i32;
pub const MICUIELEMENT_INKPANEL_BACKGROUND: MICUIELEMENT = 256i32;
pub const MICUIELEMENT_RESULTPANEL_BACKGROUND: MICUIELEMENT = 512i32;
pub type MICUIELEMENTSTATE = i32;
pub const MICUIELEMENTSTATE_NORMAL: MICUIELEMENTSTATE = 1i32;
pub const MICUIELEMENTSTATE_HOT: MICUIELEMENTSTATE = 2i32;
pub const MICUIELEMENTSTATE_PRESSED: MICUIELEMENTSTATE = 3i32;
pub const MICUIELEMENTSTATE_DISABLED: MICUIELEMENTSTATE = 4i32;
pub const MathInputControl: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3311501676,
data2: 5336,
data3: 16528,
data4: [131, 12, 152, 217, 148, 178, 28, 123],
};
// Mouse button bit flags.
pub type MouseButton = i32;
pub const NO_BUTTON: MouseButton = 0i32;
pub const LEFT_BUTTON: MouseButton = 1i32;
pub const RIGHT_BUTTON: MouseButton = 2i32;
pub const MIDDLE_BUTTON: MouseButton = 4i32;
pub const NUM_FLICK_DIRECTIONS: u32 = 8u32;
/// C-layout packet descriptor: counted out-of-line arrays of packet
/// properties and button GUIDs. The struct only stores raw pointers and
/// counts — it does not own the pointed-to memory. `Copy`/`Clone` copy
/// the pointers, not the arrays.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct PACKET_DESCRIPTION {
    pub cbPacketSize: u32,
    pub cPacketProperties: u32,
    pub pPacketProperties: *mut PACKET_PROPERTY,
    pub cButtons: u32,
    pub pguidButtons: *mut ::windows_sys::core::GUID,
}
/// C-layout packet property: a property GUID paired with its metrics.
/// Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct PACKET_PROPERTY {
    pub guid: ::windows_sys::core::GUID,
    pub PropertyMetrics: PROPERTY_METRICS,
}
#[repr(C)]
pub struct PROPERTY_METRICS {
pub nLogicalMin: i32,
pub nLogicalMax: i32,
pub Units: PROPERTY_UNITS,
pub fResolution: f32,
}
impl ::core::marker::Copy for PROPERTY_METRICS {}
impl ::core::clone::Clone for PROPERTY_METRICS {
fn clone(&self) -> Self {
*self
}
}
// Measurement-unit tags for PROPERTY_METRICS.Units, input-panel enums,
// the recognizer callback type, and RECOCONF/RECOFLAG constants (generated).
pub type PROPERTY_UNITS = i32;
pub const PROPERTY_UNITS_DEFAULT: PROPERTY_UNITS = 0i32;
pub const PROPERTY_UNITS_INCHES: PROPERTY_UNITS = 1i32;
pub const PROPERTY_UNITS_CENTIMETERS: PROPERTY_UNITS = 2i32;
pub const PROPERTY_UNITS_DEGREES: PROPERTY_UNITS = 3i32;
pub const PROPERTY_UNITS_RADIANS: PROPERTY_UNITS = 4i32;
pub const PROPERTY_UNITS_SECONDS: PROPERTY_UNITS = 5i32;
pub const PROPERTY_UNITS_POUNDS: PROPERTY_UNITS = 6i32;
pub const PROPERTY_UNITS_GRAMS: PROPERTY_UNITS = 7i32;
pub const PROPERTY_UNITS_SILINEAR: PROPERTY_UNITS = 8i32;
pub const PROPERTY_UNITS_SIROTATION: PROPERTY_UNITS = 9i32;
pub const PROPERTY_UNITS_ENGLINEAR: PROPERTY_UNITS = 10i32;
pub const PROPERTY_UNITS_ENGROTATION: PROPERTY_UNITS = 11i32;
pub const PROPERTY_UNITS_SLUGS: PROPERTY_UNITS = 12i32;
pub const PROPERTY_UNITS_KELVIN: PROPERTY_UNITS = 13i32;
pub const PROPERTY_UNITS_FAHRENHEIT: PROPERTY_UNITS = 14i32;
pub const PROPERTY_UNITS_AMPERE: PROPERTY_UNITS = 15i32;
pub const PROPERTY_UNITS_CANDELA: PROPERTY_UNITS = 16i32;
pub type PanelInputArea = i32;
pub const PanelInputArea_Auto: PanelInputArea = 0i32;
pub const PanelInputArea_Keyboard: PanelInputArea = 1i32;
pub const PanelInputArea_WritingPad: PanelInputArea = 2i32;
pub const PanelInputArea_CharacterPad: PanelInputArea = 3i32;
pub type PanelType = i32;
pub const PT_Default: PanelType = 0i32;
pub const PT_Inactive: PanelType = 1i32;
pub const PT_Handwriting: PanelType = 2i32;
pub const PT_Keyboard: PanelType = 3i32;
pub const PenInputPanel: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 4148487318,
data2: 7002,
data3: 18590,
data4: [129, 220, 251, 215, 172, 98, 152, 168],
};
pub const PenInputPanel_Internal: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 2150309817, data2: 1387, data3: 18208, data4: [176, 204, 128, 210, 59, 113, 23, 30] };
// Optional C callback invoked by the recognizer; HRECOCONTEXT is the
// recognizer-context handle declared elsewhere in this module.
pub type PfnRecoCallback = ::core::option::Option<unsafe extern "system" fn(param0: u32, param1: *mut u8, param2: HRECOCONTEXT) -> ::windows_sys::core::HRESULT>;
// Recognition-confidence levels; note the mixed signedness is as emitted
// by the SDK metadata (LOWCONFIDENCE is the only signed value).
pub const RECOCONF_HIGHCONFIDENCE: u32 = 1u32;
pub const RECOCONF_LOWCONFIDENCE: i32 = -1i32;
pub const RECOCONF_MEDIUMCONFIDENCE: u32 = 0u32;
pub const RECOCONF_NOTSET: u32 = 128u32;
// RECOFLAG_*: recognizer behavior bit flags.
pub const RECOFLAG_AUTOSPACE: u32 = 64u32;
pub const RECOFLAG_COERCE: u32 = 2u32;
pub const RECOFLAG_DISABLEPERSONALIZATION: u32 = 32u32;
pub const RECOFLAG_LINEMODE: u32 = 16u32;
pub const RECOFLAG_PREFIXOK: u32 = 8u32;
pub const RECOFLAG_SINGLESEG: u32 = 4u32;
pub const RECOFLAG_WORDMODE: u32 = 1u32;
/// C-layout recognizer attributes: capability flags plus fixed-size
/// UTF-16 buffers whose lengths match MAX_VENDORNAME (32),
/// MAX_FRIENDLYNAME (64) and MAX_LANGUAGES (64).
/// Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct RECO_ATTRS {
    pub dwRecoCapabilityFlags: u32,
    pub awcVendorName: [u16; 32],
    pub awcFriendlyName: [u16; 64],
    pub awLanguageId: [u16; 64],
}
/// C-layout recognition guide: origin, box/base dimensions, box counts
/// and midline — all plain `i32` coordinates whose interpretation is
/// defined by the Windows recognizer API. Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct RECO_GUIDE {
    pub xOrigin: i32,
    pub yOrigin: i32,
    pub cxBox: i32,
    pub cyBox: i32,
    pub cxBase: i32,
    pub cyBase: i32,
    pub cHorzBox: i32,
    pub cVertBox: i32,
    pub cyMid: i32,
}
/// C-layout recognition lattice header: counted out-of-line arrays of
/// columns, property GUIDs, and the best-result column/index lists.
/// Stores raw pointers only — no ownership; `Copy`/`Clone` duplicate the
/// pointers, not the arrays they address.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct RECO_LATTICE {
    pub ulColumnCount: u32,
    pub pLatticeColumns: *mut RECO_LATTICE_COLUMN,
    pub ulPropertyCount: u32,
    pub pGuidProperties: *mut ::windows_sys::core::GUID,
    pub ulBestResultColumnCount: u32,
    pub pulBestResultColumns: *mut u32,
    pub pulBestResultIndexes: *mut u32,
}
/// C-layout lattice column: a key, column-level properties, and counted
/// out-of-line arrays of stroke indices and lattice elements (raw,
/// non-owning pointers). `Copy`/`Clone` are shallow.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct RECO_LATTICE_COLUMN {
    pub key: u32,
    pub cpProp: RECO_LATTICE_PROPERTIES,
    pub cStrokes: u32,
    pub pStrokes: *mut u32,
    pub cLatticeElements: u32,
    pub pLatticeElements: *mut RECO_LATTICE_ELEMENT,
}
#[repr(C)]
pub struct RECO_LATTICE_ELEMENT {
pub score: i32,
pub r#type: u16,
pub pData: *mut u8,
pub ulNextColumn: u32,
pub ulStrokeNumber: u32,
pub epProp: RECO_LATTICE_PROPERTIES,
}
impl ::core::marker::Copy for RECO_LATTICE_ELEMENT {}
impl ::core::clone::Clone for RECO_LATTICE_ELEMENT {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
pub struct RECO_LATTICE_PROPERTIES {
pub cProperties: u32,
pub apProps: *mut *mut RECO_LATTICE_PROPERTY,
}
impl ::core::marker::Copy for RECO_LATTICE_PROPERTIES {}
impl ::core::clone::Clone for RECO_LATTICE_PROPERTIES {
fn clone(&self) -> Self {
*self
}
}
/// C-layout lattice property: identifying GUID plus a sized raw byte
/// buffer (`cbPropertyValue` bytes at `pPropertyValue`, non-owning).
/// `Copy`/`Clone` are shallow.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct RECO_LATTICE_PROPERTY {
    pub guidProperty: ::windows_sys::core::GUID,
    pub cbPropertyValue: u16,
    pub pPropertyValue: *mut u8,
}
/// C-layout character range: start index (`iwcBegin`) and length
/// (`cCount`). Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct RECO_RANGE {
    pub iwcBegin: u32,
    pub cCount: u32,
}
// RF_*: recognizer capability bit flags (numerically parallel to the
// IRC_* InkRecognizerCapabilities values above), RealTimeStylus coclass
// GUID, and RealTimeStylus event/lock enums (generated).
pub const RF_ADVISEINKCHANGE: i32 = 4096i32;
pub const RF_ARBITRARY_ANGLE: i32 = 1024i32;
pub const RF_BOXED_INPUT: i32 = 16i32;
pub const RF_CAC_INPUT: i32 = 32i32;
pub const RF_DONTCARE: i32 = 1i32;
pub const RF_DOWN_AND_LEFT: i32 = 256i32;
pub const RF_DOWN_AND_RIGHT: i32 = 512i32;
pub const RF_FREE_INPUT: i32 = 4i32;
pub const RF_LATTICE: i32 = 2048i32;
pub const RF_LEFT_AND_DOWN: i32 = 128i32;
pub const RF_LINED_INPUT: i32 = 8i32;
pub const RF_OBJECT: i32 = 2i32;
pub const RF_PERFORMSLINEBREAKING: i32 = 65536i32;
pub const RF_PERSONALIZABLE: i32 = 16384i32;
pub const RF_REQUIRESSEGMENTATIONBREAKING: i32 = 131072i32;
pub const RF_RIGHT_AND_DOWN: i32 = 64i32;
pub const RF_STROKEREORDER: i32 = 8192i32;
pub const RealTimeStylus: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3798677101,
data2: 63896,
data3: 17358,
data4: [131, 111, 203, 109, 144, 68, 50, 176],
};
// RTSDI_*: RealTimeStylus data-interest bit flags (-1 = all).
pub type RealTimeStylusDataInterest = i32;
pub const RTSDI_AllData: RealTimeStylusDataInterest = -1i32;
pub const RTSDI_None: RealTimeStylusDataInterest = 0i32;
pub const RTSDI_Error: RealTimeStylusDataInterest = 1i32;
pub const RTSDI_RealTimeStylusEnabled: RealTimeStylusDataInterest = 2i32;
pub const RTSDI_RealTimeStylusDisabled: RealTimeStylusDataInterest = 4i32;
pub const RTSDI_StylusNew: RealTimeStylusDataInterest = 8i32;
pub const RTSDI_StylusInRange: RealTimeStylusDataInterest = 16i32;
pub const RTSDI_InAirPackets: RealTimeStylusDataInterest = 32i32;
pub const RTSDI_StylusOutOfRange: RealTimeStylusDataInterest = 64i32;
pub const RTSDI_StylusDown: RealTimeStylusDataInterest = 128i32;
pub const RTSDI_Packets: RealTimeStylusDataInterest = 256i32;
pub const RTSDI_StylusUp: RealTimeStylusDataInterest = 512i32;
pub const RTSDI_StylusButtonUp: RealTimeStylusDataInterest = 1024i32;
pub const RTSDI_StylusButtonDown: RealTimeStylusDataInterest = 2048i32;
pub const RTSDI_SystemEvents: RealTimeStylusDataInterest = 4096i32;
pub const RTSDI_TabletAdded: RealTimeStylusDataInterest = 8192i32;
pub const RTSDI_TabletRemoved: RealTimeStylusDataInterest = 16384i32;
pub const RTSDI_CustomStylusDataAdded: RealTimeStylusDataInterest = 32768i32;
pub const RTSDI_UpdateMapping: RealTimeStylusDataInterest = 65536i32;
pub const RTSDI_DefaultEvents: RealTimeStylusDataInterest = 37766i32;
// RTSLT_*: lock-type bit flags; the *ObjLock values are pre-combined masks.
pub type RealTimeStylusLockType = i32;
pub const RTSLT_ObjLock: RealTimeStylusLockType = 1i32;
pub const RTSLT_SyncEventLock: RealTimeStylusLockType = 2i32;
pub const RTSLT_AsyncEventLock: RealTimeStylusLockType = 4i32;
pub const RTSLT_ExcludeCallback: RealTimeStylusLockType = 8i32;
pub const RTSLT_SyncObjLock: RealTimeStylusLockType = 11i32;
pub const RTSLT_AsyncObjLock: RealTimeStylusLockType = 13i32;
pub const SAFE_PARTIAL: u32 = 1u32;
pub type SCROLLDIRECTION = i32;
pub const SCROLLDIRECTION_UP: SCROLLDIRECTION = 0i32;
pub const SCROLLDIRECTION_DOWN: SCROLLDIRECTION = 1i32;
/// C-layout stroke index range (`iStrokeBegin` .. `iStrokeEnd`).
/// Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct STROKE_RANGE {
    pub iStrokeBegin: u32,
    pub iStrokeEnd: u32,
}
/// C-layout system-event payload: modifier byte, key code, cursor
/// position, cursor mode and button state.
/// Plain-old-data, so `Copy`/`Clone`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct SYSTEM_EVENT_DATA {
    pub bModifier: u8,
    pub wKey: u16,
    pub xPos: i32,
    pub yPos: i32,
    pub bCursorMode: u8,
    pub dwButtonState: u32,
}
// InkEdit (RTF-based) scroll bar configuration.
pub type ScrollBarsConstants = i32;
pub const rtfNone: ScrollBarsConstants = 0i32;
pub const rtfHorizontal: ScrollBarsConstants = 1i32;
pub const rtfVertical: ScrollBarsConstants = 2i32;
pub const rtfBoth: ScrollBarsConstants = 3i32;
// InkEdit selection alignment.
pub type SelAlignmentConstants = i32;
pub const rtfLeft: SelAlignmentConstants = 0i32;
pub const rtfRight: SelAlignmentConstants = 1i32;
pub const rtfCenter: SelAlignmentConstants = 2i32;
// Hit-test result for an ink selection: compass-point resize handles
// (NW/SE/NE/SW/E/W/N/S), the selection body itself, or no hit.
pub type SelectionHitResult = i32;
pub const SHR_None: SelectionHitResult = 0i32;
pub const SHR_NW: SelectionHitResult = 1i32;
pub const SHR_SE: SelectionHitResult = 2i32;
pub const SHR_NE: SelectionHitResult = 3i32;
pub const SHR_SW: SelectionHitResult = 4i32;
pub const SHR_E: SelectionHitResult = 5i32;
pub const SHR_W: SelectionHitResult = 6i32;
pub const SHR_N: SelectionHitResult = 7i32;
pub const SHR_S: SelectionHitResult = 8i32;
pub const SHR_Selection: SelectionHitResult = 9i32;
// COM CLSIDs for the SketchInk and StrokeBuilder coclasses.
pub const SketchInk: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 4029223041,
data2: 59516,
data3: 19975,
data4: [151, 218, 160, 160, 55, 97, 229, 134],
};
pub const StrokeBuilder: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 3893415655,
data2: 28241,
data3: 19632,
data4: [170, 58, 11, 152, 91, 112, 218, 247],
};
// Identifies a stylus by its tablet context and cursor ids.
// Gated on Win32_Foundation because of the BOOL field.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct StylusInfo {
// Tablet context id.
pub tcid: u32,
// Cursor id.
pub cid: u32,
// Whether the cursor is the inverted (eraser) end of the stylus.
pub bIsInvertedCursor: super::super::Foundation::BOOL,
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::marker::Copy for StylusInfo {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::clone::Clone for StylusInfo {
fn clone(&self) -> Self {
*self
}
}
// RealTimeStylus plugin queue selector.
pub type StylusQueue = i32;
pub const SyncStylusQueue: StylusQueue = 1i32;
pub const AsyncStylusQueueImmediate: StylusQueue = 2i32;
pub const AsyncStylusQueue: StylusQueue = 3i32;
// Per-window tablet behavior flags (bit values for enabling/disabling
// press-and-hold, flicks, touch UI, etc.).
pub const TABLET_DISABLE_FLICKFALLBACKKEYS: u32 = 1048576u32;
pub const TABLET_DISABLE_FLICKS: u32 = 65536u32;
pub const TABLET_DISABLE_PENBARRELFEEDBACK: u32 = 16u32;
pub const TABLET_DISABLE_PENTAPFEEDBACK: u32 = 8u32;
pub const TABLET_DISABLE_PRESSANDHOLD: u32 = 1u32;
pub const TABLET_DISABLE_SMOOTHSCROLLING: u32 = 524288u32;
pub const TABLET_DISABLE_TOUCHSWITCH: u32 = 32768u32;
pub const TABLET_DISABLE_TOUCHUIFORCEOFF: u32 = 512u32;
pub const TABLET_DISABLE_TOUCHUIFORCEON: u32 = 256u32;
pub const TABLET_ENABLE_FLICKLEARNINGMODE: u32 = 262144u32;
pub const TABLET_ENABLE_FLICKSONCONTEXT: u32 = 131072u32;
pub const TABLET_ENABLE_MULTITOUCHDATA: u32 = 16777216u32;
// Kind of digitizer device.
pub type TabletDeviceKind = i32;
pub const TDK_Mouse: TabletDeviceKind = 0i32;
pub const TDK_Pen: TabletDeviceKind = 1i32;
pub const TDK_Touch: TabletDeviceKind = 2i32;
// Hardware capability bits reported by a tablet.
pub type TabletHardwareCapabilities = i32;
pub const THWC_Integrated: TabletHardwareCapabilities = 1i32;
pub const THWC_CursorMustTouch: TabletHardwareCapabilities = 2i32;
pub const THWC_HardProximity: TabletHardwareCapabilities = 4i32;
pub const THWC_CursorsHavePhysicalIds: TabletHardwareCapabilities = 8i32;
// Measurement unit for tablet property metrics.
pub type TabletPropertyMetricUnit = i32;
pub const TPMU_Default: TabletPropertyMetricUnit = 0i32;
pub const TPMU_Inches: TabletPropertyMetricUnit = 1i32;
pub const TPMU_Centimeters: TabletPropertyMetricUnit = 2i32;
pub const TPMU_Degrees: TabletPropertyMetricUnit = 3i32;
pub const TPMU_Radians: TabletPropertyMetricUnit = 4i32;
pub const TPMU_Seconds: TabletPropertyMetricUnit = 5i32;
pub const TPMU_Pounds: TabletPropertyMetricUnit = 6i32;
pub const TPMU_Grams: TabletPropertyMetricUnit = 7i32;
// COM CLSIDs for the Text Input Panel coclasses.
pub const TextInputPanel: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 4189161943, data2: 8843, data3: 20267, data4: [134, 80, 185, 127, 89, 224, 44, 140] };
pub const TipAutoCompleteClient: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
data1: 2155617900,
data2: 7424,
data3: 17727,
data4: [185, 32, 182, 27, 183, 205, 217, 151],
};
// Visual/docking state of the Text Input Panel.
pub type VisualState = i32;
pub const InPlace: VisualState = 0i32;
pub const Floating: VisualState = 1i32;
pub const DockedTop: VisualState = 2i32;
pub const DockedBottom: VisualState = 3i32;
pub const Closed: VisualState = 4i32;
// Window messages for tablet notifications (offsets from WM_TABLET_DEFBASE).
pub const WM_TABLET_ADDED: u32 = 712u32;
pub const WM_TABLET_DEFBASE: u32 = 704u32;
pub const WM_TABLET_DELETED: u32 = 713u32;
pub const WM_TABLET_FLICK: u32 = 715u32;
pub const WM_TABLET_MAXOFFSET: u32 = 32u32;
pub const WM_TABLET_QUERYSYSTEMGESTURESTATUS: u32 = 716u32;
// Event-sink COM interfaces, represented as opaque pointers in windows-sys.
pub type _IInkCollectorEvents = *mut ::core::ffi::c_void;
pub type _IInkEditEvents = *mut ::core::ffi::c_void;
pub type _IInkEvents = *mut ::core::ffi::c_void;
pub type _IInkOverlayEvents = *mut ::core::ffi::c_void;
pub type _IInkPictureEvents = *mut ::core::ffi::c_void;
pub type _IInkRecognitionEvents = *mut ::core::ffi::c_void;
pub type _IInkStrokesEvents = *mut ::core::ffi::c_void;
pub type _IMathInputControlEvents = *mut ::core::ffi::c_void;
pub type _IPenInputPanelEvents = *mut ::core::ffi::c_void;
// Flags controlling text-candidate retrieval.
pub type enumGetCandidateFlags = i32;
pub const TCF_ALLOW_RECOGNITION: enumGetCandidateFlags = 1i32;
pub const TCF_FORCE_RECOGNITION: enumGetCandidateFlags = 2i32;
// Ink-metric flag bits.
pub type enumINKMETRIC_FLAGS = i32;
pub const IMF_FONT_SELECTED_IN_HDC: enumINKMETRIC_FLAGS = 1i32;
pub const IMF_ITALIC: enumINKMETRIC_FLAGS = 2i32;
pub const IMF_BOLD: enumINKMETRIC_FLAGS = 4i32;
// Recognition result payload type.
pub type enumRECO_TYPE = i32;
pub const RECO_TYPE_WSTRING: enumRECO_TYPE = 0i32;
pub const RECO_TYPE_WCHAR: enumRECO_TYPE = 1i32;
|
use std::collections::BTreeMap;
/// Inverts a score-to-letters map into a letter-to-score map.
///
/// Each letter is lowercased before insertion. If the same letter appears
/// under several scores, the highest score wins: `BTreeMap` iterates keys in
/// ascending order and later inserts overwrite earlier ones (same behavior
/// as the original loop-based implementation).
pub fn transform(h: &BTreeMap<i32, Vec<char>>) -> BTreeMap<char, i32> {
    h.iter()
        .flat_map(|(score, letters)| {
            letters
                .iter()
                .map(move |letter| (letter.to_ascii_lowercase(), *score))
        })
        .collect()
}
|
#![allow(non_snake_case)]
use crate::{builtins::PyModule, PyRef, VirtualMachine};
// Builds the Python `winreg` module object and attaches the HKEY_* root-key
// constants from the `winreg` crate as plain integers.
pub(crate) fn make_module(vm: &VirtualMachine) -> PyRef<PyModule> {
let module = winreg::make_module(vm);
// Helper macro: exposes each named ::winreg::enums constant on the module
// under the same name, converted to usize.
macro_rules! add_constants {
($($name:ident),*$(,)?) => {
extend_module!(vm, &module, {
$((stringify!($name)) => vm.new_pyobj(::winreg::enums::$name as usize)),*
})
};
}
add_constants!(
HKEY_CLASSES_ROOT,
HKEY_CURRENT_USER,
HKEY_LOCAL_MACHINE,
HKEY_USERS,
HKEY_PERFORMANCE_DATA,
HKEY_CURRENT_CONFIG,
HKEY_DYN_DATA,
);
module
}
#[pymodule]
mod winreg {
//! Implementation of the Python `winreg` module, backed by the `winreg`
//! crate and `winapi` constants.
use crate::common::lock::{PyRwLock, PyRwLockReadGuard, PyRwLockWriteGuard};
use crate::{
builtins::PyStrRef, convert::ToPyException, PyObjectRef, PyPayload, PyRef, PyResult,
TryFromObject, VirtualMachine,
};
use ::winreg::{enums::RegType, RegKey, RegValue};
use std::{ffi::OsStr, io};
use winapi::shared::winerror;
// access rights
#[pyattr]
pub use winapi::um::winnt::{
KEY_ALL_ACCESS, KEY_CREATE_LINK, KEY_CREATE_SUB_KEY, KEY_ENUMERATE_SUB_KEYS, KEY_EXECUTE,
KEY_NOTIFY, KEY_QUERY_VALUE, KEY_READ, KEY_SET_VALUE, KEY_WOW64_32KEY, KEY_WOW64_64KEY,
KEY_WRITE,
};
// value types
#[pyattr]
pub use winapi::um::winnt::{
REG_BINARY, REG_DWORD, REG_DWORD_BIG_ENDIAN, REG_DWORD_LITTLE_ENDIAN, REG_EXPAND_SZ,
REG_FULL_RESOURCE_DESCRIPTOR, REG_LINK, REG_MULTI_SZ, REG_NONE, REG_QWORD,
REG_QWORD_LITTLE_ENDIAN, REG_RESOURCE_LIST, REG_RESOURCE_REQUIREMENTS_LIST, REG_SZ,
};
/// Python-visible handle object (`HKEYType`) wrapping an open registry key.
#[pyattr]
#[pyclass(module = "winreg", name = "HKEYType")]
#[derive(Debug, PyPayload)]
struct PyHkey {
key: PyRwLock<RegKey>,
}
type PyHkeyRef = PyRef<PyHkey>;
// TODO: fix this
unsafe impl Sync for PyHkey {}
impl PyHkey {
fn new(key: RegKey) -> Self {
Self {
key: PyRwLock::new(key),
}
}
/// Shared read access to the wrapped RegKey.
fn key(&self) -> PyRwLockReadGuard<'_, RegKey> {
self.key.read()
}
/// Exclusive write access to the wrapped RegKey.
fn key_mut(&self) -> PyRwLockWriteGuard<'_, RegKey> {
self.key.write()
}
}
#[pyclass]
impl PyHkey {
/// Close the key: swap in a null predefined handle and drop the real
/// one (RegKey's Drop closes the underlying handle).
#[pymethod]
fn Close(&self) {
let null_key = RegKey::predef(0 as ::winreg::HKEY);
let key = std::mem::replace(&mut *self.key_mut(), null_key);
drop(key);
}
/// Detach and return the raw handle as an integer. `mem::forget`
/// prevents the handle from being closed here; ownership passes to
/// the caller.
#[pymethod]
fn Detach(&self) -> usize {
let null_key = RegKey::predef(0 as ::winreg::HKEY);
let key = std::mem::replace(&mut *self.key_mut(), null_key);
let handle = key.raw_handle();
std::mem::forget(key);
handle as usize
}
/// True while the key still holds a non-null handle.
#[pymethod(magic)]
fn bool(&self) -> bool {
!self.key().raw_handle().is_null()
}
#[pymethod(magic)]
fn enter(zelf: PyRef<Self>) -> PyRef<Self> {
zelf
}
/// Context-manager exit closes the key.
#[pymethod(magic)]
fn exit(&self, _cls: PyObjectRef, _exc: PyObjectRef, _tb: PyObjectRef) {
self.Close();
}
}
/// A key argument from Python: either an HKEYType object or a bare integer
/// constant such as HKEY_CURRENT_USER.
enum Hkey {
PyHkey(PyHkeyRef),
Constant(::winreg::HKEY),
}
impl TryFromObject for Hkey {
fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> {
obj.downcast().map(Self::PyHkey).or_else(|o| {
usize::try_from_object(vm, o).map(|i| Self::Constant(i as ::winreg::HKEY))
})
}
}
impl Hkey {
/// Run `f` with a borrowed RegKey view of this key. For constants the
/// temporary RegKey is `mem::forget`-ten so its Drop does not close a
/// handle we don't own.
fn with_key<R>(&self, f: impl FnOnce(&RegKey) -> R) -> R {
match self {
Self::PyHkey(py) => f(&py.key()),
Self::Constant(hkey) => {
let k = RegKey::predef(*hkey);
let res = f(&k);
std::mem::forget(k);
res
}
}
}
/// Convert into an owned RegKey built from the raw handle.
fn into_key(self) -> RegKey {
let k = match self {
Self::PyHkey(py) => py.key().raw_handle(),
Self::Constant(k) => k,
};
RegKey::predef(k)
}
}
#[derive(FromArgs)]
struct OpenKeyArgs {
key: Hkey,
sub_key: Option<PyStrRef>,
#[pyarg(any, default = "0")]
reserved: i32,
#[pyarg(any, default = "::winreg::enums::KEY_READ")]
access: u32,
}
/// Registered under both names: `OpenKey` and `OpenKeyEx`.
#[pyfunction(name = "OpenKeyEx")]
#[pyfunction]
fn OpenKey(args: OpenKeyArgs, vm: &VirtualMachine) -> PyResult<PyHkey> {
let OpenKeyArgs {
key,
sub_key,
reserved,
access,
} = args;
if reserved != 0 {
// RegKey::open_subkey* doesn't have a reserved param, so this'll do
return Err(vm.new_value_error("reserved param must be 0".to_owned()));
}
let sub_key = sub_key.as_ref().map_or("", |s| s.as_str());
let key = key
.with_key(|k| k.open_subkey_with_flags(sub_key, access))
.map_err(|e| e.to_pyexception(vm))?;
Ok(PyHkey::new(key))
}
#[pyfunction]
fn QueryValue(key: Hkey, subkey: Option<PyStrRef>, vm: &VirtualMachine) -> PyResult<String> {
let subkey = subkey.as_ref().map_or("", |s| s.as_str());
key.with_key(|k| k.get_value(subkey))
.map_err(|e| e.to_pyexception(vm))
}
/// Returns `(value, type)` where `type` is the raw REG_* type id.
#[pyfunction]
fn QueryValueEx(
key: Hkey,
subkey: Option<PyStrRef>,
vm: &VirtualMachine,
) -> PyResult<(PyObjectRef, usize)> {
let subkey = subkey.as_ref().map_or("", |s| s.as_str());
let regval = key
.with_key(|k| k.get_raw_value(subkey))
.map_err(|e| e.to_pyexception(vm))?;
#[allow(clippy::redundant_clone)]
let ty = regval.vtype.clone() as usize;
Ok((reg_to_py(regval, vm)?, ty))
}
#[pyfunction]
fn EnumKey(key: Hkey, index: u32, vm: &VirtualMachine) -> PyResult<String> {
// An out-of-range index (nth() returned None) surfaces as
// ERROR_NO_MORE_ITEMS, converted to a Python exception.
key.with_key(|k| k.enum_keys().nth(index as usize))
.unwrap_or_else(|| {
Err(io::Error::from_raw_os_error(
winerror::ERROR_NO_MORE_ITEMS as i32,
))
})
.map_err(|e| e.to_pyexception(vm))
}
/// Returns `(name, value, type)` for the value at `index`.
#[pyfunction]
fn EnumValue(
key: Hkey,
index: u32,
vm: &VirtualMachine,
) -> PyResult<(String, PyObjectRef, usize)> {
let (name, value) = key
.with_key(|k| k.enum_values().nth(index as usize))
.unwrap_or_else(|| {
Err(io::Error::from_raw_os_error(
winerror::ERROR_NO_MORE_ITEMS as i32,
))
})
.map_err(|e| e.to_pyexception(vm))?;
#[allow(clippy::redundant_clone)]
let ty = value.vtype.clone() as usize;
Ok((name, reg_to_py(value, vm)?, ty))
}
#[pyfunction]
fn CloseKey(key: Hkey) {
match key {
Hkey::PyHkey(py) => py.Close(),
Hkey::Constant(hkey) => drop(RegKey::predef(hkey)),
}
}
/// Create (or open, if it exists) a subkey; with no subkey, returns a new
/// handle to the key itself.
#[pyfunction]
fn CreateKey(key: Hkey, subkey: Option<PyStrRef>, vm: &VirtualMachine) -> PyResult<PyHkey> {
let k = match subkey {
Some(subkey) => {
let (k, _disp) = key
.with_key(|k| k.create_subkey(subkey.as_str()))
.map_err(|e| e.to_pyexception(vm))?;
k
}
None => key.into_key(),
};
Ok(PyHkey::new(k))
}
/// Set the default value of `subkey`; only REG_SZ is accepted, matching
/// the check below.
#[pyfunction]
fn SetValue(
key: Hkey,
subkey: Option<PyStrRef>,
typ: u32,
value: PyStrRef,
vm: &VirtualMachine,
) -> PyResult<()> {
if typ != REG_SZ {
return Err(vm.new_type_error("type must be winreg.REG_SZ".to_owned()));
}
let subkey = subkey.as_ref().map_or("", |s| s.as_str());
key.with_key(|k| k.set_value(subkey, &OsStr::new(value.as_str())))
.map_err(|e| e.to_pyexception(vm))
}
#[pyfunction]
fn DeleteKey(key: Hkey, subkey: PyStrRef, vm: &VirtualMachine) -> PyResult<()> {
key.with_key(|k| k.delete_subkey(subkey.as_str()))
.map_err(|e| e.to_pyexception(vm))
}
/// Convert a raw registry value into the corresponding Python object.
fn reg_to_py(value: RegValue, vm: &VirtualMachine) -> PyResult {
// Decode an integer value of type $int via $int::$f; empty data reads
// as 0, wrong-length data is a ValueError naming the registry type.
macro_rules! bytes_to_int {
($int:ident, $f:ident, $name:ident) => {{
let i = if value.bytes.is_empty() {
Ok(0 as $int)
} else {
(&*value.bytes).try_into().map($int::$f).map_err(|_| {
// Fixed: was `stringify!(name)`, which rendered the
// literal string "name" instead of the registry type.
vm.new_value_error(format!("{} value is wrong length", stringify!($name)))
})
};
i.map(|i| vm.ctx.new_int(i).into())
}};
}
// Reinterpret the byte buffer as UTF-16 code units; requires an even
// byte length. NOTE(review): assumes the buffer is 2-byte aligned for
// the u16 cast — verify against winreg's buffer allocation.
let bytes_to_wide = |b: &[u8]| -> Option<&[u16]> {
if b.len() % 2 == 0 {
Some(unsafe { std::slice::from_raw_parts(b.as_ptr().cast(), b.len() / 2) })
} else {
None
}
};
match value.vtype {
RegType::REG_DWORD => bytes_to_int!(u32, from_ne_bytes, REG_DWORD),
RegType::REG_DWORD_BIG_ENDIAN => {
bytes_to_int!(u32, from_be_bytes, REG_DWORD_BIG_ENDIAN)
}
// Fixed: the name argument said REG_DWORD, producing a misleading
// error message for QWORD values.
RegType::REG_QWORD => bytes_to_int!(u64, from_ne_bytes, REG_QWORD),
// RegType::REG_QWORD_BIG_ENDIAN => bytes_to_int!(u64, from_be_bytes, REG_DWORD_BIG_ENDIAN),
RegType::REG_SZ | RegType::REG_EXPAND_SZ => {
let wide_slice = bytes_to_wide(&value.bytes).ok_or_else(|| {
vm.new_value_error("REG_SZ string doesn't have an even byte length".to_owned())
})?;
// Truncate at the first NUL, if any.
let nul_pos = wide_slice
.iter()
.position(|w| *w == 0)
.unwrap_or(wide_slice.len());
let s = String::from_utf16_lossy(&wide_slice[..nul_pos]);
Ok(vm.ctx.new_str(s).into())
}
RegType::REG_MULTI_SZ => {
if value.bytes.is_empty() {
return Ok(vm.ctx.new_list(vec![]).into());
}
let wide_slice = bytes_to_wide(&value.bytes).ok_or_else(|| {
vm.new_value_error(
"REG_MULTI_SZ string doesn't have an even byte length".to_owned(),
)
})?;
// Drop the trailing NUL terminator before splitting on NULs.
let wide_slice = if let Some((0, rest)) = wide_slice.split_last() {
rest
} else {
wide_slice
};
let strings = wide_slice
.split(|c| *c == 0)
.map(|s| vm.new_pyobj(String::from_utf16_lossy(s)))
.collect();
Ok(vm.ctx.new_list(strings).into())
}
// All other types: None for empty data, raw bytes otherwise.
_ => {
if value.bytes.is_empty() {
Ok(vm.ctx.none())
} else {
Ok(vm.ctx.new_bytes(value.bytes).into())
}
}
}
}
}
|
/* Copyright (C) 2016 Yutaka Kamei */
extern crate scim;
extern crate rustc_serialize;
use rustc_serialize::json;
use scim::schema::resource::*;
// Sample SCIM 2.0 User creation payload (RFC 7644-style JSON), decoded in
// test_json below.
const POST_DATA : &'static str = "{
    \"schemas\":[\"urn:ietf:params:scim:schemas:core:2.0:User\"],
    \"userName\":\"bjensen\",
    \"externalId\":\"bjensen\",
    \"name\":{
        \"formatted\":\"Ms. Barbara J Jensen III\",
        \"familyName\":\"Jensen\",
        \"givenName\":\"Barbara\"
    }
}
";
#[test]
fn test_eq() {
    // User equality is structural: same data compares equal regardless of
    // which instance it lives in.
    let mut user1 = User::new("user1");
    let user2 = User::new("user2");
    let mut user1_dup1 = User::new("user1");
    let mut user1_dup2 = User::new("user1");
    assert!(user1 != user2);
    assert!(user1 == user1_dup1);
    assert!(user1 == user1_dup2);

    // Changing a simple field breaks equality.
    user1_dup1.title = Some(Title("President".to_string()));
    assert!(user1 != user1_dup1);

    // Identical nested (email) data keeps two users equal.
    let home_email = || {
        vec![Email {
            typ: Some("home".to_string()),
            primary: Some(false),
            value: Some("user1@example.com".to_string()),
        }]
    };
    user1.emails = Some(home_email());
    user1_dup2.emails = Some(home_email());
    assert!(user1 == user1_dup2);

    // A difference deep inside the nested data breaks equality again.
    if let Some(ref mut emails) = user1_dup2.emails {
        emails[0].primary = Some(true);
    }
    assert!(user1 != user1_dup2);
}
#[test]
fn test_json() {
    // Decode the sample SCIM POST payload and verify the parsed fields.
    let user: User = json::decode(POST_DATA).unwrap();
    assert_eq!(user.userName, UserName("bjensen".to_string()));
    let expected_name = Name {
        formatted: Some("Ms. Barbara J Jensen III".to_string()),
        familyName: Some("Jensen".to_string()),
        givenName: Some("Barbara".to_string()),
        middleName: None,
        honorificPrefix: None,
        honorificSuffix: None,
    };
    assert_eq!(user.name.unwrap(), expected_name);
}
|
import sys;
import ptr;
import unsafe;
export _chan;
export _port;
export mk_port;
// FFI declarations into the runtime's native channel/port implementation
// (pre-1.0 Rust `native mod` syntax; `*T` is a raw pointer).
native "rust" mod rustrt {
type void;
type rust_chan;
type rust_port;
// Channel lifecycle: create against a port, drop a reference, destroy.
fn new_chan(po : *rust_port) -> *rust_chan;
fn del_chan(ch : *rust_chan);
fn drop_chan(ch : *rust_chan);
// Send a value (as an opaque pointer) over a channel.
fn chan_send(ch: *rust_chan, v : *void);
// Port lifecycle: `unit_sz` is the byte size of one message.
fn new_port(unit_sz : uint) -> *rust_port;
fn del_port(po : *rust_port);
fn drop_port(po : *rust_port);
// Blocking receive into the buffer at `dp`.
fn port_recv(dp : *void, po : *rust_port);
}
// Destructor resource for a raw channel pointer: releases the runtime
// references, then destroys the channel.
resource chan_ptr(ch: *rustrt::rust_chan) {
rustrt::drop_chan(ch);
rustrt::drop_chan(ch); // FIXME: We shouldn't have to do this
// twice.
rustrt::del_chan(ch);
}
// Destructor resource for a raw port pointer: drop the runtime reference,
// then destroy the port.
resource port_ptr(po: *rustrt::rust_port) {
rustrt::drop_port(po);
rustrt::del_port(po);
}
// Typed sending endpoint over a shared (@-boxed) channel resource.
obj _chan[T](raw_chan : @chan_ptr) {
// Send `v` by passing its address to the runtime as an opaque pointer.
fn send(v : &T) {
rustrt::chan_send(**raw_chan,
unsafe::reinterpret_cast(ptr::addr_of(v)));
}
}
// Typed receiving endpoint over a shared (@-boxed) port resource.
obj _port[T](raw_port : @port_ptr) {
// Create a new channel that sends into this port.
fn mk_chan() -> _chan[T] {
_chan(@chan_ptr(rustrt::new_chan(**raw_port)))
}
// Blocking receive: the runtime writes the message into `v`'s storage.
fn recv_into(v : &T) {
rustrt::port_recv(unsafe::reinterpret_cast(ptr::addr_of(v)),
**raw_port);
}
}
// Create a new port for messages of type T, sized via size_of[T].
fn mk_port[T]() -> _port[T] {
_port(@port_ptr(rustrt::new_port(sys::size_of[T]())))
}
|
use proc_macro2::Group;
use quote::ToTokens;
use syn::{
parse::Nothing,
visit_mut::{self, VisitMut},
*,
};
#[cfg(feature = "try_trait")]
use crate::utils::expr_call;
use crate::utils::{expr_unimplemented, replace_expr, Attrs, AttrsMut};
use super::{Context, VisitMode, DEFAULT_MARKER, NAME, NEVER};
// =================================================================================================
// Visitor
// Tracks which syntactic contexts the visitor is currently inside; copied
// and restored at statement boundaries.
#[derive(Clone, Copy, Default)]
struct Scope {
/// in closures
closure: bool,
/// in try blocks
try_block: bool,
/// in the other `auto_enum` attributes
foreign: bool,
}
// Main expression visitor: rewrites `return` / `?` / `marker!` expressions
// according to the current Context's visit mode.
pub(super) struct Visitor<'a> {
cx: &'a mut Context,
scope: Scope,
}
impl<'a> Visitor<'a> {
pub(super) fn new(cx: &'a mut Context) -> Self {
Self { cx, scope: Scope::default() }
}
// Strips this crate's helper attributes from the node (validating that
// they carry no arguments) and reports uses of the removed NESTED_OLD
// attribute. Skipped inside foreign `auto_enum` scopes, which own their
// own attributes.
fn find_remove_attrs(&mut self, attrs: &mut impl AttrsMut) {
if !self.scope.foreign {
super::EMPTY_ATTRS.iter().for_each(|ident| {
if let Some(attr) = attrs.find_remove_attr(ident) {
if let Err(e) = syn::parse2::<Nothing>(attr.tokens) {
self.cx.diagnostic.error(e);
}
}
});
if let Some(old) = attrs.find_remove_attr(super::NESTED_OLD) {
self.cx.diagnostic.error(error!(
old,
"#[{}] has been removed and replaced with #[{}]",
super::NESTED_OLD,
super::NESTED
));
}
}
}
/// `return` in functions or closures
fn visit_return(&mut self, node: &mut Expr) {
debug_assert!(self.cx.visit_mode == VisitMode::Return);
// Only rewrite returns belonging to the target function itself
// (not nested closures) and not opted out via #[never].
if !self.scope.closure && !node.any_empty_attr(NEVER) {
// Desugar `return <expr>` into `return Enum::VariantN(<expr>)`.
if let Expr::Return(ExprReturn { expr, .. }) = node {
self.cx.replace_boxed_expr(expr);
}
}
}
/// `?` operator in functions or closures
fn visit_try(&mut self, node: &mut Expr) {
debug_assert!(self.cx.visit_mode == VisitMode::Try);
// `?` inside try blocks or closures propagates elsewhere, so leave it.
if !self.scope.try_block && !self.scope.closure && !node.any_empty_attr(NEVER) {
match &node {
// https://github.com/rust-lang/rust/blob/1.35.0/src/librustc/hir/lowering.rs#L4578-L4682
// Desugar `ExprKind::Try`
// from: `<expr>?`
Expr::Try(ExprTry { expr, .. })
// Skip if `<expr>` is a marker macro.
if !self.cx.is_marker_expr(&**expr) =>
{
// into:
//
// match // If "try_trait" feature enabled
// Try::into_result(<expr>)
// // Otherwise
// <expr>
// {
// Ok(val) => val,
// Err(err) => // If "try_trait" feature enabled
// return Try::from_error(Enum::VariantN(err)),
// // Otherwise
// return Err(Enum::VariantN(err)),
// }
replace_expr(node, |expr| {
#[allow(unused_mut)]
let ExprTry { attrs, mut expr, .. } =
if let Expr::Try(expr) = expr { expr } else { unreachable!() };
#[cfg(feature = "try_trait")]
replace_expr(&mut *expr, |expr| {
expr_call(
Vec::new(),
syn::parse_quote!(::core::ops::Try::into_result),
expr,
)
});
let mut arms = Vec::with_capacity(2);
arms.push(syn::parse_quote!(::core::result::Result::Ok(val) => val,));
let err = self.cx.next_expr(syn::parse_quote!(err));
#[cfg(feature = "try_trait")]
arms.push(syn::parse_quote!(::core::result::Result::Err(err) => return ::core::ops::Try::from_error(#err),));
#[cfg(not(feature = "try_trait"))]
arms.push(syn::parse_quote!(::core::result::Result::Err(err) => return ::core::result::Result::Err(#err),));
Expr::Match(ExprMatch {
attrs,
match_token: token::Match::default(),
expr,
brace_token: token::Brace::default(),
arms,
})
})
}
_ => {}
}
}
}
/// Expression level marker (`marker!` macro)
fn visit_marker_macro(&mut self, node: &mut Expr) {
debug_assert!(!self.scope.foreign || self.cx.marker != DEFAULT_MARKER);
match &node {
// Desugar `marker!(<expr>)` into `Enum::VariantN(<expr>)`.
Expr::Macro(ExprMacro { mac, .. })
// Skip if `marker!` is not a marker macro.
if self.cx.is_marker_macro_exact(mac) =>
{
replace_expr(node, |expr| {
let expr = if let Expr::Macro(expr) = expr { expr } else { unreachable!() };
// The macro's argument tokens become the variant payload;
// a parse failure is reported and replaced by a stub.
let args = syn::parse2(expr.mac.tokens).unwrap_or_else(|e| {
self.cx.diagnostic.error(e);
expr_unimplemented()
});
if self.cx.failed() {
args
} else {
self.cx.next_expr_with_attrs(expr.attrs, args)
}
})
}
_ => {}
}
}
}
impl VisitMut for Visitor<'_> {
fn visit_expr_mut(&mut self, node: &mut Expr) {
if !self.cx.failed() {
// NOTE(review): scope flags set here are only restored at
// statement boundaries (visit_stmt_mut), not per-expression.
match node {
Expr::Closure(_) => self.scope.closure = true,
// `?` operator in try blocks are not supported.
Expr::TryBlock(_) => self.scope.try_block = true,
_ => {}
}
// Apply the mode-specific rewrite before recursing.
match self.cx.visit_mode {
VisitMode::Return => self.visit_return(node),
VisitMode::Try => self.visit_try(node),
VisitMode::Default => {}
}
visit_mut::visit_expr_mut(self, node);
// Marker handling runs post-order; skipped in foreign scopes
// unless a custom marker name disambiguates ours.
if !self.scope.foreign || self.cx.marker != DEFAULT_MARKER {
self.visit_marker_macro(node);
self.find_remove_attrs(node);
}
}
}
fn visit_arm_mut(&mut self, node: &mut Arm) {
if !self.cx.failed() {
visit_mut::visit_arm_mut(self, node);
self.find_remove_attrs(node);
}
}
fn visit_local_mut(&mut self, node: &mut Local) {
if !self.cx.failed() {
visit_mut::visit_local_mut(self, node);
self.find_remove_attrs(node);
}
}
fn visit_stmt_mut(&mut self, node: &mut Stmt) {
if !self.cx.failed() {
// Save and restore the scope so flags set inside this statement
// don't leak into siblings.
let tmp = self.scope;
if node.any_attr(NAME) {
self.scope.foreign = true;
// Record whether other `auto_enum` attribute exists.
self.cx.other_attr = true;
}
visit_stmt(node, self, |this| this.cx);
self.scope = tmp;
}
}
fn visit_item_mut(&mut self, _: &mut Item) {
// Do not recurse into nested items.
}
}
// =================================================================================================
// FindTry
/// Find `?` operator.
pub(super) struct FindTry<'a> {
cx: &'a Context,
scope: Scope,
// Set to true once a relevant `?` expression has been found.
pub(super) has: bool,
}
impl<'a> FindTry<'a> {
pub(super) fn new(cx: &'a Context) -> Self {
Self { cx, scope: Scope::default(), has: false }
}
}
impl VisitMut for FindTry<'_> {
fn visit_expr_mut(&mut self, node: &mut Expr) {
// Save the scope so flags set for this subtree are restored on exit.
let tmp = self.scope;
if let Expr::Closure(_) = &node {
self.scope.closure = true;
}
// A `?` counts only outside closures and when not opted out via
// #[never]; marker-macro operands are the crate's own output.
if !self.scope.closure && !node.any_empty_attr(NEVER) {
if let Expr::Try(ExprTry { expr, .. }) = node {
// Skip if `<expr>` is a marker macro.
if !self.cx.is_marker_expr(&**expr) {
self.has = true;
}
}
}
if node.any_attr(NAME) {
self.scope.foreign = true;
}
// Stop recursing once a match is found.
if !self.has {
visit_mut::visit_expr_mut(self, node);
}
self.scope = tmp;
}
fn visit_local_mut(&mut self, node: &mut Local) {
let tmp = self.scope;
if node.any_attr(NAME) {
self.scope.foreign = true;
}
visit_mut::visit_local_mut(self, node);
self.scope = tmp;
}
fn visit_item_mut(&mut self, _: &mut Item) {
// Do not recurse into nested items.
}
}
// =================================================================================================
// Dummy visitor
// Minimal visitor that only handles nested `auto_enum` attributes without
// performing any expression rewriting.
pub(super) struct Dummy<'a> {
cx: &'a mut Context,
}
impl<'a> Dummy<'a> {
pub(super) fn new(cx: &'a mut Context) -> Self {
Self { cx }
}
}
impl VisitMut for Dummy<'_> {
fn visit_stmt_mut(&mut self, node: &mut Stmt) {
if !self.cx.failed() {
// Record the presence of another `auto_enum` attribute.
if node.any_attr(NAME) {
self.cx.other_attr = true;
}
visit_stmt(node, self, |this| this.cx);
}
}
fn visit_item_mut(&mut self, _: &mut Item) {
// Do not recurse into nested items.
}
}
// Shared statement handler: if the statement carries this crate's attribute
// (NAME), spawn a child Context for it, recurse, then expand the statement
// with the child context; otherwise just recurse. `f` extracts the Context
// from the concrete visitor.
fn visit_stmt<V>(node: &mut Stmt, visitor: &mut V, f: impl Fn(&mut V) -> &mut Context)
where
V: VisitMut,
{
let attr = match node {
Stmt::Expr(expr) | Stmt::Semi(expr, _) => expr.find_remove_attr(NAME),
Stmt::Local(local) => local.find_remove_attr(NAME),
// Do not recurse into nested items.
Stmt::Item(_) => None,
};
if let Some(Attribute { tokens, .. }) = attr {
// Parse the attribute's parenthesized arguments and build a child
// context for the annotated statement.
let res = syn::parse2::<Group>(tokens)
.and_then(|group| f(visitor).make_child(node.to_token_stream(), group.stream()));
visit_mut::visit_stmt_mut(visitor, node);
match res {
Err(e) => {
// On error, report and replace the statement with a stub.
f(visitor).diagnostic.error(e);
*node = Stmt::Expr(expr_unimplemented());
}
Ok(mut cx) => {
super::expand_parent_stmt(node, &mut cx).unwrap_or_else(|e| {
cx.diagnostic.error(e);
*node = Stmt::Expr(expr_unimplemented());
});
// Fold the child's diagnostics/state back into the parent.
f(visitor).join_child(cx)
}
}
} else {
visit_mut::visit_stmt_mut(visitor, node);
}
}
|
extern crate rustc_serialize;
extern crate docopt;
extern crate walkdir;
extern crate pulldown_cmark;
extern crate mustache;
extern crate yaml_rust;
extern crate virgil;
// docopt usage string: defines the CLI grammar and is parsed at runtime to
// populate `Args`. Do not edit the text without updating `Args` to match.
const USAGE: &'static str = "
Virgil - a rusty static site generator.
Usage:
  virgil init [-v] [-p <path>]
  virgil post [-v] [-p <path>] <file>
  virgil page [-v] [-p <path>] <file>
  virgil [build] [-v] [-p <path>]
  virgil serve [-v] [-p <path>]
Options:
  -p <path>  Run in directory <path>
  -v  Display debug info
";
// Message printed after a successful `virgil init`.
const INIT_WELCOME_MESSAGE: &'static str = "\
New virgil site initialized. Create some markdown files then run `virgil` to \
build your site.\
";
// CLI arguments decoded by docopt from USAGE; field names follow docopt's
// arg_/cmd_/flag_ naming convention.
#[derive(Debug, RustcDecodable)]
struct Args {
// Positional <file> for the `post`/`page` commands.
arg_file: String,
cmd_init: bool,
cmd_build: bool,
cmd_serve: bool,
cmd_post: bool,
cmd_page: bool,
// -p <path>: working directory override.
flag_p: Option<String>,
// -v: verbose/debug output.
flag_v: bool
}
/// Entry point: parses CLI arguments via docopt and dispatches to the
/// requested virgil subcommand (init / build / serve / post / page).
fn main() {
    use docopt::Docopt;
    use std::path::Path;
    use std::process;

    // docopt prints usage and exits on a parse failure.
    let args: Args = Docopt::new(USAGE)
        .and_then(|d| d.decode())
        .unwrap_or_else(|e| e.exit());
    if args.flag_v {
        println!("{:?}", args);
    }
    // `-p <path>` defaults to the current directory; `unwrap_or_else` avoids
    // allocating the default string when a path was supplied.
    let path_string = args.flag_p.unwrap_or_else(|| "./".to_string());
    let path = Path::new(&path_string);
    if args.cmd_init {
        match virgil::init::init_folder(path) {
            Ok(_) => println!("{}", INIT_WELCOME_MESSAGE),
            Err(msg) => println!("{}", msg),
        }
    } else {
        // Every non-init command requires an existing site config.
        let config = match virgil::config::read(path) {
            Ok(config) => config,
            Err(_) => {
                println!("This isn't a Virgil site, did you mean `virgil init`?");
                process::exit(-1);
            }
        };
        if args.cmd_serve || args.cmd_post || args.cmd_page {
            println!("This command is not yet implemented.");
        } else {
            // Default command (with or without the explicit `build` word).
            virgil::builders::build_all(path, &config).unwrap();
        }
    }
}
|
use crate::cpp_data::{CppBaseSpecifier, CppItem, CppPath, CppPathItem};
use crate::cpp_ffi_data::CppCast;
use crate::cpp_function::{CppFunction, CppFunctionArgument};
use crate::cpp_type::{CppPointerLikeTypeKind, CppType};
use crate::database::ItemWithSource;
use crate::processor::ProcessorData;
use ritual_common::errors::Result;
/// Convenience function to create `CppMethod` object for
/// `static_cast` or `dynamic_cast` from type `from` to type `to`.
/// See `CppMethod`'s documentation for more information
/// about `is_unsafe_static_cast` and `is_direct_static_cast`.
fn create_cast_method(cast: CppCast, from: &CppType, to: &CppType) -> Result<CppItem> {
let function = CppFunction {
// The cast is modeled as a free template function, with the target
// type as the single template argument (e.g. `static_cast<To>(ptr)`).
path: CppPath::from_item(CppPathItem {
name: cast.cpp_method_name().into(),
template_arguments: Some(vec![to.clone()]),
}),
member: None,
operator: None,
return_type: to.clone(),
// Single argument: the pointer being cast.
arguments: vec![CppFunctionArgument {
name: "ptr".to_string(),
argument_type: from.clone(),
has_default_value: false,
}],
allows_variadic_arguments: false,
declaration_code: None,
cast: Some(cast),
};
Ok(CppItem::Function(function))
}
/// Performs a portion of `generate_casts` operation.
/// Adds casts between `target_type` and `base_type` and calls
/// `generate_casts_one` recursively to add casts between `target_type`
/// and base types of `base_type`.
fn generate_casts_one(
target_type: &CppPath,
base_type: &CppPath,
// Index of the direct base in the derived class's base list; None for
// indirect (transitive) bases.
direct_base_index: Option<usize>,
data: &ProcessorData<'_>,
) -> Result<Vec<CppItem>> {
// Casts operate on mutable pointers to the two class types.
let target_ptr_type = CppType::PointerLike {
is_const: false,
kind: CppPointerLikeTypeKind::Pointer,
target: Box::new(CppType::Class(target_type.clone())),
};
let base_ptr_type = CppType::PointerLike {
is_const: false,
kind: CppPointerLikeTypeKind::Pointer,
target: Box::new(CppType::Class(base_type.clone())),
};
let mut new_methods = vec![
// Downcast base -> derived: unsafe static_cast.
create_cast_method(
CppCast::Static {
is_unsafe: true,
base_index: direct_base_index,
},
&base_ptr_type,
&target_ptr_type,
)?,
// Upcast derived -> base: safe static_cast.
create_cast_method(
CppCast::Static {
is_unsafe: false,
base_index: direct_base_index,
},
&target_ptr_type,
&base_ptr_type,
)?,
// Checked downcast base -> derived.
create_cast_method(CppCast::Dynamic, &base_ptr_type, &target_ptr_type)?,
];
// Recurse into the bases of `base_type` so the target also gets casts to
// and from its indirect ancestors.
for item in data.db.all_cpp_items().filter_map(|i| i.item.as_base_ref()) {
if &item.derived_class_type == base_type {
new_methods.extend(generate_casts_one(
target_type,
&item.base_class_type,
None,
data,
)?);
}
}
Ok(new_methods)
}
/// Adds `static_cast` and `dynamic_cast` functions for all appropriate pairs of types
/// in this `CppData`.
fn generate_casts(base: &CppBaseSpecifier, data: &ProcessorData<'_>) -> Result<Vec<CppItem>> {
// Start from the direct base relationship; recursion inside
// generate_casts_one covers indirect bases.
generate_casts_one(
&base.derived_class_type,
&base.base_class_type,
Some(base.base_index),
data,
)
}
// Processor step: generates cast functions for every base-class specifier in
// the database and records each with its source item id.
pub fn run(data: &mut ProcessorData<'_>) -> Result<()> {
let mut results = Vec::new();
// Collect first, then insert: avoids mutating the database while its
// items are being iterated.
let bases = data
.db
.cpp_items()
.filter_map(|item| item.filter_map(|item| item.as_base_ref()));
for item in bases {
for value in generate_casts(item.item, &data)? {
results.push(ItemWithSource::new(&item.id, value));
}
}
for item in results {
data.add_cpp_item(Some(item.source_id), item.item)?;
}
Ok(())
}
|
#[doc = r"Register block"]
// Generated (svd2rust-style) register layout. Each register is 8 bits wide
// and followed by a 3-byte `_reservedN` pad, giving a 32-bit stride.
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - Major git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``2``."]
pub major: MAJOR,
_reserved1: [u8; 3usize],
#[doc = "0x04 - Minor git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``0``."]
pub minor: MINOR,
_reserved2: [u8; 3usize],
#[doc = "0x08 - Revision git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``3``."]
pub revision: REVISION,
_reserved3: [u8; 3usize],
#[doc = "0x0c - Bits 24-31 of `VERSION_GITREV`. First 32-bits of the git revision. This documentation was built from git rev ``00000000``, so this value is 0, which should be enough to check out the exact git version used to build this firmware."]
pub gitrev3: GITREV3,
_reserved4: [u8; 3usize],
#[doc = "0x10 - Bits 16-23 of `VERSION_GITREV`."]
pub gitrev2: GITREV2,
_reserved5: [u8; 3usize],
#[doc = "0x14 - Bits 8-15 of `VERSION_GITREV`."]
pub gitrev1: GITREV1,
_reserved6: [u8; 3usize],
#[doc = "0x18 - Bits 0-7 of `VERSION_GITREV`."]
pub gitrev0: GITREV0,
_reserved7: [u8; 3usize],
#[doc = "0x1c - Bits 8-9 of `VERSION_GITEXTRA`. The number of additional commits beyond the git tag. For example, if this value is ``1``, then the repository this was built from has one additional commit beyond the tag indicated in `MAJOR`, `MINOR`, and `REVISION`."]
pub gitextra1: GITEXTRA1,
_reserved8: [u8; 3usize],
#[doc = "0x20 - Bits 0-7 of `VERSION_GITEXTRA`."]
pub gitextra0: GITEXTRA0,
_reserved9: [u8; 3usize],
#[doc = "0x24 - "]
pub dirty: DIRTY,
_reserved10: [u8; 3usize],
#[doc = "0x28 - "]
pub model: MODEL,
_reserved11: [u8; 3usize],
#[doc = "0x2c - Bits 24-31 of `VERSION_SEED`. 32-bit seed used for the place-and-route."]
pub seed3: SEED3,
_reserved12: [u8; 3usize],
#[doc = "0x30 - Bits 16-23 of `VERSION_SEED`."]
pub seed2: SEED2,
_reserved13: [u8; 3usize],
#[doc = "0x34 - Bits 8-15 of `VERSION_SEED`."]
pub seed1: SEED1,
_reserved14: [u8; 3usize],
#[doc = "0x38 - Bits 0-7 of `VERSION_SEED`."]
pub seed0: SEED0,
}
#[doc = "Major git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``2``.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [major](major) module"]
pub type MAJOR = crate::Reg<u8, _MAJOR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _MAJOR;
#[doc = "`read()` method returns [major::R](major::R) reader structure"]
impl crate::Readable for MAJOR {}
#[doc = "Major git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``2``."]
pub mod major;
#[doc = "Minor git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``0``.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [minor](minor) module"]
pub type MINOR = crate::Reg<u8, _MINOR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _MINOR;
#[doc = "`read()` method returns [minor::R](minor::R) reader structure"]
impl crate::Readable for MINOR {}
#[doc = "Minor git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``0``."]
pub mod minor;
#[doc = "Revision git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``3``.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [revision](revision) module"]
pub type REVISION = crate::Reg<u8, _REVISION>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _REVISION;
#[doc = "`read()` method returns [revision::R](revision::R) reader structure"]
impl crate::Readable for REVISION {}
#[doc = "Revision git tag version. For example, this firmware was built from git tag ``v2.0.3``, so this value is ``3``."]
pub mod revision;
#[doc = "Bits 24-31 of `VERSION_GITREV`. First 32-bits of the git revision. This documentation was built from git rev ``00000000``, so this value is 0, which should be enough to check out the exact git version used to build this firmware.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gitrev3](gitrev3) module"]
pub type GITREV3 = crate::Reg<u8, _GITREV3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GITREV3;
#[doc = "`read()` method returns [gitrev3::R](gitrev3::R) reader structure"]
impl crate::Readable for GITREV3 {}
#[doc = "Bits 24-31 of `VERSION_GITREV`. First 32-bits of the git revision. This documentation was built from git rev ``00000000``, so this value is 0, which should be enough to check out the exact git version used to build this firmware."]
pub mod gitrev3;
#[doc = "Bits 16-23 of `VERSION_GITREV`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gitrev2](gitrev2) module"]
pub type GITREV2 = crate::Reg<u8, _GITREV2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GITREV2;
#[doc = "`read()` method returns [gitrev2::R](gitrev2::R) reader structure"]
impl crate::Readable for GITREV2 {}
#[doc = "Bits 16-23 of `VERSION_GITREV`."]
pub mod gitrev2;
#[doc = "Bits 8-15 of `VERSION_GITREV`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gitrev1](gitrev1) module"]
pub type GITREV1 = crate::Reg<u8, _GITREV1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GITREV1;
#[doc = "`read()` method returns [gitrev1::R](gitrev1::R) reader structure"]
impl crate::Readable for GITREV1 {}
#[doc = "Bits 8-15 of `VERSION_GITREV`."]
pub mod gitrev1;
#[doc = "Bits 0-7 of `VERSION_GITREV`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gitrev0](gitrev0) module"]
pub type GITREV0 = crate::Reg<u8, _GITREV0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GITREV0;
#[doc = "`read()` method returns [gitrev0::R](gitrev0::R) reader structure"]
impl crate::Readable for GITREV0 {}
#[doc = "Bits 0-7 of `VERSION_GITREV`."]
pub mod gitrev0;
#[doc = "Bits 8-9 of `VERSION_GITEXTRA`. The number of additional commits beyond the git tag. For example, if this value is ``1``, then the repository this was built from has one additional commit beyond the tag indicated in `MAJOR`, `MINOR`, and `REVISION`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gitextra1](gitextra1) module"]
pub type GITEXTRA1 = crate::Reg<u8, _GITEXTRA1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GITEXTRA1;
#[doc = "`read()` method returns [gitextra1::R](gitextra1::R) reader structure"]
impl crate::Readable for GITEXTRA1 {}
#[doc = "Bits 8-9 of `VERSION_GITEXTRA`. The number of additional commits beyond the git tag. For example, if this value is ``1``, then the repository this was built from has one additional commit beyond the tag indicated in `MAJOR`, `MINOR`, and `REVISION`."]
pub mod gitextra1;
#[doc = "Bits 0-7 of `VERSION_GITEXTRA`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gitextra0](gitextra0) module"]
pub type GITEXTRA0 = crate::Reg<u8, _GITEXTRA0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GITEXTRA0;
#[doc = "`read()` method returns [gitextra0::R](gitextra0::R) reader structure"]
impl crate::Readable for GITEXTRA0 {}
#[doc = "Bits 0-7 of `VERSION_GITEXTRA`."]
pub mod gitextra0;
// NOTE(review): the upstream SVD provides no description for this register.
// Given its placement among the git version registers, it presumably flags an
// uncommitted-changes ("dirty") git working tree at build time -- confirm
// against the SoC/SVD source before documenting it as fact.
#[doc = "\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dirty](dirty) module"]
pub type DIRTY = crate::Reg<u8, _DIRTY>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DIRTY;
#[doc = "`read()` method returns [dirty::R](dirty::R) reader structure"]
impl crate::Readable for DIRTY {}
#[doc = ""]
pub mod dirty;
// NOTE(review): the upstream SVD provides no description for this register.
// Presumably it identifies the hardware model/variant the firmware was built
// for -- confirm against the SoC/SVD source before documenting it as fact.
#[doc = "\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [model](model) module"]
pub type MODEL = crate::Reg<u8, _MODEL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _MODEL;
#[doc = "`read()` method returns [model::R](model::R) reader structure"]
impl crate::Readable for MODEL {}
#[doc = ""]
pub mod model;
#[doc = "Bits 24-31 of `VERSION_SEED`. 32-bit seed used for the place-and-route.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [seed3](seed3) module"]
pub type SEED3 = crate::Reg<u8, _SEED3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SEED3;
#[doc = "`read()` method returns [seed3::R](seed3::R) reader structure"]
impl crate::Readable for SEED3 {}
#[doc = "Bits 24-31 of `VERSION_SEED`. 32-bit seed used for the place-and-route."]
pub mod seed3;
#[doc = "Bits 16-23 of `VERSION_SEED`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [seed2](seed2) module"]
pub type SEED2 = crate::Reg<u8, _SEED2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SEED2;
#[doc = "`read()` method returns [seed2::R](seed2::R) reader structure"]
impl crate::Readable for SEED2 {}
#[doc = "Bits 16-23 of `VERSION_SEED`."]
pub mod seed2;
#[doc = "Bits 8-15 of `VERSION_SEED`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [seed1](seed1) module"]
pub type SEED1 = crate::Reg<u8, _SEED1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SEED1;
#[doc = "`read()` method returns [seed1::R](seed1::R) reader structure"]
impl crate::Readable for SEED1 {}
#[doc = "Bits 8-15 of `VERSION_SEED`."]
pub mod seed1;
#[doc = "Bits 0-7 of `VERSION_SEED`.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [seed0](seed0) module"]
pub type SEED0 = crate::Reg<u8, _SEED0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SEED0;
#[doc = "`read()` method returns [seed0::R](seed0::R) reader structure"]
impl crate::Readable for SEED0 {}
#[doc = "Bits 0-7 of `VERSION_SEED`."]
pub mod seed0;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.