text stringlengths 8 4.13M |
|---|
use std::io::Read;
/// "Crab cups" simulation (Advent of Code 2020 day 23, part 2 sizing).
///
/// Reads the initial cup labels as a digit string from stdin, extends the
/// circle to one million cups, plays ten million moves, and prints the product
/// of the two cup labels immediately clockwise of cup 1.
fn main() {
    let mut input = String::new();
    std::io::stdin().read_to_string(&mut input).unwrap();
    // Changing this to usize works, but more than doubles the runtime
    type ItemType = u32;
    const FULL_CUP_COUNT: ItemType = 1000000;
    // Each input digit becomes one cup label.
    let initial_cups: Vec<_> = input.trim().chars().map(|digit| digit.to_digit(10).unwrap() as ItemType).collect();
    // Lookup table for the next cup after each cup: cups[label] is the label
    // that currently follows `label` clockwise (index 0 is unused).
    // Initially every cup is followed by the next-higher label.
    let mut cups: Vec<_> = (0..FULL_CUP_COUNT + 1).map(|x| x + 1).collect();
    let mut current_cup = initial_cups[0];
    // Close the circle: the highest cup points back at the first input cup.
    *cups.last_mut().unwrap() = current_cup;
    // Wire the input cups into the list; the last input cup links either to
    // the first "filler" cup or (for small circles) back to the start.
    for (i, &cup) in initial_cups.iter().enumerate() {
        cups[cup as usize] = if i == initial_cups.len() - 1 {
            if FULL_CUP_COUNT as usize > initial_cups.len() {
                (initial_cups.len() + 1) as ItemType
            } else {
                current_cup
            }
        } else {
            initial_cups[i + 1]
        }
    }
    for _ in 0..10000000 {
        // The three removed cups are the ones right after the current cup.
        let first_removed = cups[current_cup as usize];
        let last_removed = (0..2).fold(first_removed, |acc, _| cups[acc as usize]);
        // Splice the removed run out of the circle.
        cups[current_cup as usize] = cups[last_removed as usize];
        // Destination: count down (with wrap-around via the modulo below) from
        // current_cup - 1, skipping any label among the three removed cups.
        let destination = (current_cup - 1..current_cup + FULL_CUP_COUNT - 1).rev().find_map(|i| {
            let cup = i % FULL_CUP_COUNT + 1;
            // try_fold walks the three removed cups; Err means `cup` is one of them.
            if (0..3).try_fold(first_removed, |acc, _| {
                if cup == acc {
                    Err(())
                } else {
                    Ok(cups[acc as usize])
                }
            }).is_ok() {
                Some(cup)
            } else {
                None
            }
        }).unwrap();
        // Re-insert the removed run right after the destination cup.
        cups[last_removed as usize] = cups[destination as usize];
        cups[destination as usize] = first_removed;
        current_cup = cups[current_cup as usize];
    }
    // For part 1
    // (0..FULL_CUP_COUNT - 1).fold(1, |acc, _| {
    //     let next = cups[acc as usize];
    //     print!("{}", next);
    //     next
    // });
    // println!("");
    // Part 2 answer: product (as u64 to avoid overflow) of the two labels
    // clockwise of cup 1.
    println!("{}", (0..2).fold((1, 1), |(cup, product), _| {
        let next = cups[cup as usize];
        (next, product * next as u64)
    }).1);
}
|
/// Find all prime numbers less than `n`.
/// For example, `sieve(7)` should return `[2, 3, 5]`
/// Find all prime numbers less than `n` using the sieve of Eratosthenes.
/// For example, `sieve(7)` returns `[2, 3, 5]`; `sieve(2)` and `sieve(0)`
/// return an empty vector.
pub fn sieve(n: u32) -> Vec<u32> {
    let mut is_prime = vec![true; n as usize];
    // Only factors up to sqrt(n) need to be sieved.
    let upper = (n as f64).sqrt() as u32 + 1;
    for i in 2..upper {
        // Skip composites: all their multiples were already marked by a
        // smaller prime factor, so re-marking them is wasted work.
        if !is_prime[i as usize] {
            continue;
        }
        // Start at i*i: smaller multiples were marked by smaller primes.
        let mut j = i * i;
        while j < n {
            is_prime[j as usize] = false;
            j += i;
        }
    }
    (2..n).filter(|&i| is_prime[i as usize]).collect()
}
|
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT.
/// A streaming socket bundled with the yielder and byte counter that every
/// TLS helper method below passes to the `SessionExt` calls.
#[derive(Debug)]
pub(crate) struct GenericStream<'yielder, SD: SocketData>
{
    /// The underlying streaming socket.
    streaming_socket_file_descriptor: StreamingSocketFileDescriptor<SD>,
    /// Yielder handed to each TLS session call (presumably to suspend the
    /// coroutine until the socket is ready — confirm against `SessionExt`).
    input_output_yielder: InputOutputYielder<'yielder>,
    /// Counter threaded through every TLS operation on this stream.
    byte_counter: ByteCounter,
}
impl<'yielder, SD: SocketData> GenericStream<'yielder, SD>
{
    /// Drives the TLS handshake to completion over this stream.
    #[inline(always)]
    fn tls_handshake(&mut self, tls_session: &mut impl SessionExt) -> Result<(), CompleteError>
    {
        tls_session.complete_handshaking::<SD>(&self.streaming_socket_file_descriptor, &mut self.input_output_yielder, &mut self.byte_counter)
    }
    /// Reads decrypted data from the TLS session into `read_into_buffer`,
    /// returning the number of bytes read.
    #[inline(always)]
    fn tls_read(&mut self, tls_session: &mut impl SessionExt, read_into_buffer: &mut [u8]) -> Result<usize, CompleteError>
    {
        tls_session.stream_read::<SD>(&self.streaming_socket_file_descriptor, &mut self.input_output_yielder, &mut self.byte_counter, read_into_buffer)
    }
    /// Writes `write_from_buffer` through the TLS session, returning the
    /// number of bytes written.
    #[inline(always)]
    fn tls_write(&mut self, tls_session: &mut impl SessionExt, write_from_buffer: &[u8]) -> Result<usize, CompleteError>
    {
        tls_session.stream_write::<SD>(&self.streaming_socket_file_descriptor, &mut self.input_output_yielder, &mut self.byte_counter, write_from_buffer)
    }
    /// Flushes any TLS data buffered for writing.
    #[inline(always)]
    fn tls_flush_written_data(&mut self, tls_session: &mut impl SessionExt) -> Result<(), CompleteError>
    {
        tls_session.stream_flush::<SD>(&self.streaming_socket_file_descriptor, &mut self.input_output_yielder, &mut self.byte_counter)
    }
    /// Performs an orderly TLS close over this stream.
    #[inline(always)]
    fn tls_finish(&mut self, tls_session: &mut impl SessionExt) -> Result<(), CompleteError>
    {
        tls_session.stream_close::<SD>(&self.streaming_socket_file_descriptor, &mut self.input_output_yielder, &mut self.byte_counter)
    }
}
impl<'yielder, SD: SocketData> GenericStream<'yielder, SD>
{
    /// Wraps a socket and a coroutine yielder into a `GenericStream` with a
    /// fresh (zeroed) byte counter.
    #[inline(always)]
    pub(crate) fn wrap(streaming_socket_file_descriptor: StreamingSocketFileDescriptor<SD>, yielder: Yielder<'yielder, ReactEdgeTriggeredStatus, (), Result<(), CompleteError>>) -> Self
    {
        Self::new(streaming_socket_file_descriptor, InputOutputYielder::new(yielder), ByteCounter::default())
    }
    /// Decomposes the stream back into its socket and yielder; inverse of `wrap`.
    ///
    /// NOTE(review): the second tuple element is declared as `Yielder<...>` but
    /// `self.input_output_yielder` is an `InputOutputYielder`; this only works
    /// if those types coincide (e.g. a type alias or transparent wrapper) —
    /// confirm against the `InputOutputYielder` definition.
    #[inline(always)]
    pub(crate) fn unwrap(self) -> (StreamingSocketFileDescriptor<SD>, Yielder<'yielder, ReactEdgeTriggeredStatus, (), Result<(), CompleteError>>)
    {
        (self.streaming_socket_file_descriptor, self.input_output_yielder)
    }
    /// Plain constructor from already-built parts.
    #[inline(always)]
    fn new(streaming_socket_file_descriptor: StreamingSocketFileDescriptor<SD>, input_output_yielder: InputOutputYielder<'yielder>, byte_counter: ByteCounter) -> Self
    {
        Self
        {
            streaming_socket_file_descriptor,
            input_output_yielder,
            byte_counter,
        }
    }
}
|
use std::cmp::min;
use handlebars::Handlebars;
use v_htmlescape::escape;
use super::utils::{self, rematch, Difference};
use crate::{config::Diff2HtmlConfig, parse};
// Handlebars template sources compiled into the binary at build time.
// `'static` is elided: `static` items already have the `'static` lifetime
// (clippy: redundant_static_lifetimes).
static GENERIC_COLUMN_LINE_NUMBER: &str =
    include_str!("../templates/generic-column-line-number.hbs");
static GENERIC_EMPTY_DIFF: &str = include_str!("../templates/generic-empty-diff.hbs");
static GENERIC_FILE_PATH: &str = include_str!("../templates/generic-file-path.hbs");
static GENERIC_LINE: &str = include_str!("../templates/generic-line.hbs");
static GENERIC_WRAPPER: &str = include_str!("../templates/generic-wrapper.hbs");
static LINE_BY_LINE_FILE_DIFF: &str =
    include_str!("../templates/line-by-line-file-diff.hbs");
static LINE_BY_LINE_NUMBERS: &str = include_str!("../templates/line-by-line-numbers.hbs");
static ICON_FILE: &str = include_str!("../templates/icon-file.hbs");
/// Renders parsed diff files into "line-by-line" HTML using pre-registered
/// Handlebars templates.
pub struct LineByLinePrinter {
    // Rendering options (e.g. the `matching` strategy read in process_change_block).
    config: Diff2HtmlConfig,
    // Template registry; all templates are registered once in `new`.
    handlebars: Handlebars,
    // Pairs up removed/added lines within a change block.
    line_matcher: rematch::Rematcher<parse::Line>,
    // Matches intra-line differences for highlighting.
    diff_matcher: rematch::Rematcher<Difference>,
}
impl LineByLinePrinter {
pub fn new(config: Diff2HtmlConfig) -> LineByLinePrinter {
let mut handlebars = Handlebars::new();
handlebars
.register_template_string("generic-column-line-number", GENERIC_COLUMN_LINE_NUMBER)
.unwrap();
handlebars
.register_template_string("generic-empty-diff", GENERIC_EMPTY_DIFF)
.unwrap();
handlebars
.register_template_string("generic-file-path", GENERIC_FILE_PATH)
.unwrap();
handlebars
.register_template_string("generic-line", GENERIC_LINE)
.unwrap();
handlebars
.register_template_string("generic-wrapper", GENERIC_WRAPPER)
.unwrap();
handlebars
.register_template_string("line-by-line-numbers", LINE_BY_LINE_NUMBERS)
.unwrap();
handlebars
.register_template_string("line-by-line-file-diff", LINE_BY_LINE_FILE_DIFF)
.unwrap();
LineByLinePrinter {
config: config,
handlebars: handlebars,
line_matcher: utils::get_line_matcher(),
diff_matcher: utils::get_difference_matcher(),
}
}
pub fn render(&self, files: &Vec<parse::File>) -> String {
let output = files
.iter()
.map(|file| {
let diffs = if file.blocks.len() > 0 {
self.generate_file_html(file)
} else {
utils::generate_empty_diff(&self.handlebars, "d2h-code-side-line")
};
self.generate_file_diff_html(file, diffs)
})
.collect::<Vec<String>>()
.join("\n");
self.handlebars
.render(
"generic-wrapper",
&json!({
"content": output,
}),
)
.unwrap()
}
    /// Wraps one file's rendered diff lines (`diffs`) in the per-file
    /// header/container HTML.
    fn generate_file_diff_html(&self, file: &parse::File, diffs: String) -> String {
        // Render the file-path header first...
        let file_path = self
            .handlebars
            .render(
                "generic-file-path",
                &json!({
                    "fileDiffName": utils::get_diff_name(file),
                    "fileIcon": ICON_FILE,
                    "fileTag": utils::get_line_type_tag(file).to_owned(),
                }),
            )
            .unwrap();
        // ...then embed it together with the diff body in the file wrapper.
        self.handlebars
            .render(
                "line-by-line-file-diff",
                &json!({
                    "file": file.to_owned(),
                    "fileHtmlId": utils::get_html_id(file),
                    "diffs": diffs,
                    "filePath": file_path,
                }),
            )
            .unwrap()
    }
fn generate_file_html(&self, file: &parse::File) -> String {
file.blocks
.iter()
.map(|block| {
let mut lines = utils::make_column_line_number_html(
&self.handlebars,
block.header.as_ref().unwrap(),
"d2h-code-linenumber",
"d2h-code-line",
);
let mut old_lines = Vec::new();
let mut new_lines = Vec::new();
for i in 0..block.lines.len() {
let line = &block.lines[i];
let escaped_line = escape(&line.content).to_string();
if line.line_type != Some(parse::LineType::Inserts)
&& (new_lines.len() > 0
|| (line.line_type != Some(parse::LineType::Deletes)
&& old_lines.len() > 0))
{
self.process_change_block(file, &mut lines, &mut old_lines, &mut new_lines);
}
if line.line_type == Some(parse::LineType::Context) {
lines += &self.generate_line_html(
file.is_combined,
line.line_type.as_ref().unwrap(),
line.old_number,
line.new_number,
escaped_line,
None,
);
} else if line.line_type == Some(parse::LineType::Inserts)
&& old_lines.len() == 0
{
lines += &self.generate_line_html(
file.is_combined,
line.line_type.as_ref().unwrap(),
line.old_number,
line.new_number,
escaped_line,
None,
);
} else if line.line_type == Some(parse::LineType::Deletes) {
old_lines.push(line.to_owned());
} else if line.line_type == Some(parse::LineType::Inserts)
&& old_lines.len() > 0
{
new_lines.push(line.to_owned());
} else {
eprintln!("Unknown state in html line-by-line-generator.");
self.process_change_block(file, &mut lines, &mut old_lines, &mut new_lines);
}
}
self.process_change_block(file, &mut lines, &mut old_lines, &mut new_lines);
lines
})
.collect::<Vec<String>>()
.join("\n")
}
    /// Flushes buffered delete/insert runs into rendered HTML, pairing up old
    /// and new lines (with intra-line diff highlighting) when matching is
    /// enabled; both buffers are left empty on return.
    fn process_change_block(
        &self,
        file: &parse::File,
        lines: &mut String,
        old_lines: &mut Vec<parse::Line>,
        new_lines: &mut Vec<parse::Line>,
    ) {
        // Line matching is O(old * new); skip it for very large change blocks
        // or when the config disables it.
        let comparisons = old_lines.len() * new_lines.len();
        let max_comparisons = 2500;
        let do_matching = comparisons < max_comparisons && (self.config.matching != "none");
        let old_lines2 = old_lines.to_owned();
        let new_lines2 = new_lines.to_owned();
        // With matching on, paired lines render as "change" types; otherwise
        // the whole block is a single unmatched group of plain inserts/deletes.
        let (matches, insert_type, delete_type) = {
            if do_matching {
                (
                    self.line_matcher.matches(&old_lines2, &new_lines2),
                    parse::LineType::InsertChanges,
                    parse::LineType::DeleteChanges,
                )
            } else {
                (
                    vec![vec![old_lines2.as_ref(), new_lines2.as_ref()]],
                    parse::LineType::Inserts,
                    parse::LineType::Deletes,
                )
            }
        };
        matches.iter().for_each(|item| {
            // item[0]/item[1] are one matched group of old/new lines; the
            // caller's buffers are reused as scratch space here.
            *old_lines = item[0].to_vec();
            *new_lines = item[1].to_vec();
            let mut processed_old_lines = String::new();
            let mut processed_new_lines = String::new();
            // Pair lines index-by-index up to the shorter side.
            let common = min(old_lines.len(), new_lines.len());
            let mut j = 0;
            let mut old_line;
            let mut new_line;
            while j < common {
                old_line = Some(&old_lines[j]);
                new_line = Some(&new_lines[j]);
                // TODO: hmmm
                //self.is_combined = file.is_combined;
                let diff = utils::diff_highlight(
                    &self.config,
                    Some(&self.diff_matcher),
                    &old_line.as_ref().unwrap().content,
                    &new_line.as_ref().unwrap().content,
                );
                processed_old_lines += &self.generate_line_html(
                    file.is_combined,
                    &delete_type,
                    old_line.as_ref().and_then(|v| v.old_number),
                    old_line.as_ref().and_then(|v| v.new_number),
                    diff.first.line,
                    Some(diff.first.prefix),
                );
                processed_new_lines += &self.generate_line_html(
                    file.is_combined,
                    &insert_type,
                    new_line.as_ref().and_then(|v| v.old_number),
                    new_line.as_ref().and_then(|v| v.new_number),
                    diff.second.line,
                    Some(diff.second.prefix),
                );
                j += 1;
            }
            // All old lines render before all new lines, then the unpaired
            // remainder renders as plain deletes/inserts.
            *lines += &processed_old_lines as &str;
            *lines += &processed_new_lines as &str;
            *lines +=
                &self.process_lines(file.is_combined, &old_lines[common..], &new_lines[common..]);
        });
        // Leave both buffers empty for the next change block.
        *old_lines = Vec::new();
        *new_lines = Vec::new();
    }
fn generate_line_html(
&self,
is_combined: bool,
line_type: &parse::LineType,
old_number: Option<usize>,
new_number: Option<usize>,
content: String,
possible_prefix: Option<&str>,
) -> String {
let line_number = self
.handlebars
.render(
"line-by-line-numbers",
&json!({
"oldNumber": old_number,
"newNumber": new_number,
}),
)
.unwrap();
let (prefix, line_without_prefix) = match possible_prefix {
Some(prefix) => (prefix, content),
_ => {
let line_with_prefix = utils::separate_prefix(is_combined, &content);
(line_with_prefix.prefix, line_with_prefix.line.to_owned())
}
};
self.handlebars
.render(
"generic-line",
&json!({
"type": utils::get_line_type_class(line_type).to_owned(),
"lineClass": "d2h-code-linenumber".to_owned(),
"contentClass": "d2h-code-line".to_owned(),
"prefix": prefix.to_owned(),
"content": line_without_prefix,
"lineNumber": line_number,
}),
)
.unwrap()
}
fn process_lines(
&self,
is_combined: bool,
old_lines: &[parse::Line],
new_lines: &[parse::Line],
) -> String {
let mut lines = String::new();
for i in 0..old_lines.len() {
let old_line = &old_lines[i];
let old_escaped_line = escape(&old_line.content);
lines += &self.generate_line_html(
is_combined,
old_line.line_type.as_ref().unwrap(),
old_line.old_number,
old_line.new_number,
old_escaped_line.to_string(),
None,
);
}
for j in 0..new_lines.len() {
let new_line = &new_lines[j];
let new_escaped_line = escape(&new_line.content);
lines += &self.generate_line_html(
is_combined,
new_line.line_type.as_ref().unwrap(),
new_line.old_number,
new_line.new_number,
new_escaped_line.to_string(),
None,
);
}
lines
}
}
|
use byteorder::{BigEndian, WriteBytesExt};
use naia_shared::{
wrapping_diff, ActorType, Event, EventPacketWriter, EventType, LocalActorKey, ManagerType,
Manifest, MTU_SIZE,
};
use super::command_receiver::CommandReceiver;
const MAX_PAST_COMMANDS: u8 = 2;
/// Handles writing of Event & Actor data into an outgoing packet
pub struct ClientPacketWriter {
    // Serialized commands buffered for the next outgoing packet.
    command_working_bytes: Vec<u8>,
    // Number of commands currently encoded in `command_working_bytes`.
    command_count: u8,
    // Delegate writer for plain (non-command) events.
    event_writer: EventPacketWriter,
}
impl ClientPacketWriter {
    /// Construct a new, empty `ClientPacketWriter` with no buffered command
    /// or event bytes.
    pub fn new() -> ClientPacketWriter {
        ClientPacketWriter {
            command_working_bytes: Vec::<u8>::new(),
            command_count: 0,
            event_writer: EventPacketWriter::new(),
        }
    }
/// Returns whether the writer has bytes to write into the outgoing packet
pub fn has_bytes(&self) -> bool {
return self.command_count != 0 || self.event_writer.has_bytes();
}
    /// Gets the bytes to write into an outgoing packet
    ///
    /// Drains the buffered commands (and delegates to the event writer), so
    /// the writer is left empty afterwards.
    pub fn get_bytes(&mut self) -> Box<[u8]> {
        let mut out_bytes = Vec::<u8>::new();
        //Write manager "header" (manager type & command count)
        if self.command_count != 0 {
            out_bytes.write_u8(ManagerType::Command as u8).unwrap(); // write manager type
            out_bytes.write_u8(self.command_count).unwrap(); // write number of events in the following message
            out_bytes.append(&mut self.command_working_bytes); // write event payload
            self.command_count = 0;
        }
        // Events are appended after the command section.
        self.event_writer.get_bytes(&mut out_bytes);
        out_bytes.into_boxed_slice()
    }
/// Get the number of bytes which is ready to be written into an outgoing
/// packet
pub fn bytes_number(&self) -> usize {
return self.command_working_bytes.len() + self.event_writer.bytes_number();
}
    /// Writes a Command into the Writer's internal buffer, which will
    /// eventually be put into the outgoing packet
    ///
    /// Returns `true` if the command fits within the `MTU_SIZE` budget for the
    /// current packet, `false` if it must wait for a later packet.
    pub fn write_command<T: EventType, U: ActorType>(
        &mut self,
        host_tick: u16,
        manifest: &Manifest<T, U>,
        command_receiver: &CommandReceiver<T>,
        pawn_key: LocalActorKey,
        command: &Box<dyn Event<T>>,
    ) -> bool {
        //Write command payload
        let mut command_payload_bytes = Vec::<u8>::new();
        command.as_ref().write(&mut command_payload_bytes);
        // write past commands, capped at MAX_PAST_COMMANDS
        let past_commands_number = command_receiver
            .command_history_count(pawn_key)
            .min(MAX_PAST_COMMANDS);
        let mut past_command_index: u8 = 0;
        if let Some(mut iter) = command_receiver.command_history_iter(pawn_key, true) {
            while past_command_index < past_commands_number {
                if let Some((past_tick, past_command)) = iter.next() {
                    // get tick diff between commands
                    // NOTE(review): despite the `_i8` suffix, this is an i16;
                    // only diffs in 1..=255 fit the single u8 written below,
                    // so other past commands are skipped.
                    let diff_i8: i16 = wrapping_diff(past_tick, host_tick);
                    if diff_i8 > 0 && diff_i8 <= 255 {
                        // write the tick diff
                        command_payload_bytes.write_u8(diff_i8 as u8).unwrap();
                        // write the command payload
                        past_command.write(&mut command_payload_bytes);
                        past_command_index += 1;
                    }
                } else {
                    break;
                }
            }
        }
        //Write command "header"
        let mut command_total_bytes = Vec::<u8>::new();
        let type_id = command.as_ref().get_type_id();
        command_total_bytes
            .write_u16::<BigEndian>(pawn_key)
            .unwrap(); // write pawn key
        let naia_id = manifest.get_event_naia_id(&type_id); // get naia id
        command_total_bytes.write_u16::<BigEndian>(naia_id).unwrap(); // write naia id
        command_total_bytes.write_u8(past_command_index).unwrap(); // write past command number
        command_total_bytes.append(&mut command_payload_bytes); // write payload
        // Budget check: the +2 covers the manager-type and count bytes that
        // `get_bytes` prepends before the first command in a packet.
        let mut hypothetical_next_payload_size = self.bytes_number() + command_total_bytes.len();
        if self.command_count == 0 {
            hypothetical_next_payload_size += 2;
        }
        if hypothetical_next_payload_size < MTU_SIZE {
            self.command_count += 1;
            self.command_working_bytes.append(&mut command_total_bytes);
            return true;
        } else {
            return false;
        }
    }
/// Writes an Event into the Writer's internal buffer, which will eventually
/// be put into the outgoing packet
pub fn write_event<T: EventType, U: ActorType>(
&mut self,
manifest: &Manifest<T, U>,
event: &Box<dyn Event<T>>,
) -> bool {
return self.event_writer.write_event(manifest, event);
}
}
|
// Copyright (c) 2018-2022 Ministerio de Fomento
// Instituto de Ciencias de la Construcción Eduardo Torroja (IETcc-CSIC)
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// Author(s): Rafael Villar Burke <pachi@ietcc.csic.es>,
// Daniel Jiménez González <dani@ietcc.csic.es>,
// Marta Sorribes Gil <msorribes@ietcc.csic.es>
//! Services
use std::fmt;
use std::str;
use serde::{Deserialize, Serialize};
use crate::error::EpbdError;
/// End use (service) the energy is destined for.
///
/// Some services may already be included in the consumption of others, such as
/// humidification (HU) within CAL, dehumidification (DHU) within REF, or VEN
/// within CAL and/or REF.
///
/// Note also that some services, such as lighting, may be considered non-EPB
/// in some cases (e.g. private residential); in that case the consumption must
/// not be reported as ILU but as NEPB.
#[allow(non_camel_case_types)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Service {
    /// DHW (domestic hot water)
    ACS,
    /// Heating (including humidification)
    CAL,
    /// Cooling (including dehumidification)
    REF,
    /// Ventilation, including heat recovery (when separate from heating or cooling)
    VEN,
    /// Lighting (only when considered as EPB use)
    ILU,
    /// Generic non EPB use
    NEPB,
    /// Energy feeding an electricity cogeneration system
    /// It accounts for energy used for electricity generation and excludes all
    /// energy that can attributed to thermal use
    COGEN,
}
impl Service {
    /// List of all available services
    pub const SERVICES_ALL: [Service; 7] = [
        Service::ACS,
        Service::CAL,
        Service::REF,
        Service::VEN,
        Service::ILU,
        Service::NEPB,
        Service::COGEN,
    ];
    /// List EPB services
    pub const SERVICES_EPB: [Service; 5] = [
        Service::ACS,
        Service::CAL,
        Service::REF,
        Service::VEN,
        Service::ILU,
    ];
    /// Check if service is an EPB service
    /// This doesn't include the NEPB and COGEN services
    pub fn is_epb(&self) -> bool {
        *self != Self::NEPB && *self != Self::COGEN
    }
    /// Check if service is a non EPB service
    /// This doesn't include the COGEN service
    pub fn is_nepb(&self) -> bool {
        *self == Self::NEPB
    }
    /// Check if service is for electricity cogeneration
    pub fn is_cogen(&self) -> bool {
        *self == Self::COGEN
    }
}
impl str::FromStr for Service {
    type Err = EpbdError;
    /// Parses a service from its canonical variant-name string (e.g. "ACS").
    fn from_str(s: &str) -> Result<Service, Self::Err> {
        match s {
            "ACS" => Ok(Service::ACS),
            "CAL" => Ok(Service::CAL),
            "REF" => Ok(Service::REF),
            "VEN" => Ok(Service::VEN),
            "ILU" => Ok(Service::ILU),
            "NEPB" => Ok(Service::NEPB),
            "COGEN" => Ok(Service::COGEN),
            // Any other string is reported back inside the parse error.
            _ => Err(EpbdError::ParseError(s.into())),
        }
    }
}
impl std::fmt::Display for Service {
    /// Displays the service as its variant name by delegating straight to the
    /// derived `Debug` implementation (identical output to `{:?}`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
|
mod webgl2_render_context;
//mod webgl2_render_graph_executor;
mod utils;
mod webgl2_render_resource_context;
pub use webgl2_render_context::*;
//pub use webgl2_render_graph_executor::*;
pub use webgl2_render_resource_context::*;
pub use js_sys;
pub use wasm_bindgen::JsCast;
pub use web_sys::{
WebGl2RenderingContext, WebGlBuffer, WebGlProgram, WebGlShader, WebGlTexture,
WebGlUniformLocation, WebGlVertexArrayObject,
};
/// Short alias for the raw WebGL2 rendering context type.
pub type Gl = WebGl2RenderingContext;
pub use utils::*;
|
pub mod claims;
|
use crate::diesel::ExpressionMethods;
use crate::diesel::QueryDsl;
use crate::diesel::RunQueryDsl;
use crate::models;
use crate::schema::users;
use crate::virtual_schema::users_todos;
use bcrypt;
use diesel::result;
use diesel::sql_query;
use serde::ser::SerializeStruct;
use uuid::Uuid;
/// Main user model that will be used for interaction with users
/// in the database. All the interaction methods should be attached
/// to this model.
#[derive(Queryable, PartialEq, Debug, Clone)]
pub struct User {
    // UUID string primary key (generated in `User::new`).
    pub id: String,
    pub email: String,
    // bcrypt hash (see `NewUser::create`); intentionally omitted from the
    // manual `Serialize` impl below.
    pub password: String,
}
impl serde::Serialize for User {
    /// Serializes the user for API responses.
    ///
    /// The `password` field is deliberately omitted so hashes never leave the
    /// server. The declared field count must therefore be 2 — the number of
    /// fields actually written — since length-sensitive serializers rely on it
    /// (the original declared 3).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut s = serializer.serialize_struct("User", 2)?;
        s.serialize_field("id", &self.id)?;
        s.serialize_field("email", &self.email)?;
        s.end()
    }
}
impl User {
    /// Create a new user with a freshly generated UUID id; the password is
    /// expected to be hashed already.
    pub fn new(email: String, hashed_password: String) -> User {
        User {
            id: Uuid::new_v4().to_string(),
            email,
            password: hashed_password,
        }
    }
    /// Get all users out of the db
    pub fn all(connection: &crate::diesel::PgConnection) -> Result<Vec<Self>, result::Error> {
        users::table.load::<Self>(connection)
    }
    /// Find single user by its email; `Ok(None)` when no row matches.
    pub fn find_by_email(
        connection: &crate::diesel::PgConnection,
        email: &str,
    ) -> Result<Option<User>, result::Error> {
        // `map` replaces a match that only re-wrapped Ok/Err; `pop` keeps the
        // original "last matching row wins" behaviour.
        users::table
            .filter(users::email.eq(email))
            .load::<User>(connection)
            .map(|mut results| results.pop())
    }
    /// Add todo for selected user
    pub fn add_todo(
        &self,
        connection: &crate::diesel::PgConnection,
        todo_content: &str,
    ) -> Result<models::todo::Todo, result::Error> {
        // `todo_content` is already a &str; no extra borrow needed.
        models::todo::NewTodo::create(connection, &self.id, todo_content)
    }
    /// Generate authentication JWT token
    pub fn generate_jwt(&self) -> String {
        // `self` is already a reference; `&self` would pass a needless `&&User`.
        crate::services::jwt::generate(self)
    }
    /// Convert decoded claims from JWT token into an User object
    ///
    /// The password is unknown at this point and is left empty.
    pub fn from_jwt(claims: &crate::services::jwt::Claims) -> Self {
        User {
            id: String::from(&claims.sub),
            email: String::from(&claims.email),
            password: String::new(),
        }
    }
}
/// User model that implements method to retrieve user with his todos together
#[derive(Queryable, PartialEq, Debug, serde::Serialize)]
pub struct UserWithTodo {
    pub id: String,
    pub email: String,
    // All todos belonging to this user (populated by `show`).
    pub todos: Vec<models::todo::Todo>,
}
/// Temporary struct for incoming data from the join sql query
///
/// One instance per joined row: `id`/`content`/`checked` describe the todo,
/// while `user_id`/`email` repeat the owning user's columns on every row
/// (see how `show` consumes them).
#[derive(QueryableByName, PartialEq, Debug)]
#[table_name = "users_todos"]
struct TempUserWithTodo {
    // The todo's id (not the user's).
    id: String,
    email: String,
    content: String,
    checked: bool,
    // The owning user's id.
    user_id: String,
}
impl UserWithTodo {
    /// Get user struct with todos included
    ///
    /// SECURITY(review): `id` is interpolated into the SQL text via `format!`,
    /// which is a SQL-injection risk if `id` can ever be attacker-controlled;
    /// prefer a diesel bind parameter (`sql_query(...).bind::<Text, _>(id)`).
    ///
    /// NOTE(review): a user with zero todos presumably yields no rows from the
    /// join and is therefore reported as `NotFound` — confirm that is intended.
    pub fn show(connection: &crate::diesel::PgConnection, id: &str) -> Result<Self, result::Error> {
        let results = sql_query(format!(include_str!("../../sql/user_with_todos.sql"), id))
            .load::<TempUserWithTodo>(connection)?;
        // The user columns repeat on every joined row; take them from the first.
        let mut user = match results.first() {
            Some(item) => UserWithTodo {
                id: String::from(&item.user_id),
                email: String::from(&item.email),
                todos: vec![],
            },
            // No rows at all: treat the user as missing.
            _ => return Err(result::Error::NotFound),
        };
        // Each row carries one todo; collect them all.
        for todo in results.iter() {
            user.todos.push(models::todo::Todo {
                id: String::from(&todo.id),
                user_id: String::from(&todo.user_id),
                content: String::from(&todo.content),
                checked: todo.checked,
            });
        }
        Ok(user)
    }
}
/// Struct for attributes required to create new user
/// new user can be created using this struct method create.
#[derive(Queryable, Insertable, Debug, serde::Deserialize)]
#[table_name = "users"]
pub struct NewUser {
    pub email: String,
    // Stored hashed; the plaintext is only seen by `NewUser::create`.
    pub password: String,
}
impl NewUser {
    /// Create new user with email and password
    /// password will be automatically hashed into bcrypt.
    pub fn create<'a>(
        connection: &crate::diesel::PgConnection,
        email: &'a str,
        password: &'a str,
    ) -> Result<User, result::Error> {
        let hashed_password = match bcrypt::hash(password, bcrypt::DEFAULT_COST) {
            Ok(hashed) => hashed,
            Err(e) => {
                println!("Hashing password error: {:?}", e);
                // NOTE(review): `__Nonexhaustive` is an internal diesel variant
                // not meant to be constructed; a dedicated application error
                // type would be a better fit here.
                return Err(result::Error::__Nonexhaustive);
            }
        };
        let values = Self {
            email: String::from(email),
            // bcrypt already returned an owned String; the original
            // `.to_string()` was a redundant allocation.
            password: hashed_password,
        };
        diesel::insert_into(users::table)
            .values(&values)
            .get_result::<User>(connection)
    }
}
|
use crate::spatial_ref::SpatialRef;
use crate::utils::{_last_null_pointer_err, _string};
use crate::vector::layer::Layer;
use gdal_sys::{
self, OGRFeatureDefnH, OGRFieldDefnH, OGRFieldType, OGRGeomFieldDefnH, OGRwkbGeometryType,
};
use libc::c_int;
use crate::errors::*;
/// Layer definition
///
/// Defines the fields available for features in a layer.
#[derive(Debug)]
pub struct Defn {
    // Raw OGR feature-definition handle this wrapper operates on.
    c_defn: OGRFeatureDefnH,
}
impl Defn {
    /// Creates a new Defn by wrapping a C pointer
    ///
    /// # Safety
    /// This method operates on a raw C pointer
    pub unsafe fn from_c_defn(c_defn: OGRFeatureDefnH) -> Defn {
        Defn { c_defn }
    }
    /// Returns the wrapped C pointer
    ///
    /// # Safety
    /// This method returns a raw C pointer
    pub unsafe fn c_defn(&self) -> OGRFeatureDefnH {
        self.c_defn
    }
    /// Iterate over the field schema of this layer.
    pub fn fields(&self) -> FieldIterator {
        // Field count is fetched once up front; the iterator then walks
        // indices 0..total.
        let total = unsafe { gdal_sys::OGR_FD_GetFieldCount(self.c_defn) } as isize;
        FieldIterator {
            defn: self,
            c_feature_defn: self.c_defn,
            next_id: 0,
            total,
        }
    }
    /// Iterate over the geometry field schema of this layer.
    pub fn geom_fields(&self) -> GeomFieldIterator {
        let total = unsafe { gdal_sys::OGR_FD_GetGeomFieldCount(self.c_defn) } as isize;
        GeomFieldIterator {
            defn: self,
            c_feature_defn: self.c_defn,
            next_id: 0,
            total,
        }
    }
    /// Builds a `Defn` from a layer's definition handle.
    ///
    /// NOTE(review): `OGR_L_GetLayerDefn` presumably returns a handle owned by
    /// the layer, so the result should not outlive `lyr` — confirm against the
    /// GDAL documentation.
    pub fn from_layer(lyr: &Layer) -> Defn {
        let c_defn = unsafe { gdal_sys::OGR_L_GetLayerDefn(lyr.c_layer()) };
        Defn { c_defn }
    }
}
/// Iterator over the (non-geometry) field definitions of a `Defn`.
pub struct FieldIterator<'a> {
    // Keeps the Defn borrowed so the raw handles below remain valid.
    defn: &'a Defn,
    c_feature_defn: OGRFeatureDefnH,
    // Index of the next field to yield, in 0..total.
    next_id: isize,
    total: isize,
}
impl<'a> Iterator for FieldIterator<'a> {
    type Item = Field<'a>;
    /// Yields each field definition in declaration order, then `None`.
    #[inline]
    fn next(&mut self) -> Option<Field<'a>> {
        if self.next_id == self.total {
            None
        } else {
            let c_field_defn = unsafe {
                gdal_sys::OGR_FD_GetFieldDefn(self.c_feature_defn, self.next_id as c_int)
            };
            self.next_id += 1;
            Some(Field {
                _defn: self.defn,
                c_field_defn,
            })
        }
    }
}
/// One field definition of a layer, borrowed from its `Defn`.
pub struct Field<'a> {
    // Held only to tie this field's lifetime to the owning Defn.
    _defn: &'a Defn,
    c_field_defn: OGRFieldDefnH,
}
impl<'a> Field<'a> {
    /// Get the name of this field.
    pub fn name(&'a self) -> String {
        let rv = unsafe { gdal_sys::OGR_Fld_GetNameRef(self.c_field_defn) };
        _string(rv)
    }
    /// Get the type (`OGRFieldType`) of this field.
    pub fn field_type(&'a self) -> OGRFieldType::Type {
        unsafe { gdal_sys::OGR_Fld_GetType(self.c_field_defn) }
    }
    /// Get the width of this field.
    pub fn width(&'a self) -> i32 {
        unsafe { gdal_sys::OGR_Fld_GetWidth(self.c_field_defn) }
    }
    /// Get the precision of this field.
    pub fn precision(&'a self) -> i32 {
        unsafe { gdal_sys::OGR_Fld_GetPrecision(self.c_field_defn) }
    }
}
/// Iterator over the geometry field definitions of a `Defn`.
pub struct GeomFieldIterator<'a> {
    // Keeps the Defn borrowed so the raw handles below remain valid.
    defn: &'a Defn,
    c_feature_defn: OGRFeatureDefnH,
    // Index of the next geometry field to yield, in 0..total.
    next_id: isize,
    total: isize,
}
impl<'a> Iterator for GeomFieldIterator<'a> {
    type Item = GeomField<'a>;
    /// Yields each geometry field definition in declaration order, then `None`.
    #[inline]
    fn next(&mut self) -> Option<GeomField<'a>> {
        if self.next_id == self.total {
            None
        } else {
            let c_field_defn = unsafe {
                gdal_sys::OGR_FD_GetGeomFieldDefn(self.c_feature_defn, self.next_id as c_int)
            };
            self.next_id += 1;
            Some(GeomField {
                _defn: self.defn,
                c_field_defn,
            })
        }
    }
}
// http://gdal.org/classOGRGeomFieldDefn.html
/// One geometry field definition of a layer, borrowed from its `Defn`.
pub struct GeomField<'a> {
    // Held only to tie this field's lifetime to the owning Defn.
    _defn: &'a Defn,
    c_field_defn: OGRGeomFieldDefnH,
}
impl<'a> GeomField<'a> {
    /// Get the name of this field.
    pub fn name(&'a self) -> String {
        let rv = unsafe { gdal_sys::OGR_GFld_GetNameRef(self.c_field_defn) };
        _string(rv)
    }
    /// Get the geometry type (`OGRwkbGeometryType`) of this field.
    pub fn field_type(&'a self) -> OGRwkbGeometryType::Type {
        unsafe { gdal_sys::OGR_GFld_GetType(self.c_field_defn) }
    }
    /// Get the spatial reference system of this geometry field.
    ///
    /// Errors when OGR returns a null pointer (no spatial reference set).
    pub fn spatial_ref(&'a self) -> Result<SpatialRef> {
        let c_obj = unsafe { gdal_sys::OGR_GFld_GetSpatialRef(self.c_field_defn) };
        if c_obj.is_null() {
            return Err(_last_null_pointer_err("OGR_GFld_GetSpatialRef"));
        }
        SpatialRef::from_c_obj(c_obj)
    }
}
|
mod image;
mod root_entry;
mod bios_param;
pub use self::image::Image;
pub use self::root_entry::RootEntry;
pub use self::bios_param::BIOSParam;
/// Returns `true` when `cluster_num` addresses a real data cluster.
///
/// Valid clusters are 2..=0xFEF: clusters 0 and 1 are reserved, and values of
/// 0xFF0 and above are presumably the FAT12 reserved/bad/end-of-chain markers
/// (confirm against the BIOS parameter block handling in this crate).
pub fn cluster_num_is_valid(cluster_num: u16) -> bool {
    // Range::contains over a manual double comparison
    // (clippy: manual_range_contains).
    (2..0xff0).contains(&cluster_num)
}
|
use crate::tag::Tag;
/// View into a subfield of a MARC field
pub struct Subfield<'a> {
    // Tag of the field this subfield belongs to.
    tag: Tag,
    // Single-byte subfield code (e.g. b'a').
    identifier: u8,
    // Raw subfield payload, borrowed from the underlying record data.
    data: &'a [u8],
}
use crate::{client::*, match_controller::*};
use futures::prelude::*;
use mahjong::{match_state::*, messages::*};
use std::{collections::HashMap, sync::Arc};
use thespian::*;
use tracing::*;
use tracing_futures::Instrument;
use warp::Filter;
mod client;
mod match_controller;
/// Entry point: sets up tracing, spawns the global `GameState` actor, and
/// serves the websocket endpoint (`/client`) plus the embedded test page (`/`)
/// on 127.0.0.1:3030.
#[tokio::main]
async fn main() {
    // Setup the global logger.
    let subscriber = tracing_subscriber::FmtSubscriber::builder()
        .with_max_level(Level::TRACE)
        .finish();
    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
    // Create the game state actor and spawn it, holding on to its proxy so that the
    // socket tasks can still communicate with it.
    let stage = GameState::new().into_stage();
    let game = stage.proxy();
    tokio::spawn(stage.run());
    // Shared generator of unique client connection IDs.
    let client_id_generator = Arc::new(ClientIdGenerator::new());
    // `/client` upgrades to a websocket and runs one message pump per connection.
    let client = warp::path("client")
        .and(warp::ws())
        .map(move |ws: warp::ws::Ws| {
            let game = game.clone();
            let id = client_id_generator.next();
            ws.on_upgrade(move |socket| {
                async move {
                    // Perform the handshake sequence with the client in order to initiate the session.
                    let (mut client, mut stream) =
                        match ClientController::perform_handshake(id, socket, game).await {
                            Ok(result) => result,
                            // Log the failed connection attempt and then disconnect from the client.
                            Err(err) => {
                                warn!(%err, "Failed to establish connection with client");
                                return;
                            }
                        };
                    // Forward every socket message to the client actor until
                    // the socket closes or errors.
                    while let Some(message) = stream.next().await {
                        match message {
                            Ok(message) => {
                                if message.is_close() {
                                    info!("Socket connection closed, shutting down client");
                                    // TODO: Actually shut down the client actor.
                                    return;
                                }
                                let result = client
                                    .handle_message(message)
                                    .expect("Failed to communicate with client actor")
                                    .await;
                                if let Err(err) = result {
                                    error!("Error handling client message: {:?}", err);
                                }
                            }
                            Err(err) => {
                                info!(%err, "Received error message from socket, cancelling connection");
                                break;
                            }
                        }
                    }
                    // TODO: Notify game that the client has disconnected.
                }
                // Tag every log line from this connection with its client id.
                .instrument(trace_span!("Socket message pump", %id))
            })
        });
    // `/` serves the embedded test page.
    let index = warp::path::end().map(|| warp::reply::html(INDEX_HTML));
    let routes = index.or(client);
    warp::serve(routes).run(([127, 0, 0, 1], 3030)).await;
}
/// Central storage of state data for the game.
///
/// This struct simulates the role of a database, acting as central storage of state data for the game.
#[derive(Debug, Default, Actor)]
pub struct GameState {
    // All known player accounts, keyed by account id.
    accounts: HashMap<AccountId, Account>,
    // TODO: These ID counters should be generated by the database rather than the server.
    account_id_counter: u64,
    match_id_counter: u32,
}
impl GameState {
pub fn new() -> Self {
Default::default()
}
}
#[thespian::actor]
impl GameState {
    /// Creates a brand new player account with dummy credentials and default
    /// starting state, stores it, and returns a clone of the stored account.
    ///
    /// Panics (via `assert!`) if the freshly minted ID collides with an
    /// existing account, which would indicate a broken counter invariant.
    #[tracing::instrument(skip(self))]
    pub fn create_account(&mut self) -> Account {
        // Increment the account ID counter to get the next unused ID.
        self.account_id_counter += 1;
        let id = AccountId::new(self.account_id_counter);
        info!(%id, "Creating new player account");
        // Create the credentials for the new account. For now we generate dummy
        // credentials, eventually this will be replaced with some system for
        // generating credentials.
        let token = String::from("DUMMY");
        let credentials = Credentials { id, token };
        // Setup initial state for the account. We'll start players out with 10,000
        // points because why not.
        let data = PlayerState { points: 10_000 };
        // Store the new account.
        let account = Account { credentials, data };
        let old = self.accounts.insert(id, account.clone());
        assert!(old.is_none(), "Created duplicate account, id: {:?}", id);
        account
    }

    /// Starts a new match by spawning a `MatchController` actor on the tokio
    /// runtime and returns a proxy for communicating with it.
    ///
    /// The spawned stage task is detached; the proxy is the only handle the
    /// caller gets back.
    #[tracing::instrument(skip(self))]
    pub fn start_match(&mut self) -> MatchControllerProxy {
        self.match_id_counter += 1;
        let id = MatchId::new(self.match_id_counter);
        info!(%id, "Starting a new match");
        let stage = MatchController::new(id).into_stage();
        let proxy = stage.proxy();
        tokio::spawn(stage.run());
        proxy
    }
}
/// A single player account as stored by the game state.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Account {
    // Credentials used to authenticate as this account (account ID + token).
    credentials: Credentials,
    // Gameplay state associated with the account (e.g. the player's points).
    data: PlayerState,
}
/// Minimal test page served at the site root.
///
/// Opens a WebSocket to `/client` on the current host and provides a
/// bare-bones chat-style UI for manually exercising the socket endpoint.
// NOTE(review): the markup still says "Warp Chat" / "warp chat" — this looks
// like a leftover from the warp chat example the page was adapted from;
// confirm whether the title should be updated for this project.
static INDEX_HTML: &str = r#"
<!DOCTYPE html>
<html>
<head>
<title>Warp Chat</title>
</head>
<body>
<h1>warp chat</h1>
<div id="chat">
<p><em>Connecting...</em></p>
</div>
<input type="text" id="text" />
<button type="button" id="send">Send</button>
<script type="text/javascript">
var uri = 'ws://' + location.host + '/client';
var ws = new WebSocket(uri);
function message(data) {
var line = document.createElement('p');
line.innerText = data;
chat.appendChild(line);
}
ws.onopen = function() {
chat.innerHTML = "<p><em>Connected!</em></p>";
}
ws.onmessage = function(msg) {
message(msg.data);
};
send.onclick = function() {
var msg = text.value;
ws.send(msg);
text.value = '';
message('<You>: ' + msg);
};
</script>
</body>
</html>
"#;
|
#[doc = r"Register block"]
// Memory-mapped layout of the RCC peripheral. This appears to be generated
// code in the svd2rust style (the per-register docs below link to the
// svd2rust API docs) — do not edit field order or padding by hand.
// `#[repr(C)]` guarantees fields are laid out in declaration order; the
// hex offset in each `#[doc]` attribute is the register's byte offset from
// the peripheral base address.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - clock control register"]
    pub cr: CR,
    #[doc = "0x04 - RCC Internal Clock Source Calibration Register"]
    pub icscr: ICSCR,
    #[doc = "0x08 - RCC Clock Recovery RC Register"]
    pub crrcr: CRRCR,
    // `_reservedN` fields pad over address gaps that contain no registers,
    // keeping each subsequent field at its documented hardware offset.
    _reserved3: [u8; 4usize],
    #[doc = "0x10 - RCC Clock Configuration Register"]
    pub cfgr: CFGR,
    _reserved4: [u8; 4usize],
    #[doc = "0x18 - RCC Domain 1 Clock Configuration Register"]
    pub d1cfgr: D1CFGR,
    #[doc = "0x1c - RCC Domain 2 Clock Configuration Register"]
    pub d2cfgr: D2CFGR,
    #[doc = "0x20 - RCC Domain 3 Clock Configuration Register"]
    pub d3cfgr: D3CFGR,
    _reserved7: [u8; 4usize],
    #[doc = "0x28 - RCC PLLs Clock Source Selection Register"]
    pub pllckselr: PLLCKSELR,
    #[doc = "0x2c - RCC PLLs Configuration Register"]
    pub pllcfgr: PLLCFGR,
    #[doc = "0x30 - RCC PLL1 Dividers Configuration Register"]
    pub pll1divr: PLL1DIVR,
    #[doc = "0x34 - RCC PLL1 Fractional Divider Register"]
    pub pll1fracr: PLL1FRACR,
    #[doc = "0x38 - RCC PLL2 Dividers Configuration Register"]
    pub pll2divr: PLL2DIVR,
    #[doc = "0x3c - RCC PLL2 Fractional Divider Register"]
    pub pll2fracr: PLL2FRACR,
    #[doc = "0x40 - RCC PLL3 Dividers Configuration Register"]
    pub pll3divr: PLL3DIVR,
    #[doc = "0x44 - RCC PLL3 Fractional Divider Register"]
    pub pll3fracr: PLL3FRACR,
    _reserved15: [u8; 4usize],
    #[doc = "0x4c - RCC Domain 1 Kernel Clock Configuration Register"]
    pub d1ccipr: D1CCIPR,
    #[doc = "0x50 - RCC Domain 2 Kernel Clock Configuration Register"]
    pub d2ccip1r: D2CCIP1R,
    #[doc = "0x54 - RCC Domain 2 Kernel Clock Configuration Register"]
    pub d2ccip2r: D2CCIP2R,
    #[doc = "0x58 - RCC Domain 3 Kernel Clock Configuration Register"]
    pub d3ccipr: D3CCIPR,
    _reserved19: [u8; 4usize],
    #[doc = "0x60 - RCC Clock Source Interrupt Enable Register"]
    pub cier: CIER,
    #[doc = "0x64 - RCC Clock Source Interrupt Flag Register"]
    pub cifr: CIFR,
    #[doc = "0x68 - RCC Clock Source Interrupt Clear Register"]
    pub cicr: CICR,
    _reserved22: [u8; 4usize],
    #[doc = "0x70 - RCC Backup Domain Control Register"]
    pub bdcr: BDCR,
    #[doc = "0x74 - RCC Clock Control and Status Register"]
    pub csr: CSR,
    _reserved24: [u8; 4usize],
    #[doc = "0x7c - RCC AHB3 Reset Register"]
    pub ahb3rstr: AHB3RSTR,
    #[doc = "0x80 - RCC AHB1 Peripheral Reset Register"]
    pub ahb1rstr: AHB1RSTR,
    #[doc = "0x84 - RCC AHB2 Peripheral Reset Register"]
    pub ahb2rstr: AHB2RSTR,
    #[doc = "0x88 - RCC AHB4 Peripheral Reset Register"]
    pub ahb4rstr: AHB4RSTR,
    #[doc = "0x8c - RCC APB3 Peripheral Reset Register"]
    pub apb3rstr: APB3RSTR,
    #[doc = "0x90 - RCC APB1 Peripheral Reset Register"]
    pub apb1lrstr: APB1LRSTR,
    #[doc = "0x94 - RCC APB1 Peripheral Reset Register"]
    pub apb1hrstr: APB1HRSTR,
    #[doc = "0x98 - RCC APB2 Peripheral Reset Register"]
    pub apb2rstr: APB2RSTR,
    #[doc = "0x9c - RCC APB4 Peripheral Reset Register"]
    pub apb4rstr: APB4RSTR,
    #[doc = "0xa0 - RCC Global Control Register"]
    pub gcr: GCR,
    _reserved34: [u8; 4usize],
    #[doc = "0xa8 - RCC D3 Autonomous mode Register"]
    pub d3amr: D3AMR,
    _reserved35: [u8; 36usize],
    #[doc = "0xd0 - RCC Reset Status Register"]
    pub rsr: RSR,
    #[doc = "0xd4 - RCC AHB3 Clock Register"]
    pub ahb3enr: AHB3ENR,
    #[doc = "0xd8 - RCC AHB1 Clock Register"]
    pub ahb1enr: AHB1ENR,
    #[doc = "0xdc - RCC AHB2 Clock Register"]
    pub ahb2enr: AHB2ENR,
    #[doc = "0xe0 - RCC AHB4 Clock Register"]
    pub ahb4enr: AHB4ENR,
    #[doc = "0xe4 - RCC APB3 Clock Register"]
    pub apb3enr: APB3ENR,
    #[doc = "0xe8 - RCC APB1 Clock Register"]
    pub apb1lenr: APB1LENR,
    #[doc = "0xec - RCC APB1 Clock Register"]
    pub apb1henr: APB1HENR,
    #[doc = "0xf0 - RCC APB2 Clock Register"]
    pub apb2enr: APB2ENR,
    #[doc = "0xf4 - RCC APB4 Clock Register"]
    pub apb4enr: APB4ENR,
    _reserved45: [u8; 4usize],
    #[doc = "0xfc - RCC AHB3 Sleep Clock Register"]
    pub ahb3lpenr: AHB3LPENR,
    #[doc = "0x100 - RCC AHB1 Sleep Clock Register"]
    pub ahb1lpenr: AHB1LPENR,
    #[doc = "0x104 - RCC AHB2 Sleep Clock Register"]
    pub ahb2lpenr: AHB2LPENR,
    #[doc = "0x108 - RCC AHB4 Sleep Clock Register"]
    pub ahb4lpenr: AHB4LPENR,
    #[doc = "0x10c - RCC APB3 Sleep Clock Register"]
    pub apb3lpenr: APB3LPENR,
    #[doc = "0x110 - RCC APB1 Low Sleep Clock Register"]
    pub apb1llpenr: APB1LLPENR,
    #[doc = "0x114 - RCC APB1 High Sleep Clock Register"]
    pub apb1hlpenr: APB1HLPENR,
    #[doc = "0x118 - RCC APB2 Sleep Clock Register"]
    pub apb2lpenr: APB2LPENR,
    #[doc = "0x11c - RCC APB4 Sleep Clock Register"]
    pub apb4lpenr: APB4LPENR,
    _reserved54: [u8; 16usize],
    // The `c1_`-prefixed registers below mirror the enable/sleep registers
    // per the per-CPU (core 1) register bank starting at offset 0x130.
    #[doc = "0x130 - RCC Reset Status Register"]
    pub c1_rsr: C1_RSR,
    #[doc = "0x134 - RCC AHB3 Clock Register"]
    pub c1_ahb3enr: C1_AHB3ENR,
    #[doc = "0x138 - RCC AHB1 Clock Register"]
    pub c1_ahb1enr: C1_AHB1ENR,
    #[doc = "0x13c - RCC AHB2 Clock Register"]
    pub c1_ahb2enr: C1_AHB2ENR,
    #[doc = "0x140 - RCC AHB4 Clock Register"]
    pub c1_ahb4enr: C1_AHB4ENR,
    #[doc = "0x144 - RCC APB3 Clock Register"]
    pub c1_apb3enr: C1_APB3ENR,
    #[doc = "0x148 - RCC APB1 Clock Register"]
    pub c1_apb1lenr: C1_APB1LENR,
    #[doc = "0x14c - RCC APB1 Clock Register"]
    pub c1_apb1henr: C1_APB1HENR,
    #[doc = "0x150 - RCC APB2 Clock Register"]
    pub c1_apb2enr: C1_APB2ENR,
    #[doc = "0x154 - RCC APB4 Clock Register"]
    pub c1_apb4enr: C1_APB4ENR,
    _reserved64: [u8; 4usize],
    #[doc = "0x15c - RCC AHB3 Sleep Clock Register"]
    pub c1_ahb3lpenr: C1_AHB3LPENR,
    #[doc = "0x160 - RCC AHB1 Sleep Clock Register"]
    pub c1_ahb1lpenr: C1_AHB1LPENR,
    #[doc = "0x164 - RCC AHB2 Sleep Clock Register"]
    pub c1_ahb2lpenr: C1_AHB2LPENR,
    #[doc = "0x168 - RCC AHB4 Sleep Clock Register"]
    pub c1_ahb4lpenr: C1_AHB4LPENR,
    #[doc = "0x16c - RCC APB3 Sleep Clock Register"]
    pub c1_apb3lpenr: C1_APB3LPENR,
    #[doc = "0x170 - RCC APB1 Low Sleep Clock Register"]
    pub c1_apb1llpenr: C1_APB1LLPENR,
    #[doc = "0x174 - RCC APB1 High Sleep Clock Register"]
    pub c1_apb1hlpenr: C1_APB1HLPENR,
    #[doc = "0x178 - RCC APB2 Sleep Clock Register"]
    pub c1_apb2lpenr: C1_APB2LPENR,
    #[doc = "0x17c - RCC APB4 Sleep Clock Register"]
    pub c1_apb4lpenr: C1_APB4LPENR,
}
// Accessor items for the core RCC clock registers (CR, ICSCR, CRRCR, CFGR,
// D1CFGR, D2CFGR, D3CFGR). For each register `XXX` there is a `Reg` type
// alias, a hidden marker type `_XXX`, `Readable`/`Writable` marker impls
// (CRRCR is read-only and gets no `Writable`), and a module holding the
// field-level reader/writer proxies.
#[doc = "clock control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cr](cr) module"]
pub type CR = crate::Reg<u32, _CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CR;
#[doc = "`read()` method returns [cr::R](cr::R) reader structure"]
impl crate::Readable for CR {}
#[doc = "`write(|w| ..)` method takes [cr::W](cr::W) writer structure"]
impl crate::Writable for CR {}
#[doc = "clock control register"]
pub mod cr;
#[doc = "RCC Internal Clock Source Calibration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [icscr](icscr) module"]
pub type ICSCR = crate::Reg<u32, _ICSCR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ICSCR;
#[doc = "`read()` method returns [icscr::R](icscr::R) reader structure"]
impl crate::Readable for ICSCR {}
#[doc = "`write(|w| ..)` method takes [icscr::W](icscr::W) writer structure"]
impl crate::Writable for ICSCR {}
#[doc = "RCC Internal Clock Source Calibration Register"]
pub mod icscr;
#[doc = "RCC Clock Recovery RC Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [crrcr](crrcr) module"]
pub type CRRCR = crate::Reg<u32, _CRRCR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CRRCR;
#[doc = "`read()` method returns [crrcr::R](crrcr::R) reader structure"]
impl crate::Readable for CRRCR {}
#[doc = "RCC Clock Recovery RC Register"]
pub mod crrcr;
#[doc = "RCC Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cfgr](cfgr) module"]
pub type CFGR = crate::Reg<u32, _CFGR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CFGR;
#[doc = "`read()` method returns [cfgr::R](cfgr::R) reader structure"]
impl crate::Readable for CFGR {}
#[doc = "`write(|w| ..)` method takes [cfgr::W](cfgr::W) writer structure"]
impl crate::Writable for CFGR {}
#[doc = "RCC Clock Configuration Register"]
pub mod cfgr;
#[doc = "RCC Domain 1 Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d1cfgr](d1cfgr) module"]
pub type D1CFGR = crate::Reg<u32, _D1CFGR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D1CFGR;
#[doc = "`read()` method returns [d1cfgr::R](d1cfgr::R) reader structure"]
impl crate::Readable for D1CFGR {}
#[doc = "`write(|w| ..)` method takes [d1cfgr::W](d1cfgr::W) writer structure"]
impl crate::Writable for D1CFGR {}
#[doc = "RCC Domain 1 Clock Configuration Register"]
pub mod d1cfgr;
#[doc = "RCC Domain 2 Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d2cfgr](d2cfgr) module"]
pub type D2CFGR = crate::Reg<u32, _D2CFGR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D2CFGR;
#[doc = "`read()` method returns [d2cfgr::R](d2cfgr::R) reader structure"]
impl crate::Readable for D2CFGR {}
#[doc = "`write(|w| ..)` method takes [d2cfgr::W](d2cfgr::W) writer structure"]
impl crate::Writable for D2CFGR {}
#[doc = "RCC Domain 2 Clock Configuration Register"]
pub mod d2cfgr;
#[doc = "RCC Domain 3 Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d3cfgr](d3cfgr) module"]
pub type D3CFGR = crate::Reg<u32, _D3CFGR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D3CFGR;
#[doc = "`read()` method returns [d3cfgr::R](d3cfgr::R) reader structure"]
impl crate::Readable for D3CFGR {}
#[doc = "`write(|w| ..)` method takes [d3cfgr::W](d3cfgr::W) writer structure"]
impl crate::Writable for D3CFGR {}
#[doc = "RCC Domain 3 Clock Configuration Register"]
pub mod d3cfgr;
// Accessor items for the RCC PLL registers: clock source selection
// (PLLCKSELR), common configuration (PLLCFGR), and per-PLL divider and
// fractional-divider registers for PLL1..PLL3. Each register gets a `Reg`
// type alias, a hidden marker type, `Readable`/`Writable` marker impls, and
// a module with the field-level reader/writer proxies.
#[doc = "RCC PLLs Clock Source Selection Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pllckselr](pllckselr) module"]
pub type PLLCKSELR = crate::Reg<u32, _PLLCKSELR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLLCKSELR;
#[doc = "`read()` method returns [pllckselr::R](pllckselr::R) reader structure"]
impl crate::Readable for PLLCKSELR {}
#[doc = "`write(|w| ..)` method takes [pllckselr::W](pllckselr::W) writer structure"]
impl crate::Writable for PLLCKSELR {}
#[doc = "RCC PLLs Clock Source Selection Register"]
pub mod pllckselr;
#[doc = "RCC PLLs Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pllcfgr](pllcfgr) module"]
pub type PLLCFGR = crate::Reg<u32, _PLLCFGR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLLCFGR;
#[doc = "`read()` method returns [pllcfgr::R](pllcfgr::R) reader structure"]
impl crate::Readable for PLLCFGR {}
#[doc = "`write(|w| ..)` method takes [pllcfgr::W](pllcfgr::W) writer structure"]
impl crate::Writable for PLLCFGR {}
#[doc = "RCC PLLs Configuration Register"]
pub mod pllcfgr;
#[doc = "RCC PLL1 Dividers Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pll1divr](pll1divr) module"]
pub type PLL1DIVR = crate::Reg<u32, _PLL1DIVR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLL1DIVR;
#[doc = "`read()` method returns [pll1divr::R](pll1divr::R) reader structure"]
impl crate::Readable for PLL1DIVR {}
#[doc = "`write(|w| ..)` method takes [pll1divr::W](pll1divr::W) writer structure"]
impl crate::Writable for PLL1DIVR {}
#[doc = "RCC PLL1 Dividers Configuration Register"]
pub mod pll1divr;
#[doc = "RCC PLL1 Fractional Divider Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pll1fracr](pll1fracr) module"]
pub type PLL1FRACR = crate::Reg<u32, _PLL1FRACR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLL1FRACR;
#[doc = "`read()` method returns [pll1fracr::R](pll1fracr::R) reader structure"]
impl crate::Readable for PLL1FRACR {}
#[doc = "`write(|w| ..)` method takes [pll1fracr::W](pll1fracr::W) writer structure"]
impl crate::Writable for PLL1FRACR {}
#[doc = "RCC PLL1 Fractional Divider Register"]
pub mod pll1fracr;
#[doc = "RCC PLL2 Dividers Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pll2divr](pll2divr) module"]
pub type PLL2DIVR = crate::Reg<u32, _PLL2DIVR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLL2DIVR;
#[doc = "`read()` method returns [pll2divr::R](pll2divr::R) reader structure"]
impl crate::Readable for PLL2DIVR {}
#[doc = "`write(|w| ..)` method takes [pll2divr::W](pll2divr::W) writer structure"]
impl crate::Writable for PLL2DIVR {}
#[doc = "RCC PLL2 Dividers Configuration Register"]
pub mod pll2divr;
#[doc = "RCC PLL2 Fractional Divider Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pll2fracr](pll2fracr) module"]
pub type PLL2FRACR = crate::Reg<u32, _PLL2FRACR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLL2FRACR;
#[doc = "`read()` method returns [pll2fracr::R](pll2fracr::R) reader structure"]
impl crate::Readable for PLL2FRACR {}
#[doc = "`write(|w| ..)` method takes [pll2fracr::W](pll2fracr::W) writer structure"]
impl crate::Writable for PLL2FRACR {}
#[doc = "RCC PLL2 Fractional Divider Register"]
pub mod pll2fracr;
#[doc = "RCC PLL3 Dividers Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pll3divr](pll3divr) module"]
pub type PLL3DIVR = crate::Reg<u32, _PLL3DIVR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLL3DIVR;
#[doc = "`read()` method returns [pll3divr::R](pll3divr::R) reader structure"]
impl crate::Readable for PLL3DIVR {}
#[doc = "`write(|w| ..)` method takes [pll3divr::W](pll3divr::W) writer structure"]
impl crate::Writable for PLL3DIVR {}
#[doc = "RCC PLL3 Dividers Configuration Register"]
pub mod pll3divr;
#[doc = "RCC PLL3 Fractional Divider Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pll3fracr](pll3fracr) module"]
pub type PLL3FRACR = crate::Reg<u32, _PLL3FRACR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PLL3FRACR;
#[doc = "`read()` method returns [pll3fracr::R](pll3fracr::R) reader structure"]
impl crate::Readable for PLL3FRACR {}
#[doc = "`write(|w| ..)` method takes [pll3fracr::W](pll3fracr::W) writer structure"]
impl crate::Writable for PLL3FRACR {}
#[doc = "RCC PLL3 Fractional Divider Register"]
pub mod pll3fracr;
// Accessor items for the RCC kernel clock configuration registers
// (D1CCIPR, D2CCIP1R, D2CCIP2R, D3CCIPR) and the clock source interrupt
// registers (CIER, CIFR, CICR). Each register gets a `Reg` type alias, a
// hidden marker type, `Readable`/`Writable` marker impls (CIFR is
// read-only and gets no `Writable`), and a module with the field proxies.
#[doc = "RCC Domain 1 Kernel Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d1ccipr](d1ccipr) module"]
pub type D1CCIPR = crate::Reg<u32, _D1CCIPR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D1CCIPR;
#[doc = "`read()` method returns [d1ccipr::R](d1ccipr::R) reader structure"]
impl crate::Readable for D1CCIPR {}
#[doc = "`write(|w| ..)` method takes [d1ccipr::W](d1ccipr::W) writer structure"]
impl crate::Writable for D1CCIPR {}
#[doc = "RCC Domain 1 Kernel Clock Configuration Register"]
pub mod d1ccipr;
#[doc = "RCC Domain 2 Kernel Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d2ccip1r](d2ccip1r) module"]
pub type D2CCIP1R = crate::Reg<u32, _D2CCIP1R>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D2CCIP1R;
#[doc = "`read()` method returns [d2ccip1r::R](d2ccip1r::R) reader structure"]
impl crate::Readable for D2CCIP1R {}
#[doc = "`write(|w| ..)` method takes [d2ccip1r::W](d2ccip1r::W) writer structure"]
impl crate::Writable for D2CCIP1R {}
#[doc = "RCC Domain 2 Kernel Clock Configuration Register"]
pub mod d2ccip1r;
#[doc = "RCC Domain 2 Kernel Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d2ccip2r](d2ccip2r) module"]
pub type D2CCIP2R = crate::Reg<u32, _D2CCIP2R>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D2CCIP2R;
#[doc = "`read()` method returns [d2ccip2r::R](d2ccip2r::R) reader structure"]
impl crate::Readable for D2CCIP2R {}
#[doc = "`write(|w| ..)` method takes [d2ccip2r::W](d2ccip2r::W) writer structure"]
impl crate::Writable for D2CCIP2R {}
#[doc = "RCC Domain 2 Kernel Clock Configuration Register"]
pub mod d2ccip2r;
#[doc = "RCC Domain 3 Kernel Clock Configuration Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d3ccipr](d3ccipr) module"]
pub type D3CCIPR = crate::Reg<u32, _D3CCIPR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D3CCIPR;
#[doc = "`read()` method returns [d3ccipr::R](d3ccipr::R) reader structure"]
impl crate::Readable for D3CCIPR {}
#[doc = "`write(|w| ..)` method takes [d3ccipr::W](d3ccipr::W) writer structure"]
impl crate::Writable for D3CCIPR {}
#[doc = "RCC Domain 3 Kernel Clock Configuration Register"]
pub mod d3ccipr;
#[doc = "RCC Clock Source Interrupt Enable Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cier](cier) module"]
pub type CIER = crate::Reg<u32, _CIER>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CIER;
#[doc = "`read()` method returns [cier::R](cier::R) reader structure"]
impl crate::Readable for CIER {}
#[doc = "`write(|w| ..)` method takes [cier::W](cier::W) writer structure"]
impl crate::Writable for CIER {}
#[doc = "RCC Clock Source Interrupt Enable Register"]
pub mod cier;
#[doc = "RCC Clock Source Interrupt Flag Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cifr](cifr) module"]
pub type CIFR = crate::Reg<u32, _CIFR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CIFR;
#[doc = "`read()` method returns [cifr::R](cifr::R) reader structure"]
impl crate::Readable for CIFR {}
#[doc = "RCC Clock Source Interrupt Flag Register"]
pub mod cifr;
#[doc = "RCC Clock Source Interrupt Clear Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cicr](cicr) module"]
pub type CICR = crate::Reg<u32, _CICR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CICR;
#[doc = "`read()` method returns [cicr::R](cicr::R) reader structure"]
impl crate::Readable for CICR {}
#[doc = "`write(|w| ..)` method takes [cicr::W](cicr::W) writer structure"]
impl crate::Writable for CICR {}
#[doc = "RCC Clock Source Interrupt Clear Register"]
pub mod cicr;
// Accessor items for the RCC backup domain control register (BDCR), the
// clock control and status register (CSR), and the first two peripheral
// reset registers (AHB3RSTR, AHB1RSTR). Each register gets a `Reg` type
// alias, a hidden marker type, `Readable`/`Writable` marker impls, and a
// module with the field-level reader/writer proxies.
#[doc = "RCC Backup Domain Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [bdcr](bdcr) module"]
pub type BDCR = crate::Reg<u32, _BDCR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _BDCR;
#[doc = "`read()` method returns [bdcr::R](bdcr::R) reader structure"]
impl crate::Readable for BDCR {}
#[doc = "`write(|w| ..)` method takes [bdcr::W](bdcr::W) writer structure"]
impl crate::Writable for BDCR {}
#[doc = "RCC Backup Domain Control Register"]
pub mod bdcr;
#[doc = "RCC Clock Control and Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [csr](csr) module"]
pub type CSR = crate::Reg<u32, _CSR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CSR;
#[doc = "`read()` method returns [csr::R](csr::R) reader structure"]
impl crate::Readable for CSR {}
#[doc = "`write(|w| ..)` method takes [csr::W](csr::W) writer structure"]
impl crate::Writable for CSR {}
#[doc = "RCC Clock Control and Status Register"]
pub mod csr;
#[doc = "RCC AHB3 Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb3rstr](ahb3rstr) module"]
pub type AHB3RSTR = crate::Reg<u32, _AHB3RSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB3RSTR;
#[doc = "`read()` method returns [ahb3rstr::R](ahb3rstr::R) reader structure"]
impl crate::Readable for AHB3RSTR {}
#[doc = "`write(|w| ..)` method takes [ahb3rstr::W](ahb3rstr::W) writer structure"]
impl crate::Writable for AHB3RSTR {}
#[doc = "RCC AHB3 Reset Register"]
pub mod ahb3rstr;
#[doc = "RCC AHB1 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb1rstr](ahb1rstr) module"]
pub type AHB1RSTR = crate::Reg<u32, _AHB1RSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB1RSTR;
#[doc = "`read()` method returns [ahb1rstr::R](ahb1rstr::R) reader structure"]
impl crate::Readable for AHB1RSTR {}
#[doc = "`write(|w| ..)` method takes [ahb1rstr::W](ahb1rstr::W) writer structure"]
impl crate::Writable for AHB1RSTR {}
#[doc = "RCC AHB1 Peripheral Reset Register"]
pub mod ahb1rstr;
// Accessor items for the remaining RCC peripheral reset registers visible
// in this chunk (AHB2RSTR, AHB4RSTR, APB3RSTR, APB1LRSTR, APB1HRSTR,
// APB2RSTR). Each register gets a `Reg` type alias, a hidden marker type,
// `Readable`/`Writable` marker impls, and a module with the field-level
// reader/writer proxies.
#[doc = "RCC AHB2 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb2rstr](ahb2rstr) module"]
pub type AHB2RSTR = crate::Reg<u32, _AHB2RSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB2RSTR;
#[doc = "`read()` method returns [ahb2rstr::R](ahb2rstr::R) reader structure"]
impl crate::Readable for AHB2RSTR {}
#[doc = "`write(|w| ..)` method takes [ahb2rstr::W](ahb2rstr::W) writer structure"]
impl crate::Writable for AHB2RSTR {}
#[doc = "RCC AHB2 Peripheral Reset Register"]
pub mod ahb2rstr;
#[doc = "RCC AHB4 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb4rstr](ahb4rstr) module"]
pub type AHB4RSTR = crate::Reg<u32, _AHB4RSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB4RSTR;
#[doc = "`read()` method returns [ahb4rstr::R](ahb4rstr::R) reader structure"]
impl crate::Readable for AHB4RSTR {}
#[doc = "`write(|w| ..)` method takes [ahb4rstr::W](ahb4rstr::W) writer structure"]
impl crate::Writable for AHB4RSTR {}
#[doc = "RCC AHB4 Peripheral Reset Register"]
pub mod ahb4rstr;
#[doc = "RCC APB3 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb3rstr](apb3rstr) module"]
pub type APB3RSTR = crate::Reg<u32, _APB3RSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB3RSTR;
#[doc = "`read()` method returns [apb3rstr::R](apb3rstr::R) reader structure"]
impl crate::Readable for APB3RSTR {}
#[doc = "`write(|w| ..)` method takes [apb3rstr::W](apb3rstr::W) writer structure"]
impl crate::Writable for APB3RSTR {}
#[doc = "RCC APB3 Peripheral Reset Register"]
pub mod apb3rstr;
#[doc = "RCC APB1 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb1lrstr](apb1lrstr) module"]
pub type APB1LRSTR = crate::Reg<u32, _APB1LRSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB1LRSTR;
#[doc = "`read()` method returns [apb1lrstr::R](apb1lrstr::R) reader structure"]
impl crate::Readable for APB1LRSTR {}
#[doc = "`write(|w| ..)` method takes [apb1lrstr::W](apb1lrstr::W) writer structure"]
impl crate::Writable for APB1LRSTR {}
#[doc = "RCC APB1 Peripheral Reset Register"]
pub mod apb1lrstr;
#[doc = "RCC APB1 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb1hrstr](apb1hrstr) module"]
pub type APB1HRSTR = crate::Reg<u32, _APB1HRSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB1HRSTR;
#[doc = "`read()` method returns [apb1hrstr::R](apb1hrstr::R) reader structure"]
impl crate::Readable for APB1HRSTR {}
#[doc = "`write(|w| ..)` method takes [apb1hrstr::W](apb1hrstr::W) writer structure"]
impl crate::Writable for APB1HRSTR {}
#[doc = "RCC APB1 Peripheral Reset Register"]
pub mod apb1hrstr;
#[doc = "RCC APB2 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb2rstr](apb2rstr) module"]
pub type APB2RSTR = crate::Reg<u32, _APB2RSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB2RSTR;
#[doc = "`read()` method returns [apb2rstr::R](apb2rstr::R) reader structure"]
impl crate::Readable for APB2RSTR {}
#[doc = "`write(|w| ..)` method takes [apb2rstr::W](apb2rstr::W) writer structure"]
impl crate::Writable for APB2RSTR {}
#[doc = "RCC APB2 Peripheral Reset Register"]
pub mod apb2rstr;
#[doc = "RCC APB4 Peripheral Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb4rstr](apb4rstr) module"]
pub type APB4RSTR = crate::Reg<u32, _APB4RSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB4RSTR;
#[doc = "`read()` method returns [apb4rstr::R](apb4rstr::R) reader structure"]
impl crate::Readable for APB4RSTR {}
#[doc = "`write(|w| ..)` method takes [apb4rstr::W](apb4rstr::W) writer structure"]
impl crate::Writable for APB4RSTR {}
#[doc = "RCC APB4 Peripheral Reset Register"]
pub mod apb4rstr;
#[doc = "RCC Global Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gcr](gcr) module"]
pub type GCR = crate::Reg<u32, _GCR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _GCR;
#[doc = "`read()` method returns [gcr::R](gcr::R) reader structure"]
impl crate::Readable for GCR {}
#[doc = "`write(|w| ..)` method takes [gcr::W](gcr::W) writer structure"]
impl crate::Writable for GCR {}
#[doc = "RCC Global Control Register"]
pub mod gcr;
#[doc = "RCC D3 Autonomous mode Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [d3amr](d3amr) module"]
pub type D3AMR = crate::Reg<u32, _D3AMR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _D3AMR;
#[doc = "`read()` method returns [d3amr::R](d3amr::R) reader structure"]
impl crate::Readable for D3AMR {}
#[doc = "`write(|w| ..)` method takes [d3amr::W](d3amr::W) writer structure"]
impl crate::Writable for D3AMR {}
#[doc = "RCC D3 Autonomous mode Register"]
pub mod d3amr;
#[doc = "RCC Reset Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rsr](rsr) module"]
pub type RSR = crate::Reg<u32, _RSR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RSR;
#[doc = "`read()` method returns [rsr::R](rsr::R) reader structure"]
impl crate::Readable for RSR {}
#[doc = "`write(|w| ..)` method takes [rsr::W](rsr::W) writer structure"]
impl crate::Writable for RSR {}
#[doc = "RCC Reset Status Register"]
pub mod rsr;
#[doc = "RCC Reset Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_rsr](c1_rsr) module"]
pub type C1_RSR = crate::Reg<u32, _C1_RSR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_RSR;
#[doc = "`read()` method returns [c1_rsr::R](c1_rsr::R) reader structure"]
impl crate::Readable for C1_RSR {}
#[doc = "`write(|w| ..)` method takes [c1_rsr::W](c1_rsr::W) writer structure"]
impl crate::Writable for C1_RSR {}
#[doc = "RCC Reset Status Register"]
pub mod c1_rsr;
// ---------------------------------------------------------------------------
// RCC peripheral clock ENABLE registers (*ENR).
//
// NOTE(review): svd2rust-generated — regenerate from the SVD rather than
// hand-editing. Same four-part pattern as the rest of the file: type alias on
// `crate::Reg<u32, _MARKER>`, hidden marker struct, `Readable`/`Writable`
// marker impls, and a `pub mod` with the field accessors. Registers come in
// pairs: a plain name (e.g. `AHB3ENR`) and a `C1_`-prefixed twin
// (e.g. `C1_AHB3ENR`) — the latter presumably the per-CPU (core 1) view of
// the same enable bits; confirm against the SVD.
// ---------------------------------------------------------------------------
#[doc = "RCC AHB3 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb3enr](c1_ahb3enr) module"]
pub type C1_AHB3ENR = crate::Reg<u32, _C1_AHB3ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB3ENR;
#[doc = "`read()` method returns [c1_ahb3enr::R](c1_ahb3enr::R) reader structure"]
impl crate::Readable for C1_AHB3ENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb3enr::W](c1_ahb3enr::W) writer structure"]
impl crate::Writable for C1_AHB3ENR {}
#[doc = "RCC AHB3 Clock Register"]
pub mod c1_ahb3enr;
#[doc = "RCC AHB3 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb3enr](ahb3enr) module"]
pub type AHB3ENR = crate::Reg<u32, _AHB3ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB3ENR;
#[doc = "`read()` method returns [ahb3enr::R](ahb3enr::R) reader structure"]
impl crate::Readable for AHB3ENR {}
#[doc = "`write(|w| ..)` method takes [ahb3enr::W](ahb3enr::W) writer structure"]
impl crate::Writable for AHB3ENR {}
#[doc = "RCC AHB3 Clock Register"]
pub mod ahb3enr;
#[doc = "RCC AHB1 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb1enr](ahb1enr) module"]
pub type AHB1ENR = crate::Reg<u32, _AHB1ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB1ENR;
#[doc = "`read()` method returns [ahb1enr::R](ahb1enr::R) reader structure"]
impl crate::Readable for AHB1ENR {}
#[doc = "`write(|w| ..)` method takes [ahb1enr::W](ahb1enr::W) writer structure"]
impl crate::Writable for AHB1ENR {}
#[doc = "RCC AHB1 Clock Register"]
pub mod ahb1enr;
#[doc = "RCC AHB1 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb1enr](c1_ahb1enr) module"]
pub type C1_AHB1ENR = crate::Reg<u32, _C1_AHB1ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB1ENR;
#[doc = "`read()` method returns [c1_ahb1enr::R](c1_ahb1enr::R) reader structure"]
impl crate::Readable for C1_AHB1ENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb1enr::W](c1_ahb1enr::W) writer structure"]
impl crate::Writable for C1_AHB1ENR {}
#[doc = "RCC AHB1 Clock Register"]
pub mod c1_ahb1enr;
#[doc = "RCC AHB2 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb2enr](c1_ahb2enr) module"]
pub type C1_AHB2ENR = crate::Reg<u32, _C1_AHB2ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB2ENR;
#[doc = "`read()` method returns [c1_ahb2enr::R](c1_ahb2enr::R) reader structure"]
impl crate::Readable for C1_AHB2ENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb2enr::W](c1_ahb2enr::W) writer structure"]
impl crate::Writable for C1_AHB2ENR {}
#[doc = "RCC AHB2 Clock Register"]
pub mod c1_ahb2enr;
#[doc = "RCC AHB2 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb2enr](ahb2enr) module"]
pub type AHB2ENR = crate::Reg<u32, _AHB2ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB2ENR;
#[doc = "`read()` method returns [ahb2enr::R](ahb2enr::R) reader structure"]
impl crate::Readable for AHB2ENR {}
#[doc = "`write(|w| ..)` method takes [ahb2enr::W](ahb2enr::W) writer structure"]
impl crate::Writable for AHB2ENR {}
#[doc = "RCC AHB2 Clock Register"]
pub mod ahb2enr;
#[doc = "RCC AHB4 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb4enr](ahb4enr) module"]
pub type AHB4ENR = crate::Reg<u32, _AHB4ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB4ENR;
#[doc = "`read()` method returns [ahb4enr::R](ahb4enr::R) reader structure"]
impl crate::Readable for AHB4ENR {}
#[doc = "`write(|w| ..)` method takes [ahb4enr::W](ahb4enr::W) writer structure"]
impl crate::Writable for AHB4ENR {}
#[doc = "RCC AHB4 Clock Register"]
pub mod ahb4enr;
#[doc = "RCC AHB4 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb4enr](c1_ahb4enr) module"]
pub type C1_AHB4ENR = crate::Reg<u32, _C1_AHB4ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB4ENR;
#[doc = "`read()` method returns [c1_ahb4enr::R](c1_ahb4enr::R) reader structure"]
impl crate::Readable for C1_AHB4ENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb4enr::W](c1_ahb4enr::W) writer structure"]
impl crate::Writable for C1_AHB4ENR {}
#[doc = "RCC AHB4 Clock Register"]
pub mod c1_ahb4enr;
#[doc = "RCC APB3 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb3enr](c1_apb3enr) module"]
pub type C1_APB3ENR = crate::Reg<u32, _C1_APB3ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB3ENR;
#[doc = "`read()` method returns [c1_apb3enr::R](c1_apb3enr::R) reader structure"]
impl crate::Readable for C1_APB3ENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb3enr::W](c1_apb3enr::W) writer structure"]
impl crate::Writable for C1_APB3ENR {}
#[doc = "RCC APB3 Clock Register"]
pub mod c1_apb3enr;
#[doc = "RCC APB3 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb3enr](apb3enr) module"]
pub type APB3ENR = crate::Reg<u32, _APB3ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB3ENR;
#[doc = "`read()` method returns [apb3enr::R](apb3enr::R) reader structure"]
impl crate::Readable for APB3ENR {}
#[doc = "`write(|w| ..)` method takes [apb3enr::W](apb3enr::W) writer structure"]
impl crate::Writable for APB3ENR {}
#[doc = "RCC APB3 Clock Register"]
pub mod apb3enr;
// NOTE(review): as with the reset registers, APB1 enables are split into
// "L"/"H" register pairs (presumably low/high halves of the APB1 peripheral
// set) — the generated doc text does not distinguish them; check the manual.
#[doc = "RCC APB1 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb1lenr](apb1lenr) module"]
pub type APB1LENR = crate::Reg<u32, _APB1LENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB1LENR;
#[doc = "`read()` method returns [apb1lenr::R](apb1lenr::R) reader structure"]
impl crate::Readable for APB1LENR {}
#[doc = "`write(|w| ..)` method takes [apb1lenr::W](apb1lenr::W) writer structure"]
impl crate::Writable for APB1LENR {}
#[doc = "RCC APB1 Clock Register"]
pub mod apb1lenr;
#[doc = "RCC APB1 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb1lenr](c1_apb1lenr) module"]
pub type C1_APB1LENR = crate::Reg<u32, _C1_APB1LENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB1LENR;
#[doc = "`read()` method returns [c1_apb1lenr::R](c1_apb1lenr::R) reader structure"]
impl crate::Readable for C1_APB1LENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb1lenr::W](c1_apb1lenr::W) writer structure"]
impl crate::Writable for C1_APB1LENR {}
#[doc = "RCC APB1 Clock Register"]
pub mod c1_apb1lenr;
#[doc = "RCC APB1 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb1henr](apb1henr) module"]
pub type APB1HENR = crate::Reg<u32, _APB1HENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB1HENR;
#[doc = "`read()` method returns [apb1henr::R](apb1henr::R) reader structure"]
impl crate::Readable for APB1HENR {}
#[doc = "`write(|w| ..)` method takes [apb1henr::W](apb1henr::W) writer structure"]
impl crate::Writable for APB1HENR {}
#[doc = "RCC APB1 Clock Register"]
pub mod apb1henr;
#[doc = "RCC APB1 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb1henr](c1_apb1henr) module"]
pub type C1_APB1HENR = crate::Reg<u32, _C1_APB1HENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB1HENR;
#[doc = "`read()` method returns [c1_apb1henr::R](c1_apb1henr::R) reader structure"]
impl crate::Readable for C1_APB1HENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb1henr::W](c1_apb1henr::W) writer structure"]
impl crate::Writable for C1_APB1HENR {}
#[doc = "RCC APB1 Clock Register"]
pub mod c1_apb1henr;
#[doc = "RCC APB2 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb2enr](c1_apb2enr) module"]
pub type C1_APB2ENR = crate::Reg<u32, _C1_APB2ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB2ENR;
#[doc = "`read()` method returns [c1_apb2enr::R](c1_apb2enr::R) reader structure"]
impl crate::Readable for C1_APB2ENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb2enr::W](c1_apb2enr::W) writer structure"]
impl crate::Writable for C1_APB2ENR {}
#[doc = "RCC APB2 Clock Register"]
pub mod c1_apb2enr;
#[doc = "RCC APB2 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb2enr](apb2enr) module"]
pub type APB2ENR = crate::Reg<u32, _APB2ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB2ENR;
#[doc = "`read()` method returns [apb2enr::R](apb2enr::R) reader structure"]
impl crate::Readable for APB2ENR {}
#[doc = "`write(|w| ..)` method takes [apb2enr::W](apb2enr::W) writer structure"]
impl crate::Writable for APB2ENR {}
#[doc = "RCC APB2 Clock Register"]
pub mod apb2enr;
#[doc = "RCC APB4 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb4enr](apb4enr) module"]
pub type APB4ENR = crate::Reg<u32, _APB4ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB4ENR;
#[doc = "`read()` method returns [apb4enr::R](apb4enr::R) reader structure"]
impl crate::Readable for APB4ENR {}
#[doc = "`write(|w| ..)` method takes [apb4enr::W](apb4enr::W) writer structure"]
impl crate::Writable for APB4ENR {}
#[doc = "RCC APB4 Clock Register"]
pub mod apb4enr;
#[doc = "RCC APB4 Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb4enr](c1_apb4enr) module"]
pub type C1_APB4ENR = crate::Reg<u32, _C1_APB4ENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB4ENR;
#[doc = "`read()` method returns [c1_apb4enr::R](c1_apb4enr::R) reader structure"]
impl crate::Readable for C1_APB4ENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb4enr::W](c1_apb4enr::W) writer structure"]
impl crate::Writable for C1_APB4ENR {}
#[doc = "RCC APB4 Clock Register"]
pub mod c1_apb4enr;
// ---------------------------------------------------------------------------
// RCC low-power (sleep-mode) clock enable registers (*LPENR).
//
// NOTE(review): svd2rust-generated — regenerate from the SVD rather than
// hand-editing. Same four-part pattern (type alias, hidden marker struct,
// `Readable`/`Writable` impls, field-accessor `pub mod`) and the same
// plain/`C1_`-prefixed pairing as the *ENR block; the `C1_` twins presumably
// are the per-CPU (core 1) view — confirm against the SVD.
// ---------------------------------------------------------------------------
#[doc = "RCC AHB3 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb3lpenr](c1_ahb3lpenr) module"]
pub type C1_AHB3LPENR = crate::Reg<u32, _C1_AHB3LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB3LPENR;
#[doc = "`read()` method returns [c1_ahb3lpenr::R](c1_ahb3lpenr::R) reader structure"]
impl crate::Readable for C1_AHB3LPENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb3lpenr::W](c1_ahb3lpenr::W) writer structure"]
impl crate::Writable for C1_AHB3LPENR {}
#[doc = "RCC AHB3 Sleep Clock Register"]
pub mod c1_ahb3lpenr;
#[doc = "RCC AHB3 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb3lpenr](ahb3lpenr) module"]
pub type AHB3LPENR = crate::Reg<u32, _AHB3LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB3LPENR;
#[doc = "`read()` method returns [ahb3lpenr::R](ahb3lpenr::R) reader structure"]
impl crate::Readable for AHB3LPENR {}
#[doc = "`write(|w| ..)` method takes [ahb3lpenr::W](ahb3lpenr::W) writer structure"]
impl crate::Writable for AHB3LPENR {}
#[doc = "RCC AHB3 Sleep Clock Register"]
pub mod ahb3lpenr;
#[doc = "RCC AHB1 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb1lpenr](ahb1lpenr) module"]
pub type AHB1LPENR = crate::Reg<u32, _AHB1LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB1LPENR;
#[doc = "`read()` method returns [ahb1lpenr::R](ahb1lpenr::R) reader structure"]
impl crate::Readable for AHB1LPENR {}
#[doc = "`write(|w| ..)` method takes [ahb1lpenr::W](ahb1lpenr::W) writer structure"]
impl crate::Writable for AHB1LPENR {}
#[doc = "RCC AHB1 Sleep Clock Register"]
pub mod ahb1lpenr;
#[doc = "RCC AHB1 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb1lpenr](c1_ahb1lpenr) module"]
pub type C1_AHB1LPENR = crate::Reg<u32, _C1_AHB1LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB1LPENR;
#[doc = "`read()` method returns [c1_ahb1lpenr::R](c1_ahb1lpenr::R) reader structure"]
impl crate::Readable for C1_AHB1LPENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb1lpenr::W](c1_ahb1lpenr::W) writer structure"]
impl crate::Writable for C1_AHB1LPENR {}
#[doc = "RCC AHB1 Sleep Clock Register"]
pub mod c1_ahb1lpenr;
#[doc = "RCC AHB2 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb2lpenr](c1_ahb2lpenr) module"]
pub type C1_AHB2LPENR = crate::Reg<u32, _C1_AHB2LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB2LPENR;
#[doc = "`read()` method returns [c1_ahb2lpenr::R](c1_ahb2lpenr::R) reader structure"]
impl crate::Readable for C1_AHB2LPENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb2lpenr::W](c1_ahb2lpenr::W) writer structure"]
impl crate::Writable for C1_AHB2LPENR {}
#[doc = "RCC AHB2 Sleep Clock Register"]
pub mod c1_ahb2lpenr;
#[doc = "RCC AHB2 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb2lpenr](ahb2lpenr) module"]
pub type AHB2LPENR = crate::Reg<u32, _AHB2LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB2LPENR;
#[doc = "`read()` method returns [ahb2lpenr::R](ahb2lpenr::R) reader structure"]
impl crate::Readable for AHB2LPENR {}
#[doc = "`write(|w| ..)` method takes [ahb2lpenr::W](ahb2lpenr::W) writer structure"]
impl crate::Writable for AHB2LPENR {}
#[doc = "RCC AHB2 Sleep Clock Register"]
pub mod ahb2lpenr;
#[doc = "RCC AHB4 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahb4lpenr](ahb4lpenr) module"]
pub type AHB4LPENR = crate::Reg<u32, _AHB4LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHB4LPENR;
#[doc = "`read()` method returns [ahb4lpenr::R](ahb4lpenr::R) reader structure"]
impl crate::Readable for AHB4LPENR {}
#[doc = "`write(|w| ..)` method takes [ahb4lpenr::W](ahb4lpenr::W) writer structure"]
impl crate::Writable for AHB4LPENR {}
#[doc = "RCC AHB4 Sleep Clock Register"]
pub mod ahb4lpenr;
#[doc = "RCC AHB4 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_ahb4lpenr](c1_ahb4lpenr) module"]
pub type C1_AHB4LPENR = crate::Reg<u32, _C1_AHB4LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_AHB4LPENR;
#[doc = "`read()` method returns [c1_ahb4lpenr::R](c1_ahb4lpenr::R) reader structure"]
impl crate::Readable for C1_AHB4LPENR {}
#[doc = "`write(|w| ..)` method takes [c1_ahb4lpenr::W](c1_ahb4lpenr::W) writer structure"]
impl crate::Writable for C1_AHB4LPENR {}
#[doc = "RCC AHB4 Sleep Clock Register"]
pub mod c1_ahb4lpenr;
#[doc = "RCC APB3 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb3lpenr](c1_apb3lpenr) module"]
pub type C1_APB3LPENR = crate::Reg<u32, _C1_APB3LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB3LPENR;
#[doc = "`read()` method returns [c1_apb3lpenr::R](c1_apb3lpenr::R) reader structure"]
impl crate::Readable for C1_APB3LPENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb3lpenr::W](c1_apb3lpenr::W) writer structure"]
impl crate::Writable for C1_APB3LPENR {}
#[doc = "RCC APB3 Sleep Clock Register"]
pub mod c1_apb3lpenr;
#[doc = "RCC APB3 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb3lpenr](apb3lpenr) module"]
pub type APB3LPENR = crate::Reg<u32, _APB3LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB3LPENR;
#[doc = "`read()` method returns [apb3lpenr::R](apb3lpenr::R) reader structure"]
impl crate::Readable for APB3LPENR {}
#[doc = "`write(|w| ..)` method takes [apb3lpenr::W](apb3lpenr::W) writer structure"]
impl crate::Writable for APB3LPENR {}
#[doc = "RCC APB3 Sleep Clock Register"]
pub mod apb3lpenr;
// Unlike the reset/enable groups above, the APB1 sleep registers' doc text
// does spell out the "Low"/"High" split explicitly.
#[doc = "RCC APB1 Low Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb1llpenr](apb1llpenr) module"]
pub type APB1LLPENR = crate::Reg<u32, _APB1LLPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB1LLPENR;
#[doc = "`read()` method returns [apb1llpenr::R](apb1llpenr::R) reader structure"]
impl crate::Readable for APB1LLPENR {}
#[doc = "`write(|w| ..)` method takes [apb1llpenr::W](apb1llpenr::W) writer structure"]
impl crate::Writable for APB1LLPENR {}
#[doc = "RCC APB1 Low Sleep Clock Register"]
pub mod apb1llpenr;
#[doc = "RCC APB1 Low Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb1llpenr](c1_apb1llpenr) module"]
pub type C1_APB1LLPENR = crate::Reg<u32, _C1_APB1LLPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB1LLPENR;
#[doc = "`read()` method returns [c1_apb1llpenr::R](c1_apb1llpenr::R) reader structure"]
impl crate::Readable for C1_APB1LLPENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb1llpenr::W](c1_apb1llpenr::W) writer structure"]
impl crate::Writable for C1_APB1LLPENR {}
#[doc = "RCC APB1 Low Sleep Clock Register"]
pub mod c1_apb1llpenr;
#[doc = "RCC APB1 High Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb1hlpenr](c1_apb1hlpenr) module"]
pub type C1_APB1HLPENR = crate::Reg<u32, _C1_APB1HLPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB1HLPENR;
#[doc = "`read()` method returns [c1_apb1hlpenr::R](c1_apb1hlpenr::R) reader structure"]
impl crate::Readable for C1_APB1HLPENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb1hlpenr::W](c1_apb1hlpenr::W) writer structure"]
impl crate::Writable for C1_APB1HLPENR {}
#[doc = "RCC APB1 High Sleep Clock Register"]
pub mod c1_apb1hlpenr;
#[doc = "RCC APB1 High Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb1hlpenr](apb1hlpenr) module"]
pub type APB1HLPENR = crate::Reg<u32, _APB1HLPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB1HLPENR;
#[doc = "`read()` method returns [apb1hlpenr::R](apb1hlpenr::R) reader structure"]
impl crate::Readable for APB1HLPENR {}
#[doc = "`write(|w| ..)` method takes [apb1hlpenr::W](apb1hlpenr::W) writer structure"]
impl crate::Writable for APB1HLPENR {}
#[doc = "RCC APB1 High Sleep Clock Register"]
pub mod apb1hlpenr;
#[doc = "RCC APB2 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb2lpenr](apb2lpenr) module"]
pub type APB2LPENR = crate::Reg<u32, _APB2LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB2LPENR;
#[doc = "`read()` method returns [apb2lpenr::R](apb2lpenr::R) reader structure"]
impl crate::Readable for APB2LPENR {}
#[doc = "`write(|w| ..)` method takes [apb2lpenr::W](apb2lpenr::W) writer structure"]
impl crate::Writable for APB2LPENR {}
#[doc = "RCC APB2 Sleep Clock Register"]
pub mod apb2lpenr;
#[doc = "RCC APB2 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb2lpenr](c1_apb2lpenr) module"]
pub type C1_APB2LPENR = crate::Reg<u32, _C1_APB2LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB2LPENR;
#[doc = "`read()` method returns [c1_apb2lpenr::R](c1_apb2lpenr::R) reader structure"]
impl crate::Readable for C1_APB2LPENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb2lpenr::W](c1_apb2lpenr::W) writer structure"]
impl crate::Writable for C1_APB2LPENR {}
#[doc = "RCC APB2 Sleep Clock Register"]
pub mod c1_apb2lpenr;
#[doc = "RCC APB4 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1_apb4lpenr](c1_apb4lpenr) module"]
pub type C1_APB4LPENR = crate::Reg<u32, _C1_APB4LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1_APB4LPENR;
#[doc = "`read()` method returns [c1_apb4lpenr::R](c1_apb4lpenr::R) reader structure"]
impl crate::Readable for C1_APB4LPENR {}
#[doc = "`write(|w| ..)` method takes [c1_apb4lpenr::W](c1_apb4lpenr::W) writer structure"]
impl crate::Writable for C1_APB4LPENR {}
#[doc = "RCC APB4 Sleep Clock Register"]
pub mod c1_apb4lpenr;
#[doc = "RCC APB4 Sleep Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apb4lpenr](apb4lpenr) module"]
pub type APB4LPENR = crate::Reg<u32, _APB4LPENR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APB4LPENR;
#[doc = "`read()` method returns [apb4lpenr::R](apb4lpenr::R) reader structure"]
impl crate::Readable for APB4LPENR {}
#[doc = "`write(|w| ..)` method takes [apb4lpenr::W](apb4lpenr::W) writer structure"]
impl crate::Writable for APB4LPENR {}
#[doc = "RCC APB4 Sleep Clock Register"]
pub mod apb4lpenr;
|
use alloc::string::String;
use core::slice;
use device_tree::{DeviceTree, Node};
use super::virtio::virtio_probe;
use super::CMDLINE;
/// Magic number that starts every flattened device tree blob,
/// stored big-endian in the blob itself.
const DEVICE_TREE_MAGIC: u32 = 0xd00dfeed;

/// Recursively walks a device-tree node and its children.
///
/// Probes every node whose `compatible` property is `"virtio,mmio"`,
/// and stores a non-empty `bootargs` property into the global `CMDLINE`.
fn walk_dt_node(dt: &Node) {
    if let Ok(compatible) = dt.prop_str("compatible") {
        // TODO: query this from table
        if compatible == "virtio,mmio" {
            virtio_probe(dt);
        }
        // TODO: initial other devices (16650, etc.)
    }
    if let Ok(bootargs) = dt.prop_str("bootargs") {
        // `!is_empty()` is the idiomatic (and clippy-clean) form of `len() > 0`.
        if !bootargs.is_empty() {
            info!("Kernel cmdline: {}", bootargs);
            *CMDLINE.write() = String::from(bootargs);
        }
    }
    for child in dt.children.iter() {
        walk_dt_node(child);
    }
}
/// The first two fields of a flattened-device-tree (FDT) header:
/// the magic number and the total blob size, both stored big-endian.
///
/// `#[repr(C)]` is required: `init` builds this struct by reinterpreting
/// raw DTB memory (`&*(dtb as *const DtbHeader)`), so the in-memory field
/// order must match the FDT header exactly — the default Rust repr makes
/// no such guarantee.
#[repr(C)]
struct DtbHeader {
    magic: u32,
    size: u32,
}
/// Parses the device tree blob at address `dtb` and probes the devices in it.
///
/// Does nothing (silently) if the blob's magic number does not match
/// `DEVICE_TREE_MAGIC`.
pub fn init(dtb: usize) {
    info!("DTB: {:#x}", dtb);
    // SAFETY: assumes `dtb` points to readable memory at least 8 bytes long —
    // TODO confirm callers always pass a mapped DTB address.
    let header = unsafe { &*(dtb as *const DtbHeader) };
    // FDT header fields are big-endian; convert before comparing.
    let magic = u32::from_be(header.magic);
    if magic == DEVICE_TREE_MAGIC {
        let size = u32::from_be(header.size);
        // SAFETY: `size` is the total blob length taken from the validated
        // header; assumes the whole blob is mapped — TODO confirm.
        let dtb_data = unsafe { slice::from_raw_parts(dtb as *const u8, size as usize) };
        if let Ok(dt) = DeviceTree::load(dtb_data) {
            //trace!("DTB: {:#x?}", dt);
            walk_dt_node(&dt.root);
        }
    }
}
|
//! Where the StableAbi trait is declared, as well as related types/traits.
use core_extensions::type_level_bool::{Boolean, False, True};
use std::{
cell::{Cell, UnsafeCell},
marker::{PhantomData, PhantomPinned},
mem::ManuallyDrop,
num::{NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize, Wrapping},
pin::Pin,
ptr::NonNull,
sync::atomic::{AtomicBool, AtomicIsize, AtomicPtr, AtomicUsize},
};
use crate::{
abi_stability::get_static_equivalent::GetStaticEquivalent_,
reflection::ModReflMode,
sabi_types::Constructor,
std_types::{utypeid::UTypeId, RSlice},
type_layout::{
CompTLField, CompTLFields, DiscriminantRepr, GenericTLData, GenericTLEnum, ItemInfo,
LifetimeRange, MonoTLData, MonoTLEnum, MonoTypeLayout, ReprAttr, StartLen, TLDiscriminants,
TLPrimitive, TypeLayout,
},
};
///////////////////////
/// Represents a type whose layout is stable.
///
/// This trait can be derived using
/// [`#[derive(StableAbi)]`](derive@crate::StableAbi).
///
/// # Safety
///
/// The layout specified in `LAYOUT` must be correct,
/// otherwise type checking when loading a dynamic library would be unsound,
/// and passing this into a dynamic library would be equivalent to transmuting it.
///
/// # Caveats
///
/// This trait cannot be directly implemented for functions that take lifetime parameters,
/// because of that, [`#[derive(StableAbi)]`](derive@crate::StableAbi)
/// detects the presence of `extern fn` types in type definitions.
pub unsafe trait StableAbi: GetStaticEquivalent_ {
    /// Whether this type has a single invalid bit-pattern.
    ///
    /// Possible values: [`True`]/[`False`]
    ///
    /// Some standard library types have a single value that is invalid for them, e.g. 0 or null.
    /// These types are the only ones which can be stored in an `Option<_>` that implements StableAbi.
    ///
    /// For an alternative to `Option<T>` for types where
    /// `IsNonZeroType = False`, you can use [`ROption`].
    ///
    /// Non-exhaustive list of std types that are NonZero:
    ///
    /// - `&T` (any T).
    ///
    /// - `&mut T` (any T).
    ///
    /// - `extern "C" fn()`.
    ///
    /// - `std::ptr::NonNull`
    ///
    /// - `std::num::NonZero*`
    ///
    /// [`True`]: crate::reexports::True
    /// [`False`]: crate::reexports::False
    /// [`ROption`]: crate::std_types::ROption
    type IsNonZeroType: Boolean;
    /// The layout of the type provided by implementors.
    const LAYOUT: &'static TypeLayout;
    /// `const`-equivalents of the associated types.
    const ABI_CONSTS: AbiConsts = AbiConsts {
        // The id is computed from the `'static` equivalent of `Self`,
        // so lifetime parameters don't produce distinct ids.
        type_id: Constructor(crate::std_types::utypeid::new_utypeid::<Self::StaticEquivalent>),
        is_nonzero: <Self::IsNonZeroType as Boolean>::VALUE,
    };
}
/// A type that only has a stable layout when a `PrefixRef` to it is used.
///
/// Types that implement this trait usually have a `_Prefix` suffix.
///
/// # Safety
///
/// This trait can only be implemented by the `StableAbi` derive
/// on types that also use the `#[sabi(kind(Prefix))]` attribute,
/// implementing the trait for a macro generated type.
pub unsafe trait PrefixStableAbi: GetStaticEquivalent_ {
    /// Whether this type has a single invalid bit-pattern.
    ///
    /// Possible values: [`True`]/[`False`]
    type IsNonZeroType: Boolean;
    /// The layout of the type, provided by implementors.
    const LAYOUT: &'static TypeLayout;
    /// `const`-equivalents of the associated types.
    const ABI_CONSTS: AbiConsts = AbiConsts {
        // Same id scheme as `StableAbi::ABI_CONSTS`: derived from the
        // `'static` equivalent of `Self`.
        type_id: Constructor(crate::std_types::utypeid::new_utypeid::<Self::StaticEquivalent>),
        is_nonzero: <Self::IsNonZeroType as Boolean>::VALUE,
    };
}
///////////////////////
/// Contains constants equivalent to the associated types in StableAbi.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(C)]
#[derive(StableAbi)]
pub struct AbiConsts {
    /// A function to get the unique identifier for some type
    pub type_id: Constructor<UTypeId>,
    /// Whether the type uses non-zero value optimization,
    /// if true then an `Option<Self>` implements StableAbi.
    pub is_nonzero: bool,
}
impl AbiConsts {
    /// Gets the `UTypeId` returned by the `type_id` field.
    #[inline]
    pub fn get_type_id(&self) -> UTypeId {
        // Invokes the stored constructor function; the id is not cached.
        self.type_id.get()
    }
}
///////////////////////////////////////////////////////////////////////////////
/// Retrieves the `TypeLayout` of `T: StableAbi`.
pub extern "C" fn get_type_layout<T: StableAbi>() -> &'static TypeLayout {
    // Fully qualified so it's explicit which trait supplies the constant.
    <T as StableAbi>::LAYOUT
}
/// Retrieves the `TypeLayout` of `T: PrefixStableAbi`.
pub extern "C" fn get_prefix_field_type_layout<T: PrefixStableAbi>() -> &'static TypeLayout {
    // The only bound on `T` is `PrefixStableAbi`, so this resolves to
    // `<T as PrefixStableAbi>::LAYOUT`.
    T::LAYOUT
}
#[doc(hidden)]
pub extern "C" fn __sabi_opaque_field_type_layout<T: StableAbi>() -> &'static TypeLayout {
    // Wrapping the field in `UnsafeOpaqueField` hides `T`'s real layout.
    <UnsafeOpaqueField<T> as StableAbi>::LAYOUT
}
#[doc(hidden)]
pub extern "C" fn __opaque_field_type_layout<T>() -> &'static TypeLayout {
    // Unlike `__sabi_opaque_field_type_layout`, `T` needs no bound here:
    // the wrapper's `StableAbi` impl treats the field as opaque bytes.
    let layout: &'static TypeLayout = <UnsafeOpaqueField<T> as StableAbi>::LAYOUT;
    layout
}
///////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
//// Implementations
/////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
// `PhantomData<T>`: a zero-sized struct (checked by `zst_assert!`) whose
// layout records `T` in `type_layouts` so the parameter still participates
// in layout comparison.
unsafe impl<T> GetStaticEquivalent_ for PhantomData<T>
where
    T: GetStaticEquivalent_,
{
    type StaticEquivalent = PhantomData<T::StaticEquivalent>;
}
unsafe impl<T> StableAbi for PhantomData<T>
where
    T: StableAbi,
{
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        zst_assert!(Self);
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("PhantomData"),
            ItemInfo::std_type_in(nulstr_trunc!("std::marker")),
            MonoTLData::EMPTY,
            tl_genparams!(;0;),
            ReprAttr::C,
            ModReflMode::Module,
            {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)];
                RSlice::from_slice(S)
            },
        );
        // `make_shared_vars!` binds `mono_shared_vars`/`shared_vars`,
        // which the constants above and below refer to.
        make_shared_vars! {
            impl[T] PhantomData<T>
            where[T: StableAbi];
            let (mono_shared_vars,shared_vars)={
                strings={ field0:"0", },
                type_layouts=[T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Struct,
        )
    };
}
// Implements `GetStaticEquivalent_`/`StableAbi` for `PhantomData` of a tuple.
// The `(ignore; ...)` rule always expands to `1` and is used below to count
// the tuple arity (`COUNT`) at compile time.
macro_rules! phantomdata_tuples {
    (ignore; $($anything:tt)*)=>{ 1 };
    (
        $(($tuple_param:ident,$name_ident:ident=$name_str:literal))*
    )=>{
        unsafe impl<$($tuple_param,)*>
            GetStaticEquivalent_
        for PhantomData<($($tuple_param,)*)>
        where
            $($tuple_param:GetStaticEquivalent_,)*
        {
            type StaticEquivalent=PhantomData<($($tuple_param::StaticEquivalent,)*)>;
        }
        unsafe impl<$($tuple_param,)*>
            StableAbi
        for PhantomData<($($tuple_param,)*)>
        where
            $($tuple_param:StableAbi,)*
        {
            type IsNonZeroType = False;
            const LAYOUT: &'static TypeLayout = {
                zst_assert!(Self);
                const MONO_TYPE_LAYOUT:&MonoTypeLayout=&MonoTypeLayout::new(
                    *mono_shared_vars,
                    rstr!("PhantomData"),
                    ItemInfo::std_type_in(nulstr_trunc!("std::marker")),
                    MonoTLData::EMPTY,
                    tl_genparams!(;0..COUNT;),
                    ReprAttr::C,
                    ModReflMode::Module,
                    unsafe{
                        RSlice::from_raw_parts_with_lifetime(FIELDS,COUNT)
                    }
                );
                #[allow(unused_assignments)]
                const FIELDS:&'static [CompTLField;COUNT]={
                    // One `CompTLField` per tuple element, numbered 0..COUNT.
                    let mut i=0;
                    $(
                        #[allow(non_snake_case)]
                        let $tuple_param=
                            CompTLField::std_field($name_ident,LifetimeRange::EMPTY,i);
                        i+=1;
                    )*
                    &[$($tuple_param,)*]
                };
                // Expands to `1 + 1 + ... + 0`: one `1` per tuple parameter.
                const COUNT:usize=$(phantomdata_tuples!(ignore;$tuple_param)+)* 0;
                make_shared_vars!{
                    impl[$($tuple_param,)*] PhantomData<($($tuple_param,)*)>
                    where[
                        $($tuple_param:StableAbi,)*
                    ];
                    let (mono_shared_vars,shared_vars)={
                        strings={ $($name_ident:$name_str,)* },
                        type_layouts=[$($tuple_param,)*],
                    };
                }
                &TypeLayout::from_std::<Self>(
                    shared_vars,
                    MONO_TYPE_LAYOUT,
                    Self::ABI_CONSTS,
                    GenericTLData::Struct,
                )
            };
        }
    }
}
/*
fn main(){
for i in 1..=16{
println!("phantomdata_tuples!{{");
for j in 0..i{
println!(" (T{0},p{0}=\"{0}\")",j);
}
println!("}}")
}
}
*/
// Instantiations for tuple arities 1 through 16; the commented-out
// generator above produced these invocations.
phantomdata_tuples! {
    (T0,p0="0")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
    (T9,p9="9")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
    (T9,p9="9")
    (T10,p10="10")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
    (T9,p9="9")
    (T10,p10="10")
    (T11,p11="11")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
    (T9,p9="9")
    (T10,p10="10")
    (T11,p11="11")
    (T12,p12="12")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
    (T9,p9="9")
    (T10,p10="10")
    (T11,p11="11")
    (T12,p12="12")
    (T13,p13="13")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
    (T9,p9="9")
    (T10,p10="10")
    (T11,p11="11")
    (T12,p12="12")
    (T13,p13="13")
    (T14,p14="14")
}
phantomdata_tuples! {
    (T0,p0="0")
    (T1,p1="1")
    (T2,p2="2")
    (T3,p3="3")
    (T4,p4="4")
    (T5,p5="5")
    (T6,p6="6")
    (T7,p7="7")
    (T8,p8="8")
    (T9,p9="9")
    (T10,p10="10")
    (T11,p11="11")
    (T12,p12="12")
    (T13,p13="13")
    (T14,p14="14")
    (T15,p15="15")
}
// The unit type: an empty `repr(C)`-like layout with no fields and no
// generic parameters.
unsafe impl GetStaticEquivalent_ for () {
    type StaticEquivalent = ();
}
unsafe impl StableAbi for () {
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("()"),
            ItemInfo::primitive(),
            MonoTLData::EMPTY,
            tl_genparams!(;;),
            ReprAttr::C,
            ModReflMode::Module,
            RSlice::EMPTY,
        );
        make_shared_vars! {
            impl[] ();
            let (mono_shared_vars,shared_vars)={};
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Struct,
        )
    };
}
/////////////
// Shared and mutable references. `IsNonZeroType = True`: references are
// never null, so `Option<&T>`/`Option<&mut T>` can use the niche.
unsafe impl<'a, T> GetStaticEquivalent_ for &'a T
where
    T: 'a + GetStaticEquivalent_,
{
    type StaticEquivalent = &'static T::StaticEquivalent;
}
// Does not allow ?Sized types because the DST fat pointer does not have a stable layout.
unsafe impl<'a, T> StableAbi for &'a T
where
    T: 'a + StableAbi,
{
    type IsNonZeroType = True;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("&"),
            ItemInfo::primitive(),
            MonoTLData::Primitive(TLPrimitive::SharedRef),
            tl_genparams!('a;0;),
            ReprAttr::Primitive,
            ModReflMode::DelegateDeref { layout_index: 0 },
            {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)];
                RSlice::from_slice(S)
            },
        );
        make_shared_vars! {
            impl['a, T] &'a T
            where[ T: 'a + StableAbi];
            let (mono_shared_vars,shared_vars)={
                strings={ field0:"0", },
                type_layouts=[T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Primitive,
        )
    };
}
unsafe impl<'a, T> GetStaticEquivalent_ for &'a mut T
where
    T: 'a + GetStaticEquivalent_,
{
    type StaticEquivalent = &'static mut T::StaticEquivalent;
}
// Does not allow ?Sized types because the DST fat pointer does not have a stable layout.
unsafe impl<'a, T> StableAbi for &'a mut T
where
    T: 'a + StableAbi,
{
    type IsNonZeroType = True;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("&mut"),
            ItemInfo::primitive(),
            MonoTLData::Primitive(TLPrimitive::MutRef),
            tl_genparams!('a;0;),
            ReprAttr::Primitive,
            ModReflMode::DelegateDeref { layout_index: 0 },
            {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)];
                RSlice::from_slice(S)
            },
        );
        make_shared_vars! {
            impl['a, T] &'a mut T
            where[ T: 'a + StableAbi];
            let (mono_shared_vars,shared_vars)={
                strings={ field0:"0", },
                type_layouts=[T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Primitive,
        )
    };
}
// `NonNull<T>` (non-zero, transparent over `*const T`) and `AtomicPtr<T>`
// (transparent over `*mut T`, nullable). Both record the pointer type as an
// extra entry in `type_layouts` (layout index 1 in the single field).
unsafe impl<T> GetStaticEquivalent_ for NonNull<T>
where
    T: GetStaticEquivalent_,
{
    type StaticEquivalent = NonNull<T::StaticEquivalent>;
}
// Does not allow ?Sized types because the DST fat pointer does not have a stable layout.
unsafe impl<T> StableAbi for NonNull<T>
where
    T: StableAbi,
{
    type IsNonZeroType = True;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("NonNull"),
            ItemInfo::std_type_in(nulstr_trunc!("std::ptr")),
            {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 1)];
                MonoTLData::struct_(RSlice::from_slice(S))
            },
            tl_genparams!(;0;),
            ReprAttr::Transparent,
            ModReflMode::Module,
            RSlice::EMPTY,
        );
        make_shared_vars! {
            impl[T] NonNull<T>
            where[ T: StableAbi];
            let (mono_shared_vars,shared_vars)={
                strings={ field0:"0", },
                type_layouts=[T,*const T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Struct,
        )
    };
}
unsafe impl<T> GetStaticEquivalent_ for AtomicPtr<T>
where
    T: GetStaticEquivalent_,
{
    type StaticEquivalent = AtomicPtr<T::StaticEquivalent>;
}
unsafe impl<T> StableAbi for AtomicPtr<T>
where
    T: StableAbi,
{
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("AtomicPtr"),
            ItemInfo::std_type_in(nulstr_trunc!("std::sync::atomic")),
            {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 1)];
                MonoTLData::struct_(RSlice::from_slice(S))
            },
            tl_genparams!(;0;),
            ReprAttr::Transparent,
            ModReflMode::Module,
            RSlice::EMPTY,
        );
        make_shared_vars! {
            impl[T] AtomicPtr<T>
            where[T: StableAbi];
            let (mono_shared_vars,shared_vars)={
                strings={ field0:"0", },
                type_layouts=[T,*mut T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Struct,
        )
    };
}
// Raw pointers: nullable (`IsNonZeroType = False`), modeled as primitives.
unsafe impl<T> GetStaticEquivalent_ for *const T
where
    T: GetStaticEquivalent_,
{
    type StaticEquivalent = *const T::StaticEquivalent;
}
// Does not allow ?Sized types because the DST fat pointer does not have a stable layout.
unsafe impl<T> StableAbi for *const T
where
    T: StableAbi,
{
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("*const"),
            ItemInfo::primitive(),
            MonoTLData::Primitive(TLPrimitive::ConstPtr),
            tl_genparams!(;0;),
            ReprAttr::Primitive,
            ModReflMode::Module,
            {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)];
                RSlice::from_slice(S)
            },
        );
        make_shared_vars! {
            impl[T] *const T
            where[T: StableAbi];
            let (mono_shared_vars,shared_vars)={
                strings={ field0:"0", },
                type_layouts=[T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Primitive,
        )
    };
}
unsafe impl<T> GetStaticEquivalent_ for *mut T
where
    T: GetStaticEquivalent_,
{
    type StaticEquivalent = *mut T::StaticEquivalent;
}
// Does not allow ?Sized types because the DST fat pointer does not have a stable layout.
unsafe impl<T> StableAbi for *mut T
where
    T: StableAbi,
{
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("*mut"),
            ItemInfo::primitive(),
            MonoTLData::Primitive(TLPrimitive::MutPtr),
            tl_genparams!(;0;),
            ReprAttr::Primitive,
            ModReflMode::Module,
            {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)];
                RSlice::from_slice(S)
            },
        );
        make_shared_vars! {
            impl[T] *mut T
            where[T: StableAbi];
            let (mono_shared_vars,shared_vars)={
                strings={ field0:"0", },
                type_layouts=[T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Primitive,
        )
    };
}
/////////////
// `[T; N]` via const generics. The code is wrapped in a zero-argument macro
// (expanded unconditionally right below) — presumably a leftover from
// feature-gating const-generics support; TODO confirm before simplifying.
macro_rules! impl_stable_abi_array {
    () => {
        unsafe impl<T, const N: usize> GetStaticEquivalent_ for [T; N]
        where
            T: GetStaticEquivalent_,
        {
            type StaticEquivalent = [T::StaticEquivalent; N];
        }
        unsafe impl<T, const N: usize> StableAbi for [T; N]
        where
            T: StableAbi,
        {
            type IsNonZeroType = False;
            const LAYOUT: &'static TypeLayout = {
                // Used to get constants for [T; N] where T doesn't matter
                struct ArrayMonoConsts<const N: usize>;
                impl<const N: usize> ArrayMonoConsts<N> {
                    const MONO_TYPE_LAYOUT: &'static MonoTypeLayout = &MonoTypeLayout::new(
                        *mono_shared_vars,
                        rstr!("array"),
                        ItemInfo::primitive(),
                        MonoTLData::Primitive(TLPrimitive::Array),
                        tl_genparams!(;0;0),
                        ReprAttr::Primitive,
                        ModReflMode::Module,
                        {
                            const S: &[CompTLField] =
                                &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)];
                            RSlice::from_slice(S)
                        },
                    );
                }
                make_shared_vars! {
                    impl[T, const N: usize] [T; N]
                    where[T: StableAbi];
                    let (mono_shared_vars,shared_vars)={
                        strings={ field0:"element", },
                        type_layouts=[T],
                        // The array length is recorded as a const parameter.
                        constant=[usize => N],
                    };
                }
                &TypeLayout::from_std::<Self>(
                    shared_vars,
                    ArrayMonoConsts::<N>::MONO_TYPE_LAYOUT,
                    Self::ABI_CONSTS,
                    GenericTLData::Primitive,
                )
            };
        }
    };
}
impl_stable_abi_array! {}
/////////////
unsafe impl<T> GetStaticEquivalent_ for Option<T>
where
    T: GetStaticEquivalent_,
{
    type StaticEquivalent = Option<T::StaticEquivalent>;
}
/// Implementing abi stability for `Option<T>` is fine if
/// T is a NonZero primitive type.
unsafe impl<T> StableAbi for Option<T>
where
    T: StableAbi<IsNonZeroType = True>,
{
    // The niche is consumed by this `Option`, so `Option<Option<T>>`
    // does not implement StableAbi.
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("Option"),
            ItemInfo::std_type_in(nulstr_trunc!("std::option")),
            // Two variants (`Some`/`None`); only `Some` carries a field.
            MonoTLData::Enum(MonoTLEnum::new(variant_names, rslice![1, 0], {
                const S: &[CompTLField] =
                    &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)];
                CompTLFields::from_fields(RSlice::from_slice(S))
            })),
            tl_genparams!(;0;),
            ReprAttr::OptionNonZero,
            ModReflMode::Module,
            RSlice::EMPTY,
        );
        make_shared_vars! {
            impl[T] Option<T>
            where [ T: StableAbi<IsNonZeroType = True>, ];
            let (mono_shared_vars,shared_vars)={
                strings={
                    variant_names:"Some;None;",
                    field0:"0",
                },
                type_layouts=[T],
            };
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Enum(GenericTLEnum::exhaustive(TLDiscriminants::from_u8_slice(
                rslice![0, 1],
            ))),
        )
    };
}
/////////////
// Implements both traits for a fixed list of primitive types, each mapped
// to its `TLPrimitive` discriminant.
macro_rules! impl_for_primitive_ints {
    (
        $( ($type:ty,$type_name:literal,$tl_primitive:expr) ,)*
    ) => (
        $(
            unsafe impl GetStaticEquivalent_ for $type {
                type StaticEquivalent=Self;
            }
            unsafe impl StableAbi for $type {
                type IsNonZeroType=False;
                const LAYOUT: &'static TypeLayout = {
                    const MONO_TYPE_LAYOUT:&MonoTypeLayout=&MonoTypeLayout::new(
                        *mono_shared_vars,
                        rstr!($type_name),
                        ItemInfo::primitive(),
                        MonoTLData::Primitive($tl_primitive),
                        tl_genparams!(;;),
                        ReprAttr::Primitive,
                        ModReflMode::Module,
                        RSlice::EMPTY,
                    );
                    make_shared_vars!{
                        impl[] $type;
                        let (mono_shared_vars,shared_vars)={
                            type_layouts=[],
                        };
                    }
                    &TypeLayout::from_std::<Self>(
                        shared_vars,
                        MONO_TYPE_LAYOUT,
                        Self::ABI_CONSTS,
                        GenericTLData::Primitive,
                    )
                };
            }
        )*
    )
}
impl_for_primitive_ints! {
    (u8 ,"u8" ,TLPrimitive::U8),
    (i8 ,"i8" ,TLPrimitive::I8),
    (u16 ,"u16" ,TLPrimitive::U16),
    (i16 ,"i16" ,TLPrimitive::I16),
    (u32 ,"u32" ,TLPrimitive::U32),
    (i32 ,"i32" ,TLPrimitive::I32),
    (u64 ,"u64" ,TLPrimitive::U64),
    (i64 ,"i64" ,TLPrimitive::I64),
    (usize,"usize",TLPrimitive::Usize),
    (isize,"isize",TLPrimitive::Isize),
    (bool ,"bool" ,TLPrimitive::Bool),
    (f32 ,"f32" ,TLPrimitive::F32),
    (f64 ,"f64" ,TLPrimitive::F64),
}
// Implements both traits for concrete `repr(transparent)`-style std wrappers
// (`$this`) over a primitive (`$prim_repr`), with configurable zeroness.
macro_rules! impl_for_concrete {
    (
        type IsNonZeroType=$zeroness:ty;
        [
            $( ($this:ty,$this_name:literal,$prim_repr:ty,$in_mod:expr) ,)*
        ]
    ) => (
        $(
            unsafe impl GetStaticEquivalent_ for $this {
                type StaticEquivalent=Self;
            }
            unsafe impl StableAbi for $this {
                type IsNonZeroType=$zeroness;
                const LAYOUT: &'static TypeLayout = {
                    const MONO_TYPE_LAYOUT:&MonoTypeLayout=&MonoTypeLayout::new(
                        *mono_shared_vars,
                        rstr!($this_name),
                        ItemInfo::std_type_in(nulstr_trunc!($in_mod)),
                        {
                            const S: &[CompTLField] = &[
                                CompTLField::std_field(field0,LifetimeRange::EMPTY,0),
                            ];
                            MonoTLData::struct_(RSlice::from_slice(S))
                        },
                        tl_genparams!(;;),
                        ReprAttr::Transparent,
                        ModReflMode::Module,
                        RSlice::EMPTY,
                    );
                    make_shared_vars!{
                        impl[] $this;
                        let (mono_shared_vars,shared_vars)={
                            strings={ field0:"0" },
                            type_layouts=[$prim_repr],
                        };
                    }
                    &TypeLayout::from_std::<Self>(
                        shared_vars,
                        MONO_TYPE_LAYOUT,
                        Self::ABI_CONSTS,
                        GenericTLData::Struct,
                    )
                };
            }
        )*
    )
}
impl_for_concrete! {
    type IsNonZeroType=False;
    [
        (AtomicBool ,"AtomicBool" ,bool,"std::sync::atomic"),
        (AtomicIsize,"AtomicIsize",isize,"std::sync::atomic"),
        (AtomicUsize,"AtomicUsize",usize,"std::sync::atomic"),
    ]
}
// `NonZero*` types have a niche at zero, hence `IsNonZeroType=True`.
impl_for_concrete! {
    type IsNonZeroType=True;
    [
        (NonZeroU8 ,"NonZeroU8" ,u8,"std::num"),
        (NonZeroU16 ,"NonZeroU16" ,u16,"std::num"),
        (NonZeroU32 ,"NonZeroU32" ,u32,"std::num"),
        (NonZeroU64 ,"NonZeroU64" ,u64,"std::num"),
        (NonZeroUsize,"NonZeroUsize",usize,"std::num"),
    ]
}
/////////////
// Impls for std types stabilized in Rust 1.34
// (fixed-width atomics and signed `NonZero*` integers).
mod rust_1_34_impls {
    use super::*;
    use core::num::*;
    use std::sync::atomic::*;
    impl_for_concrete! {
        type IsNonZeroType=False;
        [
            (AtomicI8 ,"AtomicI8" ,i8,"std::sync::atomic"),
            (AtomicI16,"AtomicI16",i16,"std::sync::atomic"),
            (AtomicI32,"AtomicI32",i32,"std::sync::atomic"),
            (AtomicI64,"AtomicI64",i64,"std::sync::atomic"),
            (AtomicU8 ,"AtomicU8" ,u8,"std::sync::atomic"),
            (AtomicU16,"AtomicU16",u16,"std::sync::atomic"),
            (AtomicU32,"AtomicU32",u32,"std::sync::atomic"),
            (AtomicU64,"AtomicU64",u64,"std::sync::atomic"),
        ]
    }
    impl_for_concrete! {
        type IsNonZeroType=True;
        [
            (NonZeroI8 ,"NonZeroI8" ,i8,"core::num"),
            (NonZeroI16 ,"NonZeroI16" ,i16,"core::num"),
            (NonZeroI32 ,"NonZeroI32" ,i32,"core::num"),
            (NonZeroI64 ,"NonZeroI64" ,i64,"core::num"),
            (NonZeroIsize,"NonZeroIsize",isize,"core::num"),
        ]
    }
}
// Impls for std types stabilized in Rust 1.36 (`MaybeUninit`).
mod rust_1_36_impls {
    use super::*;
    use std::mem::MaybeUninit;
    unsafe impl<T> GetStaticEquivalent_ for MaybeUninit<T>
    where
        T: GetStaticEquivalent_,
    {
        type StaticEquivalent = MaybeUninit<T::StaticEquivalent>;
    }
    unsafe impl<T> StableAbi for MaybeUninit<T>
    where
        T: StableAbi,
    {
        // MaybeUninit blocks layout optimizations.
        type IsNonZeroType = False;
        const LAYOUT: &'static TypeLayout = {
            const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
                *mono_shared_vars,
                rstr!("MaybeUninit"),
                ItemInfo::std_type_in(nulstr_trunc!("std::mem")),
                {
                    const S: &[CompTLField] =
                        &[CompTLField::std_field(field0, LifetimeRange::EMPTY, 0)],
                    MonoTLData::struct_(RSlice::from_slice(S))
                },
                tl_genparams!(;0;),
                // Using `ReprAttr::Transparent` so that if I add C header file translation
                // it will be translated to just `T`.
                ReprAttr::Transparent,
                ModReflMode::Opaque,
                RSlice::EMPTY,
            );
            make_shared_vars! {
                impl[T] MaybeUninit<T>
                where [T: StableAbi];
                let (mono_shared_vars,shared_vars)={
                    strings={ field0:"value" },
                    type_layouts=[T],
                };
            }
            &TypeLayout::from_std::<Self>(
                shared_vars,
                MONO_TYPE_LAYOUT,
                Self::ABI_CONSTS,
                GenericTLData::Struct,
            )
        };
    }
}
/////////////
// Implements both traits for single-field std newtype wrappers.
// `@trans transparent` forwards the wrapped type's zeroness
// (`P::IsNonZeroType`); `@trans C` forces `False`.
macro_rules! impl_sabi_for_newtype {
    (@trans transparent)=>{ P::IsNonZeroType };
    (@trans C)=>{ False };
    (
        $type_constr:ident
        $(where[ $($where_clause:tt)* ])* ,
        $transparency:ident,
        $type_name:literal,
        $mod_path:expr
    ) => (
        unsafe impl<P> GetStaticEquivalent_ for $type_constr<P>
        where
            P: GetStaticEquivalent_,
            $($($where_clause)*)*
        {
            type StaticEquivalent=$type_constr<P::StaticEquivalent>;
        }
        unsafe impl<P> StableAbi for $type_constr<P>
        where
            P: StableAbi,
            $($($where_clause)*)*
        {
            type IsNonZeroType = impl_sabi_for_newtype!(@trans $transparency);
            const LAYOUT: &'static TypeLayout = {
                const MONO_TYPE_LAYOUT:&MonoTypeLayout=&MonoTypeLayout::new(
                    *mono_shared_vars,
                    rstr!($type_name),
                    ItemInfo::std_type_in(nulstr_trunc!($mod_path)),
                    {
                        const S: &[CompTLField] = &[
                            CompTLField::std_field(field0,LifetimeRange::EMPTY,0),
                        ];
                        MonoTLData::struct_(RSlice::from_slice(S))
                    },
                    tl_genparams!(;0;),
                    ReprAttr::Transparent,
                    ModReflMode::Module,
                    RSlice::EMPTY,
                );
                make_shared_vars!{
                    impl[P] $type_constr<P>
                    where [
                        P: StableAbi,
                        $($($where_clause)*)*
                    ];
                    let (mono_shared_vars,shared_vars)={
                        strings={ field0:"0" },
                        type_layouts=[P],
                    };
                }
                &TypeLayout::from_std::<Self>(
                    shared_vars,
                    MONO_TYPE_LAYOUT,
                    Self::ABI_CONSTS,
                    GenericTLData::Struct,
                )
            };
        }
    )
}
impl_sabi_for_newtype! { Wrapping ,transparent,"Wrapping" ,"std::num" }
impl_sabi_for_newtype! { Pin ,transparent,"Pin" ,"std::pin" }
impl_sabi_for_newtype! { ManuallyDrop,transparent,"ManuallyDrop","std::mem" }
impl_sabi_for_newtype! { Cell ,C,"Cell" ,"std::cell" }
impl_sabi_for_newtype! { UnsafeCell ,C,"UnsafeCell" ,"std::cell" }
/////////////
// Implements both traits for field-less (unit) structs; used below for
// `PhantomPinned`.
macro_rules! impl_stableabi_for_unit_struct {
    (
        $type_constr:ident,
        $type_name:literal,
        $item_info:expr
    ) => {
        unsafe impl GetStaticEquivalent_ for $type_constr {
            type StaticEquivalent = $type_constr;
        }
        unsafe impl StableAbi for $type_constr {
            type IsNonZeroType = False;
            const LAYOUT: &'static TypeLayout = {
                const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
                    *mono_shared_vars,
                    rstr!($type_name),
                    $item_info,
                    MonoTLData::struct_(RSlice::EMPTY),
                    tl_genparams!(;;),
                    ReprAttr::C,
                    ModReflMode::Module,
                    RSlice::EMPTY,
                );
                make_shared_vars! {
                    impl[] $type_constr;
                    let (mono_shared_vars,shared_vars)={};
                }
                &TypeLayout::from_std::<Self>(
                    shared_vars,
                    MONO_TYPE_LAYOUT,
                    Self::ABI_CONSTS,
                    GenericTLData::Struct,
                )
            };
        }
    };
}
impl_stableabi_for_unit_struct! {
    PhantomPinned,"PhantomPinned",ItemInfo::std_type_in(nulstr_trunc!("std::marker"))
}
/////////////
// `c_void`: treated as an empty struct (it is only ever used behind pointers).
unsafe impl GetStaticEquivalent_ for ::core::ffi::c_void {
    type StaticEquivalent = ::core::ffi::c_void;
}
unsafe impl StableAbi for ::core::ffi::c_void {
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("c_void"),
            ItemInfo::std_type_in(nulstr_trunc!("std::ffi")),
            MonoTLData::EMPTY,
            tl_genparams!(;;),
            ReprAttr::C,
            ModReflMode::Module,
            RSlice::EMPTY,
        );
        make_shared_vars! {
            impl[] ::core::ffi::c_void;
            let (mono_shared_vars,shared_vars)={};
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Struct,
        )
    };
}
/////////////
// `core_extensions::Void`: an uninhabited enum, encoded as a `u8`-repr enum
// with zero variants and an empty discriminant list.
unsafe impl GetStaticEquivalent_ for core_extensions::Void {
    type StaticEquivalent = Self;
}
unsafe impl StableAbi for core_extensions::Void {
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("Void"),
            ItemInfo::package_and_mod("core_extensions;0.0.0", nulstr_trunc!("core_extensions")),
            MonoTLData::Enum(MonoTLEnum::new(
                StartLen::EMPTY,
                RSlice::EMPTY,
                CompTLFields::EMPTY,
            )),
            tl_genparams!(;;),
            ReprAttr::Int(DiscriminantRepr::U8),
            ModReflMode::Module,
            RSlice::EMPTY,
        );
        make_shared_vars! {
            impl[] core_extensions::Void;
            let (mono_shared_vars,shared_vars)={};
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Enum(GenericTLEnum::exhaustive(TLDiscriminants::from_u8_slice(
                RSlice::EMPTY,
            ))),
        )
    };
}
/////////////
/// The layout of `extern "C" fn()` and `unsafe extern "C" fn()`
// Expands (inside an impl's associated const) to an opaque layout named
// "AFunctionPointer"; the pointee signature is not described.
macro_rules! empty_extern_fn_layout {
    ($this:ty) => {{
        make_shared_vars! {
            impl[] $this;
            let (mono_shared_vars,shared_vars)={};
        }
        const MONO_TL_EXTERN_FN: &'static MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("AFunctionPointer"),
            make_item_info!(),
            MonoTLData::Opaque,
            tl_genparams!(;;),
            ReprAttr::C,
            ModReflMode::Opaque,
            RSlice::EMPTY,
        );
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TL_EXTERN_FN,
            Self::ABI_CONSTS,
            GenericTLData::Opaque,
        )
    }};
}
/// This is the only function type that implements StableAbi
/// so as to make it more obvious that functions involving lifetimes
/// cannot implement this trait directly (because of higher ranked trait bounds).
unsafe impl GetStaticEquivalent_ for extern "C" fn() {
type StaticEquivalent = Self;
}
unsafe impl StableAbi for extern "C" fn() {
type IsNonZeroType = True;
const LAYOUT: &'static TypeLayout = empty_extern_fn_layout!(extern "C" fn());
}
/// This is the only function type that implements StableAbi
/// so as to make it more obvious that functions involving lifetimes
/// cannot implement this trait directly (because of higher ranked trait bounds).
unsafe impl GetStaticEquivalent_ for unsafe extern "C" fn() {
type StaticEquivalent = Self;
}
unsafe impl StableAbi for unsafe extern "C" fn() {
type IsNonZeroType = True;
const LAYOUT: &'static TypeLayout = empty_extern_fn_layout!(unsafe extern "C" fn());
}
/// A function that returns the TypeLayout of an `unsafe extern "C" fn()`
#[doc(hidden)]
pub const UNSAFE_EXTERN_FN_LAYOUT: extern "C" fn() -> &'static TypeLayout =
get_type_layout::<unsafe extern "C" fn()>;
/// A function that returns the TypeLayout of an `extern "C" fn()`
#[doc(hidden)]
pub const EXTERN_FN_LAYOUT: extern "C" fn() -> &'static TypeLayout =
get_type_layout::<extern "C" fn()>;
/////////////
/// Allows one to create the `TypeLayout` for any type `T`,
/// by pretending that it is a primitive type.
///
/// Used by the StableAbi derive macro by fields marker as `#[sabi(unsafe_opaque_field)]`.
///
/// # Safety
///
/// You must ensure that the layout of `T` is compatible through other means.
#[repr(transparent)]
pub struct UnsafeOpaqueField<T>(T);
unsafe impl<T> GetStaticEquivalent_ for UnsafeOpaqueField<T> {
    /// it is fine to use `()` because this type is treated as opaque anyway.
    type StaticEquivalent = ();
}
unsafe impl<T> StableAbi for UnsafeOpaqueField<T> {
    type IsNonZeroType = False;
    const LAYOUT: &'static TypeLayout = {
        // The layout deliberately says nothing about `T`
        // (`MonoTLData::Opaque` / `GenericTLData::Opaque`); compatibility is
        // the caller's responsibility per the safety contract above.
        const MONO_TYPE_LAYOUT: &MonoTypeLayout = &MonoTypeLayout::new(
            *mono_shared_vars,
            rstr!("OpaqueField"),
            make_item_info!(),
            MonoTLData::Opaque,
            tl_genparams!(;;),
            ReprAttr::C,
            ModReflMode::Module,
            RSlice::EMPTY,
        );
        make_shared_vars! {
            impl[T] UnsafeOpaqueField<T>;
            let (mono_shared_vars,shared_vars)={};
        }
        &TypeLayout::from_std::<Self>(
            shared_vars,
            MONO_TYPE_LAYOUT,
            Self::ABI_CONSTS,
            GenericTLData::Opaque,
        )
    };
}
/// Allows one to ensure that a `T` implements `StableAbi`,
/// while storing an opaque layout instead of `<T as StableAbi>::LAYOUT`.
///
/// Used by the `StableAbi` derive macro by fields marker as `#[sabi(unsafe_sabi_opaque_field)]`.
///
/// # Safety
///
/// You must ensure that the layout of `T` is compatible through other means.
#[repr(transparent)]
pub struct SabiUnsafeOpaqueField<T>(T);
unsafe impl<T> GetStaticEquivalent_ for SabiUnsafeOpaqueField<T> {
    /// it is fine to use `()` because this type is treated as opaque anyway.
    type StaticEquivalent = ();
}
unsafe impl<T> StableAbi for SabiUnsafeOpaqueField<T>
where
    T: StableAbi,
{
    type IsNonZeroType = False;
    // Same opaque layout as `UnsafeOpaqueField`, but the `T: StableAbi` bound
    // still forces `T` to implement the trait.
    const LAYOUT: &'static TypeLayout = { <UnsafeOpaqueField<T>>::LAYOUT };
}
/////////////
|
use core::ptr::null_mut;
use {ffi, Bitmap};
/// Owned wrapper around a FreeType `FT_BitmapGlyph`, keeping the library
/// handle alive (via its reference count) for as long as the glyph exists.
pub struct BitmapGlyph {
    library_raw: ffi::FT_Library,
    raw: ffi::FT_BitmapGlyph
}
impl BitmapGlyph {
    /// Takes ownership of `raw` and bumps the library's reference count
    /// (released again in `Drop`).
    ///
    /// # Safety
    /// `library_raw` and `raw` must be valid, live FreeType handles, and
    /// ownership of `raw` transfers to the returned value.
    pub unsafe fn from_raw(library_raw: ffi::FT_Library, raw: ffi::FT_BitmapGlyph) -> Self {
        ffi::FT_Reference_Library(library_raw);
        BitmapGlyph { library_raw, raw }
    }
    /// Horizontal bearing of the bitmap, in pixels.
    #[inline(always)]
    pub fn left(&self) -> i32 { unsafe { (*self.raw).left } }
    /// Vertical bearing of the bitmap, in pixels.
    #[inline(always)]
    pub fn top(&self) -> i32 { unsafe { (*self.raw).top } }
    /// Borrowed view of the glyph's bitmap data.
    #[inline(always)]
    pub fn bitmap(&self) -> Bitmap {
        unsafe { Bitmap::from_raw(&(*self.raw).bitmap) }
    }
    /// Borrow the underlying FFI record.
    #[inline(always)]
    pub fn raw(&self) -> &ffi::FT_BitmapGlyphRec { unsafe { &*self.raw } }
}
impl ::fallible::TryClone for BitmapGlyph {
    type Error = ::error::Error;
    /// Deep-copies the glyph via `FT_Glyph_Copy`; `from_raw` re-references
    /// the library for the new handle.
    fn try_clone(&self) -> ::FtResult<Self> { unsafe {
        let mut target = null_mut();
        ::error::from_ftret(ffi::FT_Glyph_Copy(self.raw as ffi::FT_Glyph, &mut target))?;
        Ok(BitmapGlyph::from_raw(self.library_raw, target as ffi::FT_BitmapGlyph))
    } }
}
impl Drop for BitmapGlyph {
    fn drop(&mut self) { unsafe {
        ffi::FT_Done_Glyph(self.raw as ffi::FT_Glyph);
        // NOTE(review): `expect` can panic inside `drop`, which aborts the
        // process if a panic is already unwinding — consider logging instead.
        ::error::from_ftret(ffi::FT_Done_Library(self.library_raw)).expect("Failed to drop bitmap glyph");
    } }
}
|
use bytemuck::{Pod, Zeroable};
use glam::Vec3;
use lucien_core::logger::logger;
use slog::warn;
/// GPU-facing vertex: position, normal and texture coordinate, laid out
/// `#[repr(C)]` so it matches the vertex buffer descriptor byte-for-byte.
#[repr(C)]
#[derive(Default, Debug, Copy, Clone)]
pub struct Vertex {
    pub position: [f32; 3],
    pub normal: [f32; 3],
    pub tex_coord: [f32; 2],
}
// SAFETY: `Vertex` is `#[repr(C)]`, `Copy`, and consists of exactly eight
// `f32`s (32 bytes, no padding), so every bit pattern is a valid value.
unsafe impl Pod for Vertex {}
// SAFETY: the all-zero bit pattern is a valid `Vertex` (all fields 0.0).
unsafe impl Zeroable for Vertex {}
#[allow(dead_code)]
/// Face normal of the triangle `(v0, v1, v2)`: the normalized cross product
/// of the two edges that share `v2`.
fn compute_normal(v0: &[f32; 3], v1: &[f32; 3], v2: &[f32; 3]) -> [f32; 3] {
    let edge_a = Vec3::from(*v0) - Vec3::from(*v2);
    let edge_b = Vec3::from(*v1) - Vec3::from(*v2);
    edge_a.cross(edge_b).normalize().into()
}
impl Vertex {
    /// Convert a loaded `tobj` mesh into a flat vertex list.
    ///
    /// Missing normals / texture coordinates are zero-filled, with a warning
    /// logged per missing attribute.
    pub fn from_tobj(mesh: &tobj::Mesh) -> Vec<Vertex> {
        // Preallocate: one vertex per (x, y, z) triple.
        let mut vertices: Vec<Vertex> = Vec::with_capacity(mesh.positions.len() / 3);
        for i in 0..mesh.positions.len() / 3 {
            vertices.push(Self {
                position: [
                    mesh.positions[i * 3],
                    mesh.positions[i * 3 + 1],
                    mesh.positions[i * 3 + 2],
                ],
                normal: if !mesh.normals.is_empty() {
                    [
                        mesh.normals[i * 3],
                        mesh.normals[i * 3 + 1],
                        mesh.normals[i * 3 + 2],
                    ]
                } else {
                    [0.0, 0.0, 0.0]
                },
                tex_coord: if !mesh.texcoords.is_empty() {
                    [mesh.texcoords[i * 2], mesh.texcoords[i * 2 + 1]]
                } else {
                    [0.0, 0.0]
                },
            });
        }
        let logger = logger();
        // todo calculate normals after I figured out what is the correct face...
        if mesh.texcoords.is_empty() {
            warn!(logger, "texture coord missing for mesh");
        }
        if mesh.normals.is_empty() {
            warn!(logger, "normals missing for mesh");
        }
        vertices
    }
    /// Vertex buffer layout matching `Vertex`:
    /// position (`Float3`) at offset 0, normal (`Float3`) at 12,
    /// tex_coord (`Float2`) at 24.
    pub fn desc<'a>() -> wgpu::VertexBufferDescriptor<'a> {
        wgpu::VertexBufferDescriptor {
            stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
            step_mode: wgpu::InputStepMode::Vertex,
            attributes: &[
                wgpu::VertexAttributeDescriptor {
                    offset: 0,
                    shader_location: 0,
                    format: wgpu::VertexFormat::Float3,
                },
                wgpu::VertexAttributeDescriptor {
                    offset: std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,
                    shader_location: 1,
                    format: wgpu::VertexFormat::Float3,
                },
                wgpu::VertexAttributeDescriptor {
                    // BUG FIX: tex_coord follows position AND normal, so its
                    // offset is 2 * size_of::<[f32; 3]>() = 24; previously it
                    // aliased the normal attribute at offset 12.
                    offset: (std::mem::size_of::<[f32; 3]>() * 2) as wgpu::BufferAddress,
                    shader_location: 2,
                    format: wgpu::VertexFormat::Float2,
                },
            ],
        }
    }
}
|
#[doc = r"Value read from the register"]
pub struct R {
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u32,
}
// svd2rust-style generated accessor API for the FIFOCTL register:
// whole-register read/modify/write through closures over `R`/`W` snapshots.
impl super::FIFOCTL {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: both R and W are seeded with the current value.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // `write` starts from the reset value, not the current contents.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
// Generated field readers (`*R`) and write proxies (`_*W`) for FIFOCTL.
// Each proxy clears its field's bits and ORs in the masked new value.
// --- TXTRIG: bits 0:2 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_TXTRIGR {
    bits: u8,
}
impl I2C_FIFOCTL_TXTRIGR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_TXTRIGW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_TXTRIGW<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits &= !(7 << 0);
        self.w.bits |= ((value as u32) & 7) << 0;
        self.w
    }
}
// --- DMATXENA: bit 13 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_DMATXENAR {
    bits: bool,
}
impl I2C_FIFOCTL_DMATXENAR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_DMATXENAW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_DMATXENAW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 13);
        self.w.bits |= ((value as u32) & 1) << 13;
        self.w
    }
}
// --- TXFLUSH: bit 14 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_TXFLUSHR {
    bits: bool,
}
impl I2C_FIFOCTL_TXFLUSHR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_TXFLUSHW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_TXFLUSHW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 14);
        self.w.bits |= ((value as u32) & 1) << 14;
        self.w
    }
}
// --- TXASGNMT: bit 15 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_TXASGNMTR {
    bits: bool,
}
impl I2C_FIFOCTL_TXASGNMTR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_TXASGNMTW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_TXASGNMTW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 15);
        self.w.bits |= ((value as u32) & 1) << 15;
        self.w
    }
}
// --- RXTRIG: bits 16:18 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_RXTRIGR {
    bits: u8,
}
impl I2C_FIFOCTL_RXTRIGR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_RXTRIGW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_RXTRIGW<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits &= !(7 << 16);
        self.w.bits |= ((value as u32) & 7) << 16;
        self.w
    }
}
// --- DMARXENA: bit 29 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_DMARXENAR {
    bits: bool,
}
impl I2C_FIFOCTL_DMARXENAR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_DMARXENAW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_DMARXENAW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 29);
        self.w.bits |= ((value as u32) & 1) << 29;
        self.w
    }
}
// --- RXFLUSH: bit 30 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_RXFLUSHR {
    bits: bool,
}
impl I2C_FIFOCTL_RXFLUSHR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_RXFLUSHW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_RXFLUSHW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 30);
        self.w.bits |= ((value as u32) & 1) << 30;
        self.w
    }
}
// --- RXASGNMT: bit 31 ---
#[doc = r"Value of the field"]
pub struct I2C_FIFOCTL_RXASGNMTR {
    bits: bool,
}
impl I2C_FIFOCTL_RXASGNMTR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _I2C_FIFOCTL_RXASGNMTW<'a> {
    w: &'a mut W,
}
impl<'a> _I2C_FIFOCTL_RXASGNMTW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 31);
        self.w.bits |= ((value as u32) & 1) << 31;
        self.w
    }
}
// Field accessors on the read snapshot: each extracts its field's bits.
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:2 - TX FIFO Trigger"]
    #[inline(always)]
    pub fn i2c_fifoctl_txtrig(&self) -> I2C_FIFOCTL_TXTRIGR {
        let bits = ((self.bits >> 0) & 7) as u8;
        I2C_FIFOCTL_TXTRIGR { bits }
    }
    #[doc = "Bit 13 - DMA TX Channel Enable"]
    #[inline(always)]
    pub fn i2c_fifoctl_dmatxena(&self) -> I2C_FIFOCTL_DMATXENAR {
        let bits = ((self.bits >> 13) & 1) != 0;
        I2C_FIFOCTL_DMATXENAR { bits }
    }
    #[doc = "Bit 14 - TX FIFO Flush"]
    #[inline(always)]
    pub fn i2c_fifoctl_txflush(&self) -> I2C_FIFOCTL_TXFLUSHR {
        let bits = ((self.bits >> 14) & 1) != 0;
        I2C_FIFOCTL_TXFLUSHR { bits }
    }
    #[doc = "Bit 15 - TX Control Assignment"]
    #[inline(always)]
    pub fn i2c_fifoctl_txasgnmt(&self) -> I2C_FIFOCTL_TXASGNMTR {
        let bits = ((self.bits >> 15) & 1) != 0;
        I2C_FIFOCTL_TXASGNMTR { bits }
    }
    #[doc = "Bits 16:18 - RX FIFO Trigger"]
    #[inline(always)]
    pub fn i2c_fifoctl_rxtrig(&self) -> I2C_FIFOCTL_RXTRIGR {
        let bits = ((self.bits >> 16) & 7) as u8;
        I2C_FIFOCTL_RXTRIGR { bits }
    }
    #[doc = "Bit 29 - DMA RX Channel Enable"]
    #[inline(always)]
    pub fn i2c_fifoctl_dmarxena(&self) -> I2C_FIFOCTL_DMARXENAR {
        let bits = ((self.bits >> 29) & 1) != 0;
        I2C_FIFOCTL_DMARXENAR { bits }
    }
    #[doc = "Bit 30 - RX FIFO Flush"]
    #[inline(always)]
    pub fn i2c_fifoctl_rxflush(&self) -> I2C_FIFOCTL_RXFLUSHR {
        let bits = ((self.bits >> 30) & 1) != 0;
        I2C_FIFOCTL_RXFLUSHR { bits }
    }
    #[doc = "Bit 31 - RX Control Assignment"]
    #[inline(always)]
    pub fn i2c_fifoctl_rxasgnmt(&self) -> I2C_FIFOCTL_RXASGNMTR {
        let bits = ((self.bits >> 31) & 1) != 0;
        I2C_FIFOCTL_RXASGNMTR { bits }
    }
}
// Field accessors on the write buffer: each returns a proxy borrowing `W`.
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:2 - TX FIFO Trigger"]
    #[inline(always)]
    pub fn i2c_fifoctl_txtrig(&mut self) -> _I2C_FIFOCTL_TXTRIGW {
        _I2C_FIFOCTL_TXTRIGW { w: self }
    }
    #[doc = "Bit 13 - DMA TX Channel Enable"]
    #[inline(always)]
    pub fn i2c_fifoctl_dmatxena(&mut self) -> _I2C_FIFOCTL_DMATXENAW {
        _I2C_FIFOCTL_DMATXENAW { w: self }
    }
    #[doc = "Bit 14 - TX FIFO Flush"]
    #[inline(always)]
    pub fn i2c_fifoctl_txflush(&mut self) -> _I2C_FIFOCTL_TXFLUSHW {
        _I2C_FIFOCTL_TXFLUSHW { w: self }
    }
    #[doc = "Bit 15 - TX Control Assignment"]
    #[inline(always)]
    pub fn i2c_fifoctl_txasgnmt(&mut self) -> _I2C_FIFOCTL_TXASGNMTW {
        _I2C_FIFOCTL_TXASGNMTW { w: self }
    }
    #[doc = "Bits 16:18 - RX FIFO Trigger"]
    #[inline(always)]
    pub fn i2c_fifoctl_rxtrig(&mut self) -> _I2C_FIFOCTL_RXTRIGW {
        _I2C_FIFOCTL_RXTRIGW { w: self }
    }
    #[doc = "Bit 29 - DMA RX Channel Enable"]
    #[inline(always)]
    pub fn i2c_fifoctl_dmarxena(&mut self) -> _I2C_FIFOCTL_DMARXENAW {
        _I2C_FIFOCTL_DMARXENAW { w: self }
    }
    #[doc = "Bit 30 - RX FIFO Flush"]
    #[inline(always)]
    pub fn i2c_fifoctl_rxflush(&mut self) -> _I2C_FIFOCTL_RXFLUSHW {
        _I2C_FIFOCTL_RXFLUSHW { w: self }
    }
    #[doc = "Bit 31 - RX Control Assignment"]
    #[inline(always)]
    pub fn i2c_fifoctl_rxasgnmt(&mut self) -> _I2C_FIFOCTL_RXASGNMTW {
        _I2C_FIFOCTL_RXASGNMTW { w: self }
    }
}
|
//! Simple, CPU cache-friendly epoch-based reclamation (EBR).
//!
//! ```rust
//! use rsdb::ebr::Ebr;
//!
//! let mut ebr: Ebr<Box<u64>> = Ebr::default();
//!
//! let mut guard = ebr.pin();
//!
//! guard.defer_drop(Box::new(1));
//! ```
use std::{
collections::{BTreeMap, VecDeque},
mem::{take, MaybeUninit},
num::NonZeroU64,
sync::RwLock,
sync::{
atomic::{
fence, AtomicU64,
Ordering::{Acquire, Relaxed, Release},
},
mpsc::{channel, Receiver, Sender},
Arc, Mutex,
},
};
// Bump the global epoch roughly once per 1024 pins (see `Ebr::pin`).
const BUMP_EPOCH_OPS: u64 = 1024;
const BUMP_EPOCH_TRAILING_ZEROS: u32 = BUMP_EPOCH_OPS.trailing_zeros();
// Number of deferred items a `Bag` holds before it is sealed and queued.
const GARBAGE_SLOTS_PER_BAG: usize = 128;
/// Per-thread handle to the epoch-based reclamation collector. Cloning
/// registers a new participant; dropping orphans its outstanding garbage.
#[derive(Debug)]
pub struct Ebr<T: Send + 'static> {
    // total collector count for id generation
    collectors: Arc<AtomicU64>,
    // the unique ID for this Ebr handle
    local_id: u64,
    // the quiescent epoch for this Ebr handle
    local_quiescent_epoch: Arc<AtomicU64>,
    // map from collector id to its quiescent epoch
    registry: Arc<RwLock<BTreeMap<u64, Arc<AtomicU64>>>>,
    // the highest epoch that gc is safe for
    global_quiescent_epoch: Arc<AtomicU64>,
    // new garbage gets assigned this epoch
    global_current_epoch: Arc<AtomicU64>,
    // epoch-tagged garbage waiting to be safely dropped
    garbage_queue: VecDeque<Bag<T>>,
    // new garbage accumulates here first
    current_garbage_bag: Bag<T>,
    // receives garbage from terminated threads
    maintenance_lock: Arc<Mutex<Receiver<Bag<T>>>>,
    // send outstanding garbage here when this Ebr drops
    orphan_sender: Sender<Bag<T>>,
    // count of pin attempts from this collector
    pins: u64,
}
impl<T: Send + 'static> Drop for Ebr<T> {
    fn drop(&mut self) {
        // Send all outstanding garbage to the orphan queue.
        for old_bag in take(&mut self.garbage_queue) {
            self.orphan_sender.send(old_bag).unwrap();
        }
        // Seal and orphan the partially-filled bag as well.
        if self.current_garbage_bag.len > 0 {
            let mut full_bag = take(&mut self.current_garbage_bag);
            full_bag.seal(self.global_current_epoch.load(Acquire));
            self.orphan_sender.send(full_bag).unwrap();
        }
        // Deregister so this handle no longer constrains the quiescent epoch.
        self.registry
            .write()
            .unwrap()
            .remove(&self.local_id)
            .expect("unknown id deregistered from Ebr");
    }
}
impl<T: Send + 'static> Default for Ebr<T> {
    /// Create a fresh collector starting at epoch 1, with this handle
    /// registered as the sole participant.
    fn default() -> Ebr<T> {
        let collectors = Arc::new(AtomicU64::new(0));
        let local_id = collectors.fetch_add(1, Relaxed);
        let current_epoch = 1;
        let quiescent_epoch = current_epoch - 1;
        let local_quiescent_epoch = Arc::new(AtomicU64::new(quiescent_epoch));
        let registry = vec![(local_id, local_quiescent_epoch.clone())]
            .into_iter()
            .collect();
        // The channel collects garbage orphaned by dropped handles; the
        // receiver doubles as the maintenance lock.
        let (tx, rx) = channel();
        Ebr {
            collectors,
            registry: Arc::new(RwLock::new(registry)),
            local_id,
            local_quiescent_epoch,
            global_current_epoch: Arc::new(AtomicU64::new(current_epoch)),
            global_quiescent_epoch: Arc::new(AtomicU64::new(quiescent_epoch)),
            garbage_queue: Default::default(),
            current_garbage_bag: Bag::default(),
            maintenance_lock: Arc::new(Mutex::new(rx)),
            orphan_sender: tx,
            pins: 0,
        }
    }
}
impl<T: Send + 'static> Clone for Ebr<T> {
    /// Register a new participant: fresh id and quiescent slot, sharing the
    /// epochs, registry, and orphan channel of the original.
    fn clone(&self) -> Ebr<T> {
        let local_id = self.collectors.fetch_add(1, Relaxed);
        let global_current_epoch = self.global_current_epoch.load(Acquire);
        let local_quiescent_epoch = Arc::new(AtomicU64::new(global_current_epoch));
        self.registry
            .write()
            .unwrap()
            .insert(local_id, local_quiescent_epoch.clone());
        Ebr {
            collectors: self.collectors.clone(),
            registry: self.registry.clone(),
            local_id,
            local_quiescent_epoch,
            global_quiescent_epoch: self.global_quiescent_epoch.clone(),
            global_current_epoch: self.global_current_epoch.clone(),
            garbage_queue: Default::default(),
            current_garbage_bag: Bag::default(),
            maintenance_lock: self.maintenance_lock.clone(),
            orphan_sender: self.orphan_sender.clone(),
            pins: 0,
        }
    }
}
impl<T: Send + 'static> Ebr<T> {
    /// Enter a critical section: publish this handle's view of the current
    /// epoch so the maintainer will not free garbage we might still observe.
    pub fn pin(&mut self) -> Guard<'_, T> {
        self.pins += 1;
        let global_current_epoch = self.global_current_epoch.load(Relaxed);
        self.local_quiescent_epoch
            .store(global_current_epoch, Release);
        // Run maintenance roughly every BUMP_EPOCH_OPS pins (when the pin
        // count's trailing zeros match 1 << BUMP_EPOCH_TRAILING_ZEROS).
        let should_bump_epoch = self.pins.trailing_zeros() == BUMP_EPOCH_TRAILING_ZEROS;
        if should_bump_epoch {
            self.maintenance();
        }
        Guard { ebr: self }
    }
    /// Advance the global epoch; if we win the maintenance lock, also raise
    /// the global quiescent epoch and drain the orphan garbage channel.
    #[cold]
    fn maintenance(&mut self) {
        self.global_current_epoch.fetch_add(1, Relaxed);
        // Non-blocking: if another thread is already maintaining, skip.
        let orphans_rx = if let Ok(orphans_rx) = self.maintenance_lock.try_lock() {
            orphans_rx
        } else {
            return;
        };
        // we have now been "elected" global maintainer,
        // which has responsibility for:
        // * bumping the global quiescent epoch
        // * clearing the orphan garbage queue
        let global_quiescent_epoch = self
            .registry
            .read()
            .unwrap()
            .values()
            .map(|v| v.load(Relaxed))
            .min()
            .unwrap();
        fence(Release);
        // u64::MAX marks a handle with no active guard (see `Guard::drop`).
        // NOTE(review): confirm a minimum of MAX is actually unreachable when
        // every registered handle is idle.
        assert_ne!(global_quiescent_epoch, u64::MAX);
        self.global_quiescent_epoch
            .fetch_max(global_quiescent_epoch, Release);
        // Garbage from dead threads: drop it if already quiescent, otherwise
        // adopt it into our local queue.
        while let Ok(bag) = orphans_rx.try_recv() {
            if bag.final_epoch.unwrap().get() < global_quiescent_epoch {
                drop(bag)
            } else {
                self.garbage_queue.push_back(bag);
            }
        }
    }
}
/// RAII pin token: garbage deferred through it stays alive until every guard
/// from an earlier epoch has been dropped.
pub struct Guard<'a, T: Send + 'static> {
    ebr: &'a mut Ebr<T>,
}
impl<'a, T: Send + 'static> Drop for Guard<'a, T> {
    fn drop(&mut self) {
        // set this to a large number to ensure it is not counted by `min_epoch()`
        self.ebr.local_quiescent_epoch.store(u64::MAX, Release);
    }
}
impl<'a, T: Send + 'static> Guard<'a, T> {
    /// Queue `item` to be dropped once all current readers are quiescent.
    pub fn defer_drop(&mut self, item: T) {
        self.ebr.current_garbage_bag.push(item);
        if self.ebr.current_garbage_bag.is_full() {
            // Seal the full bag with the current epoch and queue it.
            let mut full_bag = take(&mut self.ebr.current_garbage_bag);
            let global_current_epoch = self.ebr.global_current_epoch.load(Acquire);
            full_bag.seal(global_current_epoch);
            self.ebr.garbage_queue.push_back(full_bag);
            let quiescent = self.ebr.global_quiescent_epoch.load(Acquire);
            assert!(global_current_epoch > quiescent);
            // Free every queued bag whose epoch is now quiescent. The loop
            // terminates before emptying the queue because the bag pushed
            // above is sealed with an epoch > `quiescent`.
            while self
                .ebr
                .garbage_queue
                .front()
                .unwrap()
                .final_epoch
                .unwrap()
                .get()
                < quiescent
            {
                let bag = self.ebr.garbage_queue.pop_front().unwrap();
                drop(bag);
            }
        }
    }
}
/// Fixed-capacity array of deferred items, tagged at sealing time with the
/// epoch after which it may be dropped.
#[derive(Debug)]
struct Bag<T> {
    garbage: [MaybeUninit<T>; GARBAGE_SLOTS_PER_BAG],
    final_epoch: Option<NonZeroU64>,
    len: usize,
}
impl<T> Drop for Bag<T> {
    fn drop(&mut self) {
        for index in 0..self.len {
            unsafe {
                // SAFETY: slots 0..len were initialized by `push` and each is
                // dropped exactly once here.
                self.garbage[index].as_mut_ptr().drop_in_place();
            }
        }
    }
}
impl<T> Bag<T> {
    /// Store `item` in the next free slot; callers must check `is_full` first.
    fn push(&mut self, item: T) {
        debug_assert!(self.len < GARBAGE_SLOTS_PER_BAG);
        unsafe {
            // SAFETY: `len < GARBAGE_SLOTS_PER_BAG` (debug-asserted above),
            // so the slot exists; writing does not drop the old (uninit) value.
            self.garbage[self.len].as_mut_ptr().write(item);
        }
        self.len += 1;
    }
    const fn is_full(&self) -> bool {
        self.len == GARBAGE_SLOTS_PER_BAG
    }
    /// Tag the bag with the epoch it was retired in.
    // NOTE(review): panics if `epoch` is 0 — callers seal with the global
    // current epoch, which starts at 1 and only increases.
    fn seal(&mut self, epoch: u64) {
        self.final_epoch = Some(NonZeroU64::new(epoch).unwrap());
    }
}
impl<T: Send + 'static> Default for Bag<T> {
    fn default() -> Bag<T> {
        Bag {
            final_epoch: None,
            len: 0,
            garbage: unsafe {
                // SAFETY: an array of `MaybeUninit` needs no initialization,
                // so `assume_init` on the outer `MaybeUninit` is sound.
                MaybeUninit::<[MaybeUninit<T>; GARBAGE_SLOTS_PER_BAG]>::uninit().assume_init()
            },
        }
    }
}
|
use libc::{
    fclose, fileno, fopen, ftruncate, mmap, msync, munmap, MAP_FAILED, MAP_PRIVATE, MAP_SHARED,
    MS_ASYNC, PROT_READ, PROT_WRITE,
};
use std::mem::size_of;
use std::os::raw::c_void;
use std::ptr;
/// On-disk record layout. `#[repr(C)]` pins field order and padding so the
/// bytes written through the file mapping have a stable layout.
#[repr(C)]
struct Record {
    id: u32,
    name: u32
}
fn main() {
unsafe {
let fp_name = b"mmap.dat\x00".as_ptr();
let fp = fopen(fp_name as *const i8, b"wb\x00".as_ptr() as *const i8);
if fp.is_null(){
println!("open failure");
return;
}
let total = 100usize;
let mmap_size = size_of::<Record>() * total;
println!("mmap size {}", mmap_size);
let mapped = mmap(ptr::null_mut(), mmap_size, PROT_READ | PROT_WRITE, MAP_PRIVATE, fp as i32, 0) as *mut Record;
println!("mapped {:?}", mapped);
let mut record = Record {
id: 0,
name: 0,
};
for i in 0..total {
record.id = i as u32;
record.name = i as u32
}
(*mapped.offset(43)).id = 243;
(*mapped.offset(43)).name = 243;
msync(mapped as *mut c_void, mmap_size, MS_ASYNC);
println!("msync {:?}", mapped);
munmap(mapped as *mut c_void, mmap_size);
println!("munmap {:?}", mapped);
fclose(fp);
}
} |
use embedded_hal::{
digital::v2::OutputPin,
timer::{CountDown, Periodic},
};
use nb::block;
use stm32f4xx_hal::{prelude::*, time::Hertz};
// 4-bit-to-6-bit symbol table: each nibble (0..=15) maps to a 6-bit
// transmit symbol (used in `enqueue_bytes`).
const SYMBOLS: [u8; 16] = [
    0xd, 0xe, 0x13, 0x15, 0x16, 0x19, 0x1a, 0x1c, 0x23, 0x25, 0x26, 0x29, 0x2a, 0x2c, 0x32, 0x34,
];
/// Driver errors; `Port` covers failures driving the output pin.
#[derive(Debug)]
pub enum Error {
    Port,
}
/// Bit-banged ASK/OOK transmitter over a GPIO pin, paced by a periodic timer.
pub struct RadioHeadASK<P, T>
where
    P: OutputPin,
    T: CountDown + Periodic,
{
    // data pin driven high/low per transmitted bit
    pin: P,
    // periodic timer setting the bit rate
    timer: T,
    // running frame check sequence, reset per packet
    fcs: u16,
    // queued 6-bit symbols awaiting flush
    buffer: [u8; 128],
    // number of symbols currently in `buffer`
    buffer_ptr: usize,
}
impl<P, T> RadioHeadASK<P, T>
where
P: OutputPin,
T: CountDown<Time = Hertz> + Periodic,
{
pub fn new(mut pin: P, timer: T) -> Self {
match pin.set_low() {
Ok(_) => {}
Err(_) => panic!("Couldn't set pin to low!"),
}
Self {
pin,
timer,
fcs: 0xffff,
buffer: [0_u8; 128],
buffer_ptr: 0,
}
}
fn update_fcs(&mut self, data: u8) {
let mut new_data = data ^ (self.fcs as u8);
new_data ^= new_data << 4;
self.fcs = (((new_data as u16) << 8) | (self.fcs >> 8))
^ ((new_data >> 4) as u16)
^ ((new_data as u16) << 3);
}
pub fn send_packet(
&mut self,
from: u8,
to: u8,
id: u8,
header_flags: u8,
content: &[u8],
) -> Result<(), Error> {
self.buffer_ptr = 0;
self.fcs = 0xffff;
for _i in 0..6 {
// bits will be inverted upon sending
self.enqueue_sextet(0x2a);
}
self.enqueue_sextet(0x38);
self.enqueue_sextet(0x2c);
let msg_len = content.len() + 7;
self.enqueue_bytes(&[msg_len as u8, from, to, id, header_flags])?;
self.enqueue_bytes(content)?;
let crc = !self.fcs;
self.enqueue_bytes(&[(crc & 0xff) as u8, (crc >> 8) as u8])?;
self.flush()?;
Ok(())
}
fn set_bit(&mut self, bit: bool) -> Result<(), Error> {
if bit {
self.pin.set_high().map_err(|_| Error::Port)?;
} else {
self.pin.set_low().map_err(|_| Error::Port)?;
}
Ok(())
}
fn enqueue_sextet(&mut self, sextet: u8) {
self.buffer[self.buffer_ptr] = sextet;
self.buffer_ptr += 1;
}
fn flush(&mut self) -> Result<(), Error> {
self.timer.start(2000.hz());
for n in 0..self.buffer_ptr {
for i in 0..6 {
self.set_bit((self.buffer[n] & (0x01 << i)) > 0)?;
block!(self.timer.wait()).unwrap();
}
}
self.set_bit(false).unwrap();
Ok(())
}
fn enqueue_bytes(&mut self, bytes: &[u8]) -> Result<(), Error> {
bytes.iter().for_each(|b| {
self.enqueue_sextet(SYMBOLS[(b >> 4) as usize]);
self.enqueue_sextet(SYMBOLS[(b & 0xf) as usize]);
self.update_fcs(*b);
});
Ok(())
}
}
|
#[doc = r"Value read from the register"]
pub struct R {
    bits: u8,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u8,
}
// svd2rust-style generated accessor API for the TXCSRH3 register (8-bit):
// whole-register read/modify/write through closures over `R`/`W` snapshots.
impl super::TXCSRH3 {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: both R and W are seeded with the current value.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // `write` starts from the reset value, not the current contents.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u8 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
// Generated field readers (`*R`) and write proxies (`_*W`) for TXCSRH3.
// Each proxy clears its field's bit and ORs in the masked new value.
// --- DT: bit 0 ---
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_DTR {
    bits: bool,
}
impl USB_TXCSRH3_DTR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_DTW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_DTW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 0);
        self.w.bits |= ((value as u8) & 1) << 0;
        self.w
    }
}
// --- DTWE: bit 1 ---
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_DTWER {
    bits: bool,
}
impl USB_TXCSRH3_DTWER {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_DTWEW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_DTWEW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 1);
        self.w.bits |= ((value as u8) & 1) << 1;
        self.w
    }
}
// --- DMAMOD: bit 2 ---
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_DMAMODR {
    bits: bool,
}
impl USB_TXCSRH3_DMAMODR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_DMAMODW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_DMAMODW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 2);
        self.w.bits |= ((value as u8) & 1) << 2;
        self.w
    }
}
// --- FDT: bit 3 ---
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_FDTR {
    bits: bool,
}
impl USB_TXCSRH3_FDTR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_FDTW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_FDTW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 3);
        self.w.bits |= ((value as u8) & 1) << 3;
        self.w
    }
}
// --- DMAEN: bit 4 ---
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_DMAENR {
    bits: bool,
}
impl USB_TXCSRH3_DMAENR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_DMAENW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_DMAENW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 4);
        self.w.bits |= ((value as u8) & 1) << 4;
        self.w
    }
}
// --- MODE: bit 5 ---
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_MODER {
    bits: bool,
}
impl USB_TXCSRH3_MODER {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_MODEW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_MODEW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 5);
        self.w.bits |= ((value as u8) & 1) << 5;
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_ISOR {
bits: bool,
}
impl USB_TXCSRH3_ISOR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_ISOW<'a> {
w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_ISOW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 6);
self.w.bits |= ((value as u8) & 1) << 6;
self.w
}
}
#[doc = r"Value of the field"]
pub struct USB_TXCSRH3_AUTOSETR {
bits: bool,
}
impl USB_TXCSRH3_AUTOSETR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _USB_TXCSRH3_AUTOSETW<'a> {
w: &'a mut W,
}
impl<'a> _USB_TXCSRH3_AUTOSETW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 7);
self.w.bits |= ((value as u8) & 1) << 7;
self.w
}
}
// Read-side view of the TXCSRH3 register: each accessor extracts one bit
// from the snapshot in `self.bits` and wraps it in its field-reader type.
// NOTE(review): generated (svd2rust-style) code; regenerate rather than edit.
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
    #[doc = "Bit 0 - Data Toggle"]
    #[inline(always)]
    pub fn usb_txcsrh3_dt(&self) -> USB_TXCSRH3_DTR {
        let bits = ((self.bits >> 0) & 1) != 0;
        USB_TXCSRH3_DTR { bits }
    }
    #[doc = "Bit 1 - Data Toggle Write Enable"]
    #[inline(always)]
    pub fn usb_txcsrh3_dtwe(&self) -> USB_TXCSRH3_DTWER {
        let bits = ((self.bits >> 1) & 1) != 0;
        USB_TXCSRH3_DTWER { bits }
    }
    #[doc = "Bit 2 - DMA Request Mode"]
    #[inline(always)]
    pub fn usb_txcsrh3_dmamod(&self) -> USB_TXCSRH3_DMAMODR {
        let bits = ((self.bits >> 2) & 1) != 0;
        USB_TXCSRH3_DMAMODR { bits }
    }
    #[doc = "Bit 3 - Force Data Toggle"]
    #[inline(always)]
    pub fn usb_txcsrh3_fdt(&self) -> USB_TXCSRH3_FDTR {
        let bits = ((self.bits >> 3) & 1) != 0;
        USB_TXCSRH3_FDTR { bits }
    }
    #[doc = "Bit 4 - DMA Request Enable"]
    #[inline(always)]
    pub fn usb_txcsrh3_dmaen(&self) -> USB_TXCSRH3_DMAENR {
        let bits = ((self.bits >> 4) & 1) != 0;
        USB_TXCSRH3_DMAENR { bits }
    }
    #[doc = "Bit 5 - Mode"]
    #[inline(always)]
    pub fn usb_txcsrh3_mode(&self) -> USB_TXCSRH3_MODER {
        let bits = ((self.bits >> 5) & 1) != 0;
        USB_TXCSRH3_MODER { bits }
    }
    #[doc = "Bit 6 - Isochronous Transfers"]
    #[inline(always)]
    pub fn usb_txcsrh3_iso(&self) -> USB_TXCSRH3_ISOR {
        let bits = ((self.bits >> 6) & 1) != 0;
        USB_TXCSRH3_ISOR { bits }
    }
    #[doc = "Bit 7 - Auto Set"]
    #[inline(always)]
    pub fn usb_txcsrh3_autoset(&self) -> USB_TXCSRH3_AUTOSETR {
        let bits = ((self.bits >> 7) & 1) != 0;
        USB_TXCSRH3_AUTOSETR { bits }
    }
}
// Write-side view of the TXCSRH3 register: each method returns the write
// proxy for one field; the proxies mutate `self.bits` in place.
// NOTE(review): generated (svd2rust-style) code; regenerate rather than edit.
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - Data Toggle"]
    #[inline(always)]
    pub fn usb_txcsrh3_dt(&mut self) -> _USB_TXCSRH3_DTW {
        _USB_TXCSRH3_DTW { w: self }
    }
    #[doc = "Bit 1 - Data Toggle Write Enable"]
    #[inline(always)]
    pub fn usb_txcsrh3_dtwe(&mut self) -> _USB_TXCSRH3_DTWEW {
        _USB_TXCSRH3_DTWEW { w: self }
    }
    #[doc = "Bit 2 - DMA Request Mode"]
    #[inline(always)]
    pub fn usb_txcsrh3_dmamod(&mut self) -> _USB_TXCSRH3_DMAMODW {
        _USB_TXCSRH3_DMAMODW { w: self }
    }
    #[doc = "Bit 3 - Force Data Toggle"]
    #[inline(always)]
    pub fn usb_txcsrh3_fdt(&mut self) -> _USB_TXCSRH3_FDTW {
        _USB_TXCSRH3_FDTW { w: self }
    }
    #[doc = "Bit 4 - DMA Request Enable"]
    #[inline(always)]
    pub fn usb_txcsrh3_dmaen(&mut self) -> _USB_TXCSRH3_DMAENW {
        _USB_TXCSRH3_DMAENW { w: self }
    }
    #[doc = "Bit 5 - Mode"]
    #[inline(always)]
    pub fn usb_txcsrh3_mode(&mut self) -> _USB_TXCSRH3_MODEW {
        _USB_TXCSRH3_MODEW { w: self }
    }
    #[doc = "Bit 6 - Isochronous Transfers"]
    #[inline(always)]
    pub fn usb_txcsrh3_iso(&mut self) -> _USB_TXCSRH3_ISOW {
        _USB_TXCSRH3_ISOW { w: self }
    }
    #[doc = "Bit 7 - Auto Set"]
    #[inline(always)]
    pub fn usb_txcsrh3_autoset(&mut self) -> _USB_TXCSRH3_AUTOSETW {
        _USB_TXCSRH3_AUTOSETW { w: self }
    }
}
|
use cluFullTransmute::contract::Contract;
/*
For example, we will sign a contract to convert a String to a Vec<u8>,
although this may not be exactly the case.
Contracts are needed to create more secure APIs using transmutation in
situations where it can't be proven.
*/
/// Example wrapper holding a signed transmutation contract: the stored value
/// is a `&'static str` that is contractually viewable as `&'static [u8]`.
struct MyData {
    data: Contract<&'static str, &'static [u8]>,
}
impl MyData {
    /// Creates a `MyData`, signing the str-to-bytes transmutation contract.
    #[inline]
    const fn new(data: &'static str) -> Self {
        let data = unsafe {
            // SAFETY: `checksize_new_or_panic` can only guarantee that the
            // two types have equal size; creating a contract is always
            // unsafe, since the validity of transmuting between these types
            // can only be argued informally. Once the contract is signed,
            // the functions that work with the transmuted value are no
            // longer marked unsafe.
            Contract::checksize_new_or_panic(data)
        };
        Self {
            data,
        }
    }
    /// Reads the data in its original type (`&'static str`).
    #[inline]
    pub fn as_data(&self) -> &'static str {
        &self.data
    }
    /// Views the data in its contracted target type (`&'static [u8]`).
    #[inline]
    pub fn as_sliceu8(&self) -> &'static [u8] {
        self.data.as_datato()
    }
    /// Consumes `self`, converting the data into the target type.
    #[inline]
    pub fn into(self) -> &'static [u8] {
        self.data.into()
    }
}
// Demonstrates reading the contracted value both as its original type and
// as its transmuted target type, then consuming it.
fn main() {
    const C_DATA: &'static str = "Test";
    // Construct from the original type (&'static str).
    let data = MyData::new(C_DATA);
    assert_eq!(data.as_data(), C_DATA); // read back as &'static str
    assert_eq!(data.as_sliceu8(), C_DATA.as_bytes()); // view as &'static [u8]
    //
    // Consume, converting &'static str -> &'static [u8].
    let vec = data.into();
    assert_eq!(vec, C_DATA.as_bytes());
}
|
use std::boxed::Box;
// Took a swing at it myself, got a bad-but-working
// version, then followed this tutorial:
// https://rust-unofficial.github.io/too-many-lists/second-iter-mut.html
/// A singly linked node holding one value and an optional successor.
#[derive(Debug)]
struct Node<T> {
    value: T,
    next: Option<Box<Node<T>>>,
}

/// A singly linked, stack-like list: `push` and `pop` operate on the front.
#[derive(Debug)]
pub struct LinkedList<T> {
    first: Option<Box<Node<T>>>,
}

impl<T> LinkedList<T> {
    /// Creates an empty list.
    pub const fn new() -> Self {
        LinkedList { first: None }
    }
    /// Pushes `val` onto the front of the list. O(1).
    pub fn push(&mut self, val: T) {
        let new_node = Box::new(Node {
            value: val,
            next: self.first.take(),
        });
        self.first = Some(new_node);
    }
    /// Removes and returns the front value, or `None` if the list is empty.
    pub fn pop(&mut self) -> Option<T> {
        self.first.take().map(|node| {
            self.first = node.next;
            node.value
        })
    }
    /// Returns a shared reference to the front value, if any.
    pub fn peek(&self) -> Option<&T> {
        self.first.as_ref().map(|node| &node.value)
    }
    /// Returns a mutable reference to the front value, if any.
    pub fn peek_mut(&mut self) -> Option<&mut T> {
        self.first.as_mut().map(|node| &mut node.value)
    }
}

// The derived (recursive) drop would overflow the stack for very long lists;
// unlink the nodes iteratively instead.
impl<T> Drop for LinkedList<T> {
    fn drop(&mut self) {
        let mut cur = self.first.take();
        while let Some(mut node) = cur {
            cur = node.next.take();
        }
    }
}

/// Consuming iterator: yields owned values, front to back.
pub struct IntoIter<T>(LinkedList<T>);
impl<T> LinkedList<T> {
    /// Consumes the list, returning an iterator over its values.
    pub fn into_iter(self) -> IntoIter<T> {
        IntoIter(self)
    }
}
impl<T> Iterator for IntoIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        // Popping the inner list drains it front to back.
        self.0.pop()
    }
}

/// Borrowing iterator: yields `&T`, front to back.
pub struct Iter<'a, T> {
    next: Option<&'a Node<T>>,
}
impl<T> LinkedList<T> {
    /// Returns an iterator over shared references to the values.
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { next: self.first.as_deref() }
    }
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        self.next.map(|node| {
            self.next = node.next.as_deref();
            &node.value
        })
    }
}

/// Borrowing iterator: yields `&mut T`, front to back.
pub struct IterMut<'a, T> {
    next: Option<&'a mut Node<T>>,
}
impl<T> LinkedList<T> {
    /// Returns an iterator over mutable references to the values.
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        IterMut { next: self.first.as_deref_mut() }
    }
}
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<Self::Item> {
        // `take` moves the &mut out so the returned borrow can outlive `self`.
        self.next.take().map(|node| {
            self.next = node.next.as_deref_mut();
            &mut node.value
        })
    }
}
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use std::collections::HashMap;
use ethernet as eth;
use netstack3_core::{DeviceId, IdMapCollection, IdMapCollectionKey};
pub type BindingId = u64;
/// Keeps tabs on devices.
///
/// `Devices` keeps a list of devices that can be either *active* or *inactive*.
/// An *active* device has an associated [`CoreId`] and an *inactive* one
/// doesn't.
///
/// The type parameters `C` and `I` are for the core ID type and the extra
/// information associated with the device, respectively, and default to the
/// types used by `EventLoop` for brevity in the main use case. The type
/// parameters are there to allow testing without dependencies on `core`.
// NOTE: Devices uses separate hash maps internally for active and inactive
// devices to guarantee that the fast path - sending and receiving frames - can
// be each be achieved with a single hash map lookup to get the necessary
// information. When sending frames, we lookup with CoreId on active_devices
// to retrieve the driver client, and when receiving frames we just get the
// CoreId from id_map.
// For users of this mod, this split in hash maps should be completely opaque.
// The main use cases are operated as follows:
// - Core asks us to send a frame on a device, indexed by the core ID:
// => A single lookup in active_devices will retrieve the device info, which
// contains the device client.
// - We receive a frame from a device, indexed by the binding ID:
// => A single lookup in id_map will retrieve the core ID, which is enough
// information to send the frame into core.
// - Core asks us to send a message about a device to an application client:
// => A single lookup in active_devices will retrieve the device info, which
// contains the binding ID and all other information that an application
// client may need.
// - An application client wants to operate on an active device (e.g. add an IP
// address):
// => Two lookups are necessary, one in id_map to retrieve the core ID
// followed by one in active_devices.
// - An application client asks us to bring a device up or down, or a device
// goes online or offline (in both cases addressed by binding ID):
// => For the down case, entries are removed from both active_devices and
// id_map and the device info is moved into inactive_devices.
// => For the up case, the inactive_devices entry is removed and one entry is
// created in each of active_devices and id_map.
pub struct Devices<C: IdMapCollectionKey = DeviceId, I = CommonInfo> {
    // Active devices, indexed by core ID (single-lookup fast path for
    // sending frames; see the NOTE above).
    active_devices: IdMapCollection<C, DeviceInfo<C, I>>,
    // invariant: all values in id_map are valid keys in active_devices.
    id_map: HashMap<BindingId, C>,
    // Inactive devices (no core ID attached), keyed by binding ID.
    inactive_devices: HashMap<BindingId, DeviceInfo<C, I>>,
    // Highest BindingId handed out so far; IDs are never reused.
    last_id: BindingId,
}
impl<C: IdMapCollectionKey, I> Default for Devices<C, I> {
    fn default() -> Self {
        // Start empty with the ID counter at zero (the first ID issued is 1).
        Self {
            active_devices: IdMapCollection::new(),
            id_map: HashMap::new(),
            inactive_devices: HashMap::new(),
            last_id: 0,
        }
    }
}
/// Errors that may be returned by switching a device state.
///
/// See [`Devices::activate_device`] and [`Devices::deactivate_device`].
#[derive(Debug, Eq, PartialEq)]
pub enum ToggleError {
    /// No change to device's active or inactive state.
    NoChange,
    /// Informed device identifier not found.
    NotFound,
}
impl<C, I> Devices<C, I>
where
    C: IdMapCollectionKey + Clone,
{
    /// Allocates a new [`BindingId`].
    ///
    /// IDs start at 1, grow monotonically and are never reused.
    fn alloc_id(&mut self) -> BindingId {
        self.last_id += 1;
        self.last_id
    }
    /// Adds a new active device.
    ///
    /// Adds a new active device if the informed `core_id` is valid (i.e., not
    /// currently tracked by [`Devices`]). A new [`BindingId`] will be allocated
    /// and a [`DeviceInfo`] struct will be created with the provided `info` and
    /// IDs.
    pub fn add_active_device(&mut self, core_id: C, info: I) -> Option<BindingId> {
        if self.active_devices.get(&core_id).is_some() {
            return None;
        }
        let id = self.alloc_id();
        self.active_devices
            .insert(&core_id, DeviceInfo { id, core_id: Some(core_id.clone()), info });
        // Maintain the invariant: id_map values are keys in active_devices.
        self.id_map.insert(id, core_id);
        Some(id)
    }
    /// Adds a new device in the inactive state.
    ///
    /// Adds a new device with `info`. A new [`BindingId`] will be allocated
    /// and a [`DeviceInfo`] struct will be created with the provided `info` and
    /// the generated [`BindingId`].
    ///
    /// The new device will *not* have a `core_id` allocated, that can be done
    /// by calling [`Devices::activate_device`] with the newly created
    /// [`BindingId`].
    pub fn add_device(&mut self, info: I) -> BindingId {
        let id = self.alloc_id();
        self.inactive_devices.insert(id, DeviceInfo { id, core_id: None, info });
        id
    }
    /// Activates a device with `id`, using the closure to associate a `core_id`
    /// with it.
    ///
    /// Activates a device with `id` if all the conditions are true:
    /// - `id` exists.
    /// - `id` is not already attached to a `core_id`.
    ///
    /// On success, returns a ref to the updated [`DeviceInfo`] containing the
    /// provided `core_id`.
    ///
    /// # Panics
    ///
    /// Panics if the returned core ID `C` exists and is tracked by this
    /// `Devices` collection.
    pub fn activate_device<F: FnOnce(&DeviceInfo<C, I>) -> C>(
        &mut self,
        id: BindingId,
        generate_core_id: F,
    ) -> Result<&DeviceInfo<C, I>, ToggleError> {
        // Presence in id_map means the device is already active.
        if self.id_map.contains_key(&id) {
            return Err(ToggleError::NoChange);
        }
        match self.inactive_devices.remove(&id) {
            None => Err(ToggleError::NotFound),
            Some(mut info) => {
                let core_id = generate_core_id(&info);
                assert!(self.active_devices.get(&core_id).is_none());
                assert!(info.core_id.is_none());
                info.core_id = Some(core_id.clone());
                self.id_map.insert(id, core_id.clone());
                self.active_devices.insert(&core_id, info);
                // we can unwrap here because we just inserted the device
                // above.
                Ok(self.active_devices.get(&core_id).unwrap())
            }
        }
    }
    /// Deactivates a device with `id`, disassociating its `core_id`.
    ///
    /// Deactivates a device with `id` if all the conditions are true:
    /// - `id` exists.
    /// - `id` has an associated `core_id`.
    ///
    /// On success, returns a ref to the updated [`DeviceInfo`] and the
    /// previously associated `core_id`.
    pub fn deactivate_device(
        &mut self,
        id: BindingId,
    ) -> Result<(C, &mut DeviceInfo<C, I>), ToggleError> {
        if self.inactive_devices.contains_key(&id) {
            return Err(ToggleError::NoChange);
        }
        match self.id_map.remove(&id) {
            None => Err(ToggleError::NotFound),
            Some(core_id) => {
                // we can unwrap here because of the invariant between
                // id_map and active_devices.
                let mut dev_id = self.active_devices.remove(&core_id).unwrap();
                dev_id.core_id = None;
                Ok((core_id, self.inactive_devices.entry(id).or_insert(dev_id)))
            }
        }
    }
    /// Removes a device from the internal list.
    ///
    /// Removes a device from the internal [`Devices`] list and returns the
    /// associated [`DeviceInfo`] if `id` is found or `None` otherwise.
    pub fn remove_device(&mut self, id: BindingId) -> Option<DeviceInfo<C, I>> {
        match self.id_map.remove(&id) {
            Some(core) => self.active_devices.remove(&core),
            None => self.inactive_devices.remove(&id),
        }
    }
    /// Gets an iterator over all tracked devices, active and inactive.
    pub fn iter_devices(&self) -> impl Iterator<Item = &DeviceInfo<C, I>> {
        self.active_devices.iter().chain(self.inactive_devices.values())
    }
    /// Retrieve device with [`BindingId`].
    pub fn get_device(&self, id: BindingId) -> Option<&DeviceInfo<C, I>> {
        self.id_map
            .get(&id)
            .and_then(|device_id| self.active_devices.get(device_id))
            .or_else(|| self.inactive_devices.get(&id))
    }
    /// Retrieve mutable reference to device with [`BindingId`].
    pub fn get_device_mut(&mut self, id: BindingId) -> Option<&mut DeviceInfo<C, I>> {
        if let Some(device_id) = self.id_map.get(&id) {
            self.active_devices.get_mut(device_id)
        } else {
            self.inactive_devices.get_mut(&id)
        }
    }
    /// Retrieve associated `core_id` for [`BindingId`].
    pub fn get_core_id(&self, id: BindingId) -> Option<C> {
        self.id_map.get(&id).cloned()
    }
    /// Retrieve mutable reference to device by associated [`CoreId`] `id`.
    pub fn get_core_device_mut(&mut self, id: C) -> Option<&mut DeviceInfo<C, I>> {
        self.active_devices.get_mut(&id)
    }
    /// Retrieve associated `binding_id` for `core_id`.
    pub fn get_binding_id(&self, core_id: C) -> Option<BindingId> {
        self.active_devices.get(&core_id).map(|d| d.id)
    }
}
/// Device information kept in [`DeviceInfo`].
pub struct CommonInfo {
path: String,
client: eth::Client,
admin_enabled: bool,
phy_up: bool,
}
impl CommonInfo {
pub fn new(path: String, client: eth::Client, admin_enabled: bool, phy_up: bool) -> Self {
Self { path, client, admin_enabled, phy_up }
}
}
/// Device information kept by [`Devices`].
#[derive(Debug)]
pub struct DeviceInfo<C = DeviceId, I = CommonInfo> {
id: BindingId,
core_id: Option<C>,
info: I,
}
impl<C, I> DeviceInfo<C, I>
where
C: Clone,
{
pub fn core_id(&self) -> Option<C> {
self.core_id.clone()
}
pub fn id(&self) -> BindingId {
self.id
}
#[cfg(test)]
pub fn is_active(&self) -> bool {
self.core_id.is_some()
}
}
impl<C> DeviceInfo<C, CommonInfo> {
pub fn path(&self) -> &String {
&self.info.path
}
pub fn client(&self) -> ð::Client {
&self.info.client
}
pub fn client_mut(&mut self) -> &mut eth::Client {
&mut self.info.client
}
pub fn admin_enabled(&self) -> bool {
self.info.admin_enabled
}
pub fn set_admin_enabled(&mut self, setting: bool) {
self.info.admin_enabled = setting;
}
pub fn phy_up(&self) -> bool {
self.info.phy_up
}
pub fn set_phy_up(&mut self, setting: bool) {
self.info.phy_up = setting;
}
}
// Unit tests for `Devices`, using a mock core ID type so no real netstack
// core is required.
#[cfg(test)]
mod tests {
    use super::*;
    type TestDevices = Devices<MockDeviceId, u64>;
    #[derive(Copy, Clone, Eq, PartialEq, Debug)]
    struct MockDeviceId(usize);
    impl IdMapCollectionKey for MockDeviceId {
        const VARIANT_COUNT: usize = 1;
        fn get_variant(&self) -> usize {
            0
        }
        fn get_id(&self) -> usize {
            self.0 as usize
        }
    }
    #[test]
    fn test_add_remove_active_device() {
        let mut d = TestDevices::default();
        let core_a = MockDeviceId(1);
        let core_b = MockDeviceId(2);
        let a = d.add_active_device(core_a, 10).expect("can add device");
        let b = d.add_active_device(core_b, 20).expect("can add device");
        assert_ne!(a, b, "allocated same id");
        assert!(d.add_active_device(core_a, 10).is_none(), "can't add same id again");
        // check that ids are incrementing
        assert_eq!(d.last_id, 2);
        // check that devices are correctly inserted and carry the core id.
        assert_eq!(d.get_device(a).unwrap().core_id.unwrap(), core_a);
        assert_eq!(d.get_device(b).unwrap().core_id.unwrap(), core_b);
        assert_eq!(d.get_core_id(a).unwrap(), core_a);
        assert_eq!(d.get_core_id(b).unwrap(), core_b);
        assert_eq!(d.get_binding_id(core_a).unwrap(), a);
        assert_eq!(d.get_binding_id(core_b).unwrap(), b);
        // check that we can retrieve both devices by the core id:
        assert!(d.get_core_device_mut(core_a).is_some());
        assert!(d.get_core_device_mut(core_b).is_some());
        // remove both devices
        let info_a = d.remove_device(a).expect("can remove device");
        let info_b = d.remove_device(b).expect("can remove device");
        assert_eq!(info_a.info, 10);
        assert_eq!(info_b.info, 20);
        assert_eq!(info_a.core_id.unwrap(), core_a);
        assert_eq!(info_b.core_id.unwrap(), core_b);
        // removing device again will fail
        assert!(d.remove_device(a).is_none());
        // retrieving the devices now should fail:
        assert!(d.get_device(a).is_none());
        assert!(d.get_core_id(a).is_none());
        assert!(d.get_core_device_mut(core_a).is_none());
        assert!(d.active_devices.is_empty());
        assert!(d.inactive_devices.is_empty());
        assert!(d.id_map.is_empty());
    }
    #[test]
    fn test_add_remove_inactive_device() {
        let mut d = TestDevices::default();
        let a = d.add_device(10);
        let b = d.add_device(20);
        assert_ne!(a, b, "allocated same id");
        // check that ids are incrementing
        assert_eq!(d.last_id, 2);
        // check that devices are correctly inserted and don't
        // carry a core id.
        assert!(d.get_device(a).unwrap().core_id.is_none());
        assert!(d.get_device(b).unwrap().core_id.is_none());
        assert!(d.get_core_id(a).is_none());
        assert!(d.get_core_id(b).is_none());
        // remove both devices
        let info_a = d.remove_device(a).expect("can remove device");
        let info_b = d.remove_device(b).expect("can remove device");
        assert_eq!(info_a.info, 10);
        assert_eq!(info_b.info, 20);
        assert!(info_a.core_id.is_none());
        assert!(info_b.core_id.is_none());
        // removing device again will fail
        assert!(d.remove_device(a).is_none());
        // retrieving the device now should fail:
        assert!(d.get_device(a).is_none());
        assert!(d.get_core_id(a).is_none());
        assert!(d.active_devices.is_empty());
        assert!(d.inactive_devices.is_empty());
        assert!(d.id_map.is_empty());
    }
    #[test]
    fn test_activate_device() {
        let mut d = TestDevices::default();
        let core_a = MockDeviceId(1);
        let core_b = MockDeviceId(2);
        let a = d.add_device(10);
        let b = d.add_active_device(core_b, 20).unwrap();
        assert_eq!(d.activate_device(1000, |_| core_a).unwrap_err(), ToggleError::NotFound);
        assert_eq!(d.activate_device(b, |_| core_b).unwrap_err(), ToggleError::NoChange);
        let info = d.activate_device(a, |_| core_a).expect("can activate device");
        assert_eq!(info.info, 10);
        assert_eq!(info.core_id.unwrap(), core_a);
        // both a and b should be active now:
        assert!(d.inactive_devices.is_empty());
    }
    #[test]
    #[should_panic]
    fn test_activate_duplicate_core_id() {
        let mut devices = TestDevices::default();
        let core_id = MockDeviceId(1);
        // Add an active device with core_id
        devices.add_active_device(core_id, 20).unwrap();
        // Trying to activate another device with the same core_id should panic
        let second_device = devices.add_device(10);
        let _result = devices.activate_device(second_device, |_| core_id);
    }
    #[test]
    fn test_deactivate_device() {
        let mut d = TestDevices::default();
        let core_a = MockDeviceId(1);
        let a = d.add_active_device(core_a, 10).unwrap();
        let b = d.add_device(20);
        assert_eq!(d.deactivate_device(b).unwrap_err(), ToggleError::NoChange);
        assert_eq!(d.deactivate_device(1000).unwrap_err(), ToggleError::NotFound);
        let (core, info) = d.deactivate_device(a).unwrap();
        assert_eq!(core, core_a);
        assert_eq!(info.info, 10);
        assert!(info.core_id.is_none());
        // both a and b should be inactive now:
        assert!(d.active_devices.is_empty());
        assert!(d.id_map.is_empty());
    }
    #[test]
    fn test_iter() {
        let mut d = TestDevices::default();
        let core_a = MockDeviceId(1);
        let a = d.add_active_device(core_a, 10).unwrap();
        let b = d.add_device(20);
        // check that we can iterate over active and inactive devices seamlessly
        assert_eq!(d.iter_devices().count(), 2);
        let mut touch_a = false;
        let mut touch_b = false;
        for dev in d.iter_devices() {
            if dev.id == a {
                assert!(!touch_a);
                touch_a = true;
            } else if dev.id == b {
                assert!(!touch_b);
                touch_b = true;
            } else {
                panic!("Unexpected id");
            }
        }
        assert!(touch_a && touch_b);
    }
}
|
// Copyright 2018 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
// OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use bytes::{Buf, BufMut, Bytes, BytesMut, IntoBuf};
use encode;
use decode::{self, Error};
use std::{io, marker::PhantomData, usize};
use tokio_codec::{Encoder, Decoder};
/// tokio-codec based encoder + decoder of unsigned-varint values
///
/// `T` is a type-level marker only; no values of `T` are stored.
#[derive(Default)]
pub struct Uvi<T>(PhantomData<*const T>);
// Implement tokio-codec `Encoder` + `Decoder` traits for unsigned integers.
//
// $typ: the integer type; $enc / $dec: the unsigned-varint encode/decode
// functions for that type; $arr: an expression yielding a scratch buffer
// large enough to hold the encoded value.
macro_rules! encoder_decoder_impls {
    ($typ:ty, $enc:expr, $dec:expr, $arr:expr) => {
        impl Encoder for Uvi<$typ> {
            type Item = $typ;
            type Error = io::Error;
            fn encode(&mut self, item: Self::Item, dst: &mut BytesMut) -> Result<(), Self::Error> {
                let mut buf = $arr;
                dst.extend_from_slice($enc(item, &mut buf));
                Ok(())
            }
        }
        impl Decoder for Uvi<$typ> {
            type Item = $typ;
            type Error = io::Error;
            fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
                // Decode from the front of `src`; on success, drop the
                // consumed prefix. An incomplete varint is not an error —
                // signal "need more bytes" with `Ok(None)`.
                let (number, consumed) =
                    match $dec(src.as_ref()) {
                        Ok((n, rem)) => (n, src.len() - rem.len()),
                        Err(Error::Insufficient) => return Ok(None),
                        Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e))
                    };
                src.split_to(consumed);
                Ok(Some(number))
            }
        }
    }
}
encoder_decoder_impls!(u8, encode::u8, decode::u8, encode::u8_buffer());
encoder_decoder_impls!(u16, encode::u16, decode::u16, encode::u16_buffer());
encoder_decoder_impls!(u32, encode::u32, decode::u32, encode::u32_buffer());
encoder_decoder_impls!(u64, encode::u64, decode::u64, encode::u64_buffer());
encoder_decoder_impls!(u128, encode::u128, decode::u128, encode::u128_buffer());
encoder_decoder_impls!(usize, encode::usize, decode::usize, encode::usize_buffer());
/// tokio-codec based encoder + decoder of unsigned-varint, length-prefixed bytes
pub struct UviBytes<T = Bytes> {
    len: Option<usize>, // number of bytes (for decoding only)
    max: usize, // max. number of bytes (for decoding only)
    _ty: PhantomData<T>
}
impl<T> Default for UviBytes<T> {
    fn default() -> Self {
        // No length prefix pending; accept frames of any size by default.
        Self { len: None, max: usize::MAX, _ty: PhantomData }
    }
}
impl<T> UviBytes<T> {
    /// Limit the maximum allowed length of bytes.
    pub fn set_max_len(&mut self, val: usize) {
        self.max = val
    }
}
impl<T: IntoBuf> Encoder for UviBytes<T> {
    type Item = T;
    type Error = io::Error;
    fn encode(&mut self, item: Self::Item, dst: &mut BytesMut) -> Result<(), Self::Error> {
        // Write the varint length prefix first, then the payload.
        let bytes = item.into_buf();
        Uvi::<usize>::default().encode(bytes.remaining(), dst)?;
        dst.reserve(bytes.remaining());
        dst.put(bytes);
        Ok(())
    }
}
impl<T> Decoder for UviBytes<T> {
    type Item = BytesMut;
    type Error = io::Error;
    fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
        loop {
            match self.len.take() {
                None => {
                    // Decode the varint length prefix; `self.len` caches it
                    // across calls while waiting for the payload to arrive.
                    self.len = Uvi::<usize>::default().decode(src)?;
                    if self.len.is_none() {
                        return Ok(None)
                    }
                    continue
                }
                Some(n) if n > self.max => {
                    return Err(io::Error::new(io::ErrorKind::PermissionDenied, "len > max"))
                }
                Some(n) => {
                    if src.len() < n {
                        // Not enough payload yet: remember the pending
                        // length, reserve room for the remainder, and wait.
                        let add = n - src.len();
                        src.reserve(add);
                        self.len = Some(n);
                        return Ok(None)
                    } else {
                        return Ok(Some(src.split_to(n)))
                    }
                }
            }
        }
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use fuchsia_criterion::{criterion, FuchsiaCriterion};
/// Naive doubly-recursive Fibonacci with fib(0) = fib(1) = 1.
/// Intentionally exponential: it is the CPU workload being benchmarked,
/// so do not "optimize" it into the iterative form.
fn fib(n: u64) -> u64 {
    if n < 2 {
        1
    } else {
        fib(n - 1) + fib(n - 2)
    }
}
// Benchmarks the naive recursive Fibonacci; `black_box` prevents the
// compiler from constant-folding the call away.
fn main() {
    let mut c = FuchsiaCriterion::default();
    c.bench_function("fib(20)", |b| b.iter(|| fib(criterion::black_box(20))));
}
|
use crate::image_range::ImageRange;
use crate::semi_dense::fusion::fusion;
use crate::semi_dense::numeric::Inverse;
use crate::semi_dense::stat;
use crate::warp::{PerspectiveWarp, Warp};
use ndarray::{arr1, Array, Array2, Data};
use std::collections::HashMap;
// Propagates an inverse-depth variance estimate from frame 0 to frame 1.
// `uncertaintity` (sic) is an additive bias term added to the propagated
// variance.
fn propagate_variance(
    depth0: f64,
    depth1: f64,
    variance0: f64,
    uncertaintity: f64,
) -> f64 {
    // We assume that the rotation change is small between timestamp 0 and
    // timestamp 1.
    // TODO accept the case that rotation change is significantly large
    let ratio = depth1.inv() / depth0.inv();
    // The variance scales with the 4th power of the inverse-depth ratio.
    f64::from(ratio).powi(4) * variance0 + uncertaintity // variance1
}
/// Resolves two depth estimates landing on the same target pixel.
///
/// If the two inverse-depth estimates are statistically compatible they are
/// fused into one estimate; otherwise the nearer surface occludes the
/// farther one and its estimate wins.
fn handle_collision(
    depth_a: f64,
    depth_b: f64,
    variance_a: f64,
    variance_b: f64,
) -> (f64, f64) {
    let compatible = stat::are_statically_same(
        depth_a.inv(), depth_b.inv(), variance_a, variance_b
    );
    if compatible {
        let (inv_depth, variance) = fusion(
            depth_a.inv(),
            depth_b.inv(),
            variance_a,
            variance_b
        );
        (inv_depth.inv(), variance)
    } else if depth_a < depth_b {
        // b is hidden by a
        (depth_a, variance_a)
    } else {
        // a is hidden by b
        (depth_b, variance_b)
    }
}
/// Propagates the depth and variance maps of frame 0 into frame 1 through
/// the warp `warp10`, resolving collisions where several source pixels land
/// on the same target pixel.
///
/// Target pixels that receive no propagated estimate are filled with
/// `default_depth` / `default_variance`.
pub fn propagate<T: Data<Elem = f64>>(
    warp10: &PerspectiveWarp<T>,
    depth_map0: &Array2<f64>,
    variance_map0: &Array2<f64>,
    default_depth: f64,
    default_variance: f64,
    uncertaintity_bias: f64,
) -> (Array2<f64>, Array2<f64>) {
    let shape = depth_map0.shape();
    let (height, width) = (shape[0], shape[1]);
    // Sparse accumulator for frame 1: (x1, y1) -> (depth1, variance1).
    let mut map1: HashMap<(usize, usize), (f64, f64)> = HashMap::new();
    for y0 in 0..height {
        for x0 in 0..width {
            let u0 = arr1(&[x0 as f64, y0 as f64]);
            // (y0, x0) are already usize; the old `as usize` casts were
            // redundant and have been removed.
            let depth0 = depth_map0[[y0, x0]];
            let (u1, depth1a) = warp10.warp(&u0, depth0);
            // Skip pixels that warp outside the image bounds.
            if !u1.is_in_range(shape) {
                continue;
            }
            let variance0 = variance_map0[[y0, x0]];
            let variance1a = propagate_variance(depth0, depth1a, variance0,
                                                uncertaintity_bias);
            // Truncate the warped coordinate to the target pixel.
            let u1 = (u1[0] as usize, u1[1] as usize);
            let (depth1, variance1) = match map1.get(&u1) {
                // Another source pixel already landed here: fuse or occlude.
                Some((depth1b, variance1b)) => {
                    handle_collision(depth1a, *depth1b, variance1a, *variance1b)
                },
                None => (depth1a, variance1a)
            };
            map1.insert(u1, (depth1, variance1));
        }
    }
    // Densify: start from the defaults and overwrite with the propagated
    // estimates.
    let mut depth_map1 = Array::from_elem((height, width), default_depth);
    let mut variance_map1 = Array::from_elem((height, width), default_variance);
    for (&(x1, y1), &(depth1, variance1)) in map1.iter() {
        depth_map1[[y1, x1]] = depth1;
        variance_map1[[y1, x1]] = variance1;
    }
    (depth_map1, variance_map1)
}
// Unit tests for variance propagation and full map propagation.
#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_abs_diff_eq;
    use ndarray::arr2;
    use crate::camera::CameraParameters;
    #[test]
    fn test_propagate_variance() {
        let depth0 = 4.0;
        let depth1 = 2.0;
        let inv_depth0 = 1. / depth0;
        let inv_depth1 = 1. / depth1;
        let variance0 = 0.5;
        let r = inv_depth1 / inv_depth0;
        // Expected: 4th power of the inverse-depth ratio plus the bias.
        assert_eq!(
            propagate_variance(depth0, depth1, variance0, 1.0),
            (r * r * r * r) * variance0 + 1.0
        )
    }
    #[test]
    fn test_propagate() {
        let (width, height) = (8, 8);
        let shape = (height, width);
        let camera_params = CameraParameters::new(
            (100., 100.),
            (width as f64 / 2., height as f64 / 2.)
        );
        let default_depth = 60.;
        let default_variance = 8.;
        let uncertaintity_bias = 3.;
        // Pure translation of 300 along the optical axis.
        let transform10 = arr2(
            &[[1., 0., 0., 0.],
              [0., 1., 0., 0.],
              [0., 0., 1., 300.],
              [0., 0., 0., 1.]]
        );
        let warp10 = PerspectiveWarp::new(&transform10, &camera_params, &camera_params);
        let depth0 = 100.;
        let variance0 = 20.;
        // -1.0 + 4, -1.0 + 4,
        let depth_map0 = Array::from_elem(shape, depth0);
        let variance_map0 = Array::from_elem(shape, variance0);
        let (depth_map1, variance_map1) = propagate(
            &warp10,
            &depth_map0,
            &variance_map0,
            default_depth,
            default_variance,
            uncertaintity_bias
        );
        let depth1 = 400.;
        let mut expected = Array::from_elem(shape, default_depth);
        expected.slice_mut(s![3..5, 3..5])
                .assign(&Array::from_elem((2, 2), depth1));
        for y in 0..height {
            for x in 0..width {
                assert_abs_diff_eq!(depth_map1[[y, x]], expected[[y, x]],
                                    epsilon = 1e-4);
            }
        }
        let variance1 = propagate_variance(depth0, depth1,
                                           variance0, uncertaintity_bias);
        // 16 pixels in variance_map0 will be
        // fused into 1 pixel in variance_map1
        // Therefore variance should be decreased to 1/16
        let mut expected = Array::from_elem(shape, default_variance);
        expected.slice_mut(s![3..5, 3..5])
                .assign(&Array::from_elem((2, 2), variance1 / 16.));
        for y in 0..height {
            for x in 0..width {
                assert_abs_diff_eq!(variance_map1[[y, x]], expected[[y, x]],
                                    epsilon = 1e-4);
            }
        }
    }
}
|
/*
* File : test/mod.rs
* Purpose: test module
* Program: red
* About : command-line text editor
* Authors: Tommy Lincoln <pajamapants3000@gmail.com>
* License: MIT; See LICENSE!
* Notes : Notes on successful compilation
* Created: 10/26/2016
*/
// *** Bring in to namespace *** {{{
//use super::*;
//use parse::*;
//use error::*;
// ^^^ Bring in to namespace ^^^ }}}
// *** Attributes *** {{{
// ^^^ Attributes ^^^ }}}
// *** Constants *** {{{
// ^^^ Constants ^^^ }}}
// *** Data Structures *** {{{
// ^^^ Data Structures ^^^ }}}
// *** Functions *** {{{
/// Placeholder entry point used when this test module is built as a normal
/// (non-test) binary.
#[cfg(not(test))]
fn main() {
    let notice = "If you are reading this, the tests were neither compiled nor run!";
    println!("{}", notice);
}
// ^^^ Functions ^^^ }}}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Opaque WinRT class pointers for the accounts-settings-pane API surface;
// the concrete layouts live behind the Windows Runtime ABI.
pub type AccountsSettingsPane = *mut ::core::ffi::c_void;
pub type AccountsSettingsPaneCommandsRequestedEventArgs = *mut ::core::ffi::c_void;
pub type AccountsSettingsPaneEventDeferral = *mut ::core::ffi::c_void;
pub type CredentialCommand = *mut ::core::ffi::c_void;
pub type CredentialCommandCredentialDeletedHandler = *mut ::core::ffi::c_void;
pub type SettingsCommand = *mut ::core::ffi::c_void;
/// Screen edge on which the settings pane appears (0 = Right, 1 = Left).
#[derive(::core::marker::Copy, ::core::clone::Clone)]
#[repr(transparent)]
pub struct SettingsEdgeLocation(pub i32);
impl SettingsEdgeLocation {
    pub const Right: Self = Self(0i32);
    pub const Left: Self = Self(1i32);
}
// Opaque WinRT class pointers for the settings-pane API surface.
pub type SettingsPane = *mut ::core::ffi::c_void;
pub type SettingsPaneCommandsRequest = *mut ::core::ffi::c_void;
pub type SettingsPaneCommandsRequestedEventArgs = *mut ::core::ffi::c_void;
/// Bit flags describing which actions a web account supports; values are
/// powers of two and may be combined on the raw `u32`.
#[derive(::core::marker::Copy, ::core::clone::Clone)]
#[repr(transparent)]
pub struct SupportedWebAccountActions(pub u32);
impl SupportedWebAccountActions {
    pub const None: Self = Self(0u32);
    pub const Reconnect: Self = Self(1u32);
    pub const Remove: Self = Self(2u32);
    pub const ViewDetails: Self = Self(4u32);
    pub const Manage: Self = Self(8u32);
    pub const More: Self = Self(16u32);
}
/// Enumerated action invoked on a web account (sequential discriminants).
#[derive(::core::marker::Copy, ::core::clone::Clone)]
#[repr(transparent)]
pub struct WebAccountAction(pub i32);
impl WebAccountAction {
    pub const Reconnect: Self = Self(0i32);
    pub const Remove: Self = Self(1i32);
    pub const ViewDetails: Self = Self(2i32);
    pub const Manage: Self = Self(3i32);
    pub const More: Self = Self(4i32);
}
// Opaque WinRT class/delegate pointers for web-account commands.
pub type WebAccountCommand = *mut ::core::ffi::c_void;
pub type WebAccountCommandInvokedHandler = *mut ::core::ffi::c_void;
pub type WebAccountInvokedArgs = *mut ::core::ffi::c_void;
pub type WebAccountProviderCommand = *mut ::core::ffi::c_void;
pub type WebAccountProviderCommandInvokedHandler = *mut ::core::ffi::c_void;
|
use libc;
use libc::strcmp;
/// Returns the single non-option argument from `argv`, skipping an optional
/// leading `"--"` separator.
///
/// If there is not exactly one remaining argument, control is handed to
/// `bb_show_usage()` (which, per busybox convention, presumably prints usage
/// and exits — confirm in `libbb::appletlib`).
///
/// # Safety
/// `argv` must point to a valid, NULL-terminated argv-style array of
/// NUL-terminated C strings.
pub unsafe fn single_argv(mut argv: *mut *mut libc::c_char) -> *mut libc::c_char {
    // Skip a literal "--" at argv[1], if present.
    if !(*argv.offset(1)).is_null()
        && strcmp(
            *argv.offset(1),
            b"--\x00" as *const u8 as *const libc::c_char,
        ) == 0
    {
        argv = argv.offset(1)
    }
    // Require exactly one argument: argv[1] present and argv[2] absent.
    if (*argv.offset(1)).is_null() || !(*argv.offset(2)).is_null() {
        crate::libbb::appletlib::bb_show_usage();
    }
    return *argv.offset(1);
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
/// Generated binding for the Win32 `ApplicationRecoveryFinished` function;
/// forwards `bsuccess` through the ABI. Non-Windows targets panic.
#[cfg(feature = "Win32_Foundation")]
#[inline]
pub unsafe fn ApplicationRecoveryFinished<'a, Param0: ::windows::core::IntoParam<'a, super::super::Foundation::BOOL>>(bsuccess: Param0) {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn ApplicationRecoveryFinished(bsuccess: super::super::Foundation::BOOL);
        }
        ::core::mem::transmute(ApplicationRecoveryFinished(bsuccess.into_param().abi()))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
/// Generated binding for the Win32 `ApplicationRecoveryInProgress` function;
/// the `pbcancelled` out-parameter is surfaced as the `Ok` value.
#[cfg(feature = "Win32_Foundation")]
#[inline]
pub unsafe fn ApplicationRecoveryInProgress() -> ::windows::core::Result<super::super::Foundation::BOOL> {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn ApplicationRecoveryInProgress(pbcancelled: *mut super::super::Foundation::BOOL) -> ::windows::core::HRESULT;
        }
        let mut result__: <super::super::Foundation::BOOL as ::windows::core::Abi>::Abi = ::core::mem::zeroed();
        ApplicationRecoveryInProgress(&mut result__).from_abi::<super::super::Foundation::BOOL>(result__)
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
/// Generated binding for the Win32 `GetApplicationRecoveryCallback` function;
/// fills the caller-provided out-pointers and converts the HRESULT with `.ok()`.
#[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_WindowsProgramming"))]
#[inline]
pub unsafe fn GetApplicationRecoveryCallback<'a, Param0: ::windows::core::IntoParam<'a, super::super::Foundation::HANDLE>>(hprocess: Param0, precoverycallback: *mut ::core::option::Option<super::WindowsProgramming::APPLICATION_RECOVERY_CALLBACK>, ppvparameter: *mut *mut ::core::ffi::c_void, pdwpinginterval: *mut u32, pdwflags: *mut u32) -> ::windows::core::Result<()> {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn GetApplicationRecoveryCallback(hprocess: super::super::Foundation::HANDLE, precoverycallback: *mut ::windows::core::RawPtr, ppvparameter: *mut *mut ::core::ffi::c_void, pdwpinginterval: *mut u32, pdwflags: *mut u32) -> ::windows::core::HRESULT;
        }
        GetApplicationRecoveryCallback(hprocess.into_param().abi(), ::core::mem::transmute(precoverycallback), ::core::mem::transmute(ppvparameter), ::core::mem::transmute(pdwpinginterval), ::core::mem::transmute(pdwflags)).ok()
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
/// Generated binding for the Win32 `GetApplicationRestartSettings` function;
/// writes the registered restart command line/flags into the out-pointers.
#[cfg(feature = "Win32_Foundation")]
#[inline]
pub unsafe fn GetApplicationRestartSettings<'a, Param0: ::windows::core::IntoParam<'a, super::super::Foundation::HANDLE>>(hprocess: Param0, pwzcommandline: super::super::Foundation::PWSTR, pcchsize: *mut u32, pdwflags: *mut u32) -> ::windows::core::Result<()> {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn GetApplicationRestartSettings(hprocess: super::super::Foundation::HANDLE, pwzcommandline: super::super::Foundation::PWSTR, pcchsize: *mut u32, pdwflags: *mut u32) -> ::windows::core::HRESULT;
        }
        GetApplicationRestartSettings(hprocess.into_param().abi(), ::core::mem::transmute(pwzcommandline), ::core::mem::transmute(pcchsize), ::core::mem::transmute(pdwflags)).ok()
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
/// Flags for `RegisterApplicationRestart` (`RESTART_NO_*`); supports the
/// usual bitwise operators on the underlying `u32`.
#[derive(::core::cmp::PartialEq, ::core::cmp::Eq, ::core::marker::Copy, ::core::clone::Clone, ::core::default::Default, ::core::fmt::Debug)]
#[repr(transparent)]
pub struct REGISTER_APPLICATION_RESTART_FLAGS(pub u32);
pub const RESTART_NO_CRASH: REGISTER_APPLICATION_RESTART_FLAGS = REGISTER_APPLICATION_RESTART_FLAGS(1u32);
pub const RESTART_NO_HANG: REGISTER_APPLICATION_RESTART_FLAGS = REGISTER_APPLICATION_RESTART_FLAGS(2u32);
pub const RESTART_NO_PATCH: REGISTER_APPLICATION_RESTART_FLAGS = REGISTER_APPLICATION_RESTART_FLAGS(4u32);
pub const RESTART_NO_REBOOT: REGISTER_APPLICATION_RESTART_FLAGS = REGISTER_APPLICATION_RESTART_FLAGS(8u32);
impl ::core::convert::From<u32> for REGISTER_APPLICATION_RESTART_FLAGS {
    fn from(value: u32) -> Self {
        REGISTER_APPLICATION_RESTART_FLAGS(value)
    }
}
unsafe impl ::windows::core::Abi for REGISTER_APPLICATION_RESTART_FLAGS {
    type Abi = Self;
}
impl ::core::ops::BitOr for REGISTER_APPLICATION_RESTART_FLAGS {
    type Output = Self;
    fn bitor(self, rhs: Self) -> Self {
        REGISTER_APPLICATION_RESTART_FLAGS(self.0 | rhs.0)
    }
}
impl ::core::ops::BitAnd for REGISTER_APPLICATION_RESTART_FLAGS {
    type Output = Self;
    fn bitand(self, rhs: Self) -> Self {
        REGISTER_APPLICATION_RESTART_FLAGS(self.0 & rhs.0)
    }
}
impl ::core::ops::BitOrAssign for REGISTER_APPLICATION_RESTART_FLAGS {
    fn bitor_assign(&mut self, rhs: Self) {
        self.0 |= rhs.0;
    }
}
impl ::core::ops::BitAndAssign for REGISTER_APPLICATION_RESTART_FLAGS {
    fn bitand_assign(&mut self, rhs: Self) {
        self.0 &= rhs.0;
    }
}
impl ::core::ops::Not for REGISTER_APPLICATION_RESTART_FLAGS {
    type Output = Self;
    fn not(self) -> Self {
        REGISTER_APPLICATION_RESTART_FLAGS(!self.0)
    }
}
/// Generated binding for the Win32 `RegisterApplicationRecoveryCallback`
/// function. (The `precoveycallback` spelling mirrors the upstream metadata.)
#[cfg(feature = "Win32_System_WindowsProgramming")]
#[inline]
pub unsafe fn RegisterApplicationRecoveryCallback(precoveycallback: ::core::option::Option<super::WindowsProgramming::APPLICATION_RECOVERY_CALLBACK>, pvparameter: *const ::core::ffi::c_void, dwpinginterval: u32, dwflags: u32) -> ::windows::core::Result<()> {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RegisterApplicationRecoveryCallback(precoveycallback: ::windows::core::RawPtr, pvparameter: *const ::core::ffi::c_void, dwpinginterval: u32, dwflags: u32) -> ::windows::core::HRESULT;
        }
        RegisterApplicationRecoveryCallback(::core::mem::transmute(precoveycallback), ::core::mem::transmute(pvparameter), ::core::mem::transmute(dwpinginterval), ::core::mem::transmute(dwflags)).ok()
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
/// Generated binding for the Win32 `RegisterApplicationRestart` function;
/// `dwflags` takes the `RESTART_NO_*` flag type defined above.
#[cfg(feature = "Win32_Foundation")]
#[inline]
pub unsafe fn RegisterApplicationRestart<'a, Param0: ::windows::core::IntoParam<'a, super::super::Foundation::PWSTR>>(pwzcommandline: Param0, dwflags: REGISTER_APPLICATION_RESTART_FLAGS) -> ::windows::core::Result<()> {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RegisterApplicationRestart(pwzcommandline: super::super::Foundation::PWSTR, dwflags: REGISTER_APPLICATION_RESTART_FLAGS) -> ::windows::core::HRESULT;
        }
        RegisterApplicationRestart(pwzcommandline.into_param().abi(), ::core::mem::transmute(dwflags)).ok()
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
/// Generated binding for the Win32 `UnregisterApplicationRecoveryCallback`
/// function; converts the HRESULT with `.ok()`.
#[inline]
pub unsafe fn UnregisterApplicationRecoveryCallback() -> ::windows::core::Result<()> {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn UnregisterApplicationRecoveryCallback() -> ::windows::core::HRESULT;
        }
        UnregisterApplicationRecoveryCallback().ok()
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
/// Generated binding for the Win32 `UnregisterApplicationRestart` function;
/// converts the HRESULT with `.ok()`.
#[inline]
pub unsafe fn UnregisterApplicationRestart() -> ::windows::core::Result<()> {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn UnregisterApplicationRestart() -> ::windows::core::HRESULT;
        }
        UnregisterApplicationRestart().ok()
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
|
/// Password-style digit check: returns `true` when the decimal digits of
/// `num`, read left to right, never decrease AND at least one pair of
/// adjacent digits is equal.
///
/// Single-digit numbers return `false` (no adjacent pair exists).
pub fn test_number(num: u64) -> bool {
    let mut last_digit = num % 10;
    let mut number = num / 10;
    let mut pair_found = false;
    // Walk the digits from least to most significant.
    while number != 0 {
        let current_digit = number % 10;
        // A more significant digit greater than its right neighbour means
        // the digits decrease somewhere when read left to right.
        if current_digit > last_digit {
            return false;
        }
        if last_digit == current_digit {
            pair_found = true;
        }
        last_digit = current_digit;
        number /= 10;
    }
    // Idiom fix: return the flag directly rather than
    // `if pair_found { return true; } false`.
    pair_found
}
/// Counts the values in the puzzle's input range that satisfy `test_number`
/// and prints the result.
pub fn run_puzzle() {
    let (lo, hi) = (264360u64, 746325u64);
    // Inclusive range: the original iterated `min..max + 1`.
    let count = (lo..=hi).filter(|&n| test_number(n)).count();
    println!("Result: {}", count);
}
|
use procon_reader::ProconReader;
/// Reads two integers `a` and `b` and prints the count of integers in the
/// inclusive range `[a, b]` (zero when the range is empty).
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    let a: u32 = rd.get();
    let b: u32 = rd.get();
    let answer = if a <= b { b - a + 1 } else { 0 };
    println!("{}", answer);
}
|
use ocl;
use rand::{Rng, thread_rng};
use crate::Context;
/// Buffer that stores necessary data for rendering (e.g. collected statistics, rng seeds, etc).
pub struct RenderBuffer {
    /// OpenCL context the buffers were created on.
    context: Context,
    /// Per-pixel RNG state, one `u32` per pixel, seeded from `thread_rng`.
    random: ocl::Buffer<u32>,
    /// Accumulated color, 3 `f32` components per pixel (presumably RGB —
    /// confirm against the render kernel).
    color: ocl::Buffer<f32>,
    /// Number of render passes accumulated into `color` so far.
    n_passes: usize,
    /// Render-target dimensions; assumed (width, height) — TODO confirm.
    dims: (usize, usize),
}
impl RenderBuffer {
    /// Allocates the per-pixel RNG and color buffers for a `dims`-sized
    /// render target and seeds the RNG buffer from `thread_rng`.
    ///
    /// Returns an error if any OpenCL buffer creation or write fails.
    pub fn new(context: &Context, dims: (usize, usize)) -> crate::Result<Self> {
        let len = dims.0 * dims.1;
        // One 32-bit RNG state per pixel. Idiom fix: suffixed literals
        // (`0u32`/`0f32`) replace the redundant `0 as u32` / `0 as f32` casts.
        let random = ocl::Buffer::<u32>::builder()
            .queue(context.queue().clone())
            .flags(ocl::flags::MEM_READ_WRITE)
            .len(len)
            .fill_val(0u32)
            .build()?;
        let mut seed = vec![0u32; len];
        thread_rng().fill(&mut seed[..]);
        random.cmd()
            .offset(0)
            .write(&seed)
            .enq()?;
        // Three f32 components per pixel.
        let color = ocl::Buffer::<f32>::builder()
            .queue(context.queue().clone())
            .flags(ocl::flags::MEM_READ_WRITE)
            .len(3 * len)
            .fill_val(0f32)
            .build()?;
        Ok(Self {
            context: context.clone(),
            random, color,
            n_passes: 0,
            dims,
        })
    }
    /// Records that one more render pass has been accumulated.
    pub fn pass(&mut self) {
        self.n_passes += 1;
    }
    /// Zeroes the color accumulator and resets the pass counter.
    pub fn clear(&mut self) -> crate::Result<()> {
        self.color.cmd()
            .offset(0)
            .fill(0f32, None)
            .enq()?;
        self.n_passes = 0;
        Ok(())
    }
    /// The OpenCL context this buffer belongs to.
    pub fn context(&self) -> &Context {
        &self.context
    }
    /// Per-pixel RNG state buffer.
    pub fn random(&self) -> &ocl::Buffer<u32> {
        &self.random
    }
    /// Mutable access to the RNG state buffer.
    pub fn random_mut(&mut self) -> &mut ocl::Buffer<u32> {
        &mut self.random
    }
    /// Accumulated color buffer.
    pub fn color(&self) -> &ocl::Buffer<f32> {
        &self.color
    }
    /// Mutable access to the color buffer.
    pub fn color_mut(&mut self) -> &mut ocl::Buffer<f32> {
        &mut self.color
    }
    /// Number of passes accumulated since the last `clear`.
    pub fn n_passes(&self) -> usize {
        self.n_passes
    }
    /// Render-target dimensions.
    pub fn dims(&self) -> (usize, usize) {
        self.dims
    }
    /// Number of pixels in the render target.
    pub fn len(&self) -> usize {
        self.dims.0 * self.dims.1
    }
    /// True when the render target has zero pixels. Added to pair with
    /// `len` (clippy::len_without_is_empty); purely additive to the API.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
|
//! Helper functions for multipart encodings
#[cfg(feature = "multipart_form")]
pub mod form;
#[cfg(feature = "multipart_related")]
pub mod related;
|
pub mod io;
pub mod exchange;
pub mod simulation;
pub mod order;
pub mod controller;
pub mod utility;
use crate::exchange::order_book::Book;
use crate::order::TradeType;
use crate::exchange::queue::Queue;
use crate::controller::State;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
use std::sync::{Mutex, Arc};
/// Builds the shared state for an exchange run: the order queue, the bid and
/// ask books, and the controller state (initialised to `State::Process`).
pub fn setup_exchange() -> (Arc<Queue>, Arc<Book>, Arc<Book>, Arc<Mutex<State>>) {
    let queue = Arc::new(Queue::new());
    let bids = Arc::new(Book::new(TradeType::Bid));
    let asks = Arc::new(Book::new(TradeType::Ask));
    let state = Arc::new(Mutex::new(State::Process));
    (queue, bids, asks, state)
}
|
/// Stubbed sound processing unit: accepts register writes and always reads
/// back zero. Placeholder until audio is emulated.
#[derive(Debug)]
pub struct Spu;
impl Spu {
    /// Creates a new (stateless) SPU stub.
    pub fn new() -> Spu {
        Spu {}
    }
    /// Ignores the write; the SPU is not emulated.
    #[allow(unused_variables)]
    pub fn write(&mut self, addr: u16, val: u8) {}
    /// Always returns 0; the SPU is not emulated.
    #[allow(unused_variables)]
    pub fn read(&self, addr: u16) -> u8 {
        0
    }
}
/// Mirrors `Spu::new()` so the type satisfies clippy::new_without_default;
/// purely additive to the API.
impl Default for Spu {
    fn default() -> Self {
        Self::new()
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::net::SocketAddr;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use common_base::base::signal_stream;
use common_base::base::DummySignalStream;
use common_base::base::SignalStream;
use common_base::base::SignalType;
use common_exception::Result;
use futures::stream::Abortable;
use futures::StreamExt;
use tokio_stream::wrappers::TcpListenerStream;
use tracing::error;
use tracing::info;
use crate::clusters::ClusterDiscovery;
use crate::sessions::SessionManager;
/// TCP accept stream that can be aborted to stop accepting connections.
pub type ListeningStream = Abortable<TcpListenerStream>;
/// A network service that can be started on an address and later shut down
/// (gracefully or forcibly).
#[async_trait::async_trait]
pub trait Server: Send {
    async fn shutdown(&mut self, graceful: bool);
    async fn start(&mut self, listening: SocketAddr) -> Result<SocketAddr>;
}
/// Aggregates the running services and coordinates a once-only shutdown
/// sequence for them.
pub struct ShutdownHandle {
    // Set to true (via CAS) by whichever path initiates shutdown first.
    shutdown: Arc<AtomicBool>,
    // Sessions drained during graceful shutdown.
    sessions: Arc<SessionManager>,
    // Services stopped gracefully first, then forcibly.
    services: Vec<Box<dyn Server>>,
}
impl ShutdownHandle {
    /// Builds a handle with no registered services and the shutdown flag unset.
    pub fn create() -> Result<ShutdownHandle> {
        Ok(ShutdownHandle {
            sessions: SessionManager::instance(),
            services: vec![],
            shutdown: Arc::new(AtomicBool::new(false)),
        })
    }
    /// Shuts down every registered service concurrently and waits for all
    /// of them to finish.
    async fn shutdown_services(&mut self, graceful: bool) {
        let mut shutdown_jobs = vec![];
        for service in &mut self.services {
            shutdown_jobs.push(service.shutdown(graceful));
        }
        futures::future::join_all(shutdown_jobs).await;
    }
    /// Full shutdown sequence: graceful service stop, cluster
    /// unregistration, session drain, then a forced service stop.
    pub async fn shutdown(&mut self, mut signal: SignalStream) {
        self.shutdown_services(true).await;
        ClusterDiscovery::instance()
            .unregister_to_metastore(&mut signal)
            .await;
        // 5: drain timeout — presumably seconds; confirm against
        // `SessionManager::graceful_shutdown`'s signature.
        self.sessions.graceful_shutdown(signal, 5).await;
        self.shutdown_services(false).await;
    }
    /// Blocks until a termination signal arrives, then runs `shutdown`
    /// exactly once (guarded by the CAS on the `shutdown` flag).
    pub async fn wait_for_termination_request(&mut self) {
        match signal_stream() {
            Err(cause) => {
                error!("Cannot set shutdown signal handler, {:?}", cause);
                std::process::exit(1);
            }
            Ok(mut stream) => {
                stream.next().await;
                info!("Received termination signal.");
                // compare_exchange returns Ok(previous) on success, so
                // Ok(false) means this caller flipped the flag first;
                // later callers (and Drop) will skip the shutdown.
                if let Ok(false) =
                    self.shutdown
                        .compare_exchange(false, true, Ordering::SeqCst, Ordering::Acquire)
                {
                    let shutdown_services = self.shutdown(stream);
                    shutdown_services.await;
                }
            }
        }
    }
    /// Registers a service to be stopped during shutdown.
    pub fn add_service(&mut self, service: Box<dyn Server>) {
        self.services.push(service);
    }
}
impl Drop for ShutdownHandle {
    /// Best-effort shutdown if nobody initiated one explicitly: Ok(false)
    /// from the CAS means this `drop` won the race to flip the flag.
    fn drop(&mut self) {
        if let Ok(false) =
            self.shutdown
                .compare_exchange(false, true, Ordering::SeqCst, Ordering::Acquire)
        {
            // Runs the async shutdown to completion on the current thread.
            let signal_stream = DummySignalStream::create(SignalType::Exit);
            futures::executor::block_on(self.shutdown(signal_stream));
        }
    }
}
|
use std::fmt;
/// DNS message OPCODE: the kind of query carried in the message header.
/// Unassigned wire values collapse into `Reserved`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Opcode {
    Query,
    IQuery,
    Status,
    Notify,
    Update,
    Reserved,
}
impl Opcode {
    /// Decodes a wire-format opcode value; anything unassigned (3, 6+)
    /// maps to `Reserved`.
    pub fn new(value: u8) -> Self {
        match value {
            0 => Self::Query,
            1 => Self::IQuery,
            2 => Self::Status,
            4 => Self::Notify,
            5 => Self::Update,
            _ => Self::Reserved,
        }
    }
    /// Encodes the opcode back to its wire value (`Reserved` encodes as 6).
    pub fn to_u8(self) -> u8 {
        match self {
            Self::Query => 0,
            Self::IQuery => 1,
            Self::Status => 2,
            Self::Notify => 4,
            Self::Update => 5,
            Self::Reserved => 6,
        }
    }
    /// Upper-case mnemonic for the opcode.
    pub fn to_str(self) -> &'static str {
        match self {
            Self::Query => "QUERY",
            Self::IQuery => "IQUERY",
            Self::Status => "STATUS",
            Self::Notify => "NOTIFY",
            Self::Update => "UPDATE",
            Self::Reserved => "RESERVED",
        }
    }
}
impl fmt::Display for Opcode {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.to_str())
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // NOTE(review): the name says "rcode" but this exercises Opcode —
    // consider renaming to test_opcode_equal.
    #[test]
    pub fn test_rcode_equal() {
        assert_eq!(Opcode::Query.to_u8(), 0);
        assert_eq!(Opcode::Query.to_string(), "QUERY");
        assert_eq!(Opcode::Notify.to_string(), "NOTIFY");
    }
}
|
use std::{sync::Arc, time::Duration};
use futures::Future;
use observability_deps::tracing::*;
use tokio::sync::oneshot;
use tokio_util::sync::CancellationToken;
use crate::{
ingest_state::{IngestState, IngestStateError},
partition_iter::PartitionIter,
persist::{drain_buffer::persist_partitions, queue::PersistQueue},
query::projection::OwnedProjection,
wal::reference_tracker::WalReferenceHandle,
};
/// Defines how often the shutdown task polls the partition buffers for
/// emptiness.
///
/// Polls faster in tests to avoid unnecessary delay.
#[cfg(test)]
const SHUTDOWN_POLL_INTERVAL: Duration = Duration::from_millis(50);
#[cfg(not(test))]
const SHUTDOWN_POLL_INTERVAL: Duration = Duration::from_secs(1);
/// Awaits `fut`, before blocking ingest and persisting all data.
///
/// Returns once all outstanding persist jobs have completed (regardless of what
/// started them) and all buffered data has been flushed to object store.
///
/// Correctly accounts for persist jobs that have been started (by a call to
/// [`PartitionData::mark_persisting()`] but not yet enqueued).
///
/// Ingest is blocked by setting [`IngestStateError::GracefulStop`] in the
/// [`IngestState`].
///
/// [`PartitionData::mark_persisting()`]:
/// crate::buffer_tree::partition::PartitionData::mark_persisting()
pub(super) async fn graceful_shutdown_handler<F, T, P>(
    fut: F,
    complete: oneshot::Sender<()>,
    ingest_state: Arc<IngestState>,
    buffer: T,
    persist: P,
    wal: Arc<wal::Wal>,
    wal_reference_handle: WalReferenceHandle,
) where
    F: Future<Output = CancellationToken> + Send,
    T: PartitionIter + Sync,
    P: PersistQueue + Clone,
{
    // Obtain the cancellation token that stops the RPC server.
    let rpc_server_stop = fut.await;
    info!("gracefully stopping ingester");
    // Reject RPC writes.
    //
    // There MAY be writes ongoing that started before this state was set.
    ingest_state.set(IngestStateError::GracefulStop);
    info!("persisting all data before shutdown");
    // Drain the buffer tree, persisting all data.
    //
    // Returns once the persist jobs it starts have complete.
    persist_partitions(buffer.partition_iter(), &persist).await;
    // There may have been concurrent persist jobs started previously by hot
    // partition persistence or WAL rotation (or some other, arbitrary persist
    // source) that have not yet completed (this is unlikely). There may also be
    // late arriving writes that started before ingest was blocked, but did not
    // buffer until after the persist was completed above (also unlikely).
    //
    // Wait until there is no data in the buffer at all before proceeding,
    // therefore ensuring those concurrent persist operations have completed and
    // no late arriving data remains buffered.
    //
    // NOTE: There is a small race in which a late arriving write starts before
    // ingest is blocked, is then stalled the entire time partitions are
    // persisted, remains stalled while this "empty" check occurs, and then
    // springs to life and buffers in the buffer tree after this check has
    // completed - I think this is extreme enough to accept as a theoretical
    // possibility that doesn't need covering off in practice.
    while buffer.partition_iter().any(|p| {
        p.lock()
            .get_query_data(&OwnedProjection::default())
            .is_some()
    }) {
        // persist_partitions returns the number of partitions it enqueued.
        if persist_partitions(buffer.partition_iter(), &persist).await != 0 {
            // Late arriving writes needed persisting.
            debug!("re-persisting late arriving data");
        } else {
            // At least one partition is returning data, and there is no data to
            // start persisting, therefore there is an outstanding persist
            // operation that hasn't yet been marked as complete.
            debug!("waiting for concurrent persist to complete");
        }
        tokio::time::sleep(SHUTDOWN_POLL_INTERVAL).await;
    }
    // Register interest with the WAL reference handle to notify this thread
    // when there are no tracked inactive WAL segments, ensuring they are
    // deleted before shutdown (so as not to be replayed).
    //
    // This future MUST be created before the active WAL segment is rotated
    // out and enqueued, but `await`ed on afterwards. Failure to do so may
    // cause the notifier to deadlock if the inactive segment tracking set
    // empties before the notifier is created.
    //
    // TL;DR: Please read the docs for [`WalReferenceHandle::empty_inactive_notifier()`]
    // before moving this about.
    let empty_waker = wal_reference_handle.empty_inactive_notifier();
    // There is now no data buffered in the ingester - all data has been
    // persisted to object storage.
    //
    // We can rotate the open WAL segment and notify the reference handle
    // that the segment's file can be deleted because everything has been
    // persisted.
    let (closed_segment, sequence_number_set) = wal.rotate().expect("failed to rotate wal");
    let rx = wal_reference_handle
        .enqueue_rotated_file(closed_segment.id(), sequence_number_set)
        .await;
    if let Err(e) = rx.await {
        error!(%e, "encountered failure waiting on file rotation receiver during shutdown");
    };
    // Wait for the file rotation to be processed and the tracked set
    // to drop to empty.
    empty_waker.await;
    info!("persisted all data - stopping ingester");
    // Stop the RPC server (and therefore stop accepting new queries)
    rpc_server_stop.cancel();
    // And signal the ingester has stopped.
    let _ = complete.send(());
}
#[cfg(test)]
mod tests {
    use std::{future::ready, sync::Arc, task::Poll};
    use assert_matches::assert_matches;
    use data_types::SequenceNumber;
    use futures::FutureExt;
    use mutable_batch_lp::test_helpers::lp_to_mutable_batch;
    use parking_lot::Mutex;
    use test_helpers::timeout::FutureTimeout;
    use crate::{
        buffer_tree::partition::PartitionData,
        persist::queue::mock::MockPersistQueue,
        test_util::{PartitionDataBuilder, ARBITRARY_TABLE_NAME},
    };
    use super::*;
    // Initialise a partition containing buffered data.
    fn new_partition() -> Arc<Mutex<PartitionData>> {
        let mut partition = PartitionDataBuilder::new().build();
        let mb = lp_to_mutable_batch(&format!(
            r#"{},city=London people=2,pigeons="millions" 10"#,
            &*ARBITRARY_TABLE_NAME
        ))
        .1;
        partition
            .buffer_write(mb, SequenceNumber::new(1))
            .expect("failed to write dummy data");
        Arc::new(Mutex::new(partition))
    }
    // Initialise a WAL.
    // The TempDir is returned so it outlives the WAL writing into it.
    async fn new_wal() -> (tempfile::TempDir, Arc<wal::Wal>) {
        let dir = tempfile::tempdir().expect("failed to get temporary WAL directory");
        let wal = wal::Wal::new(dir.path())
            .await
            .expect("failed to initialise WAL to write");
        (dir, wal)
    }
    // Happy path: all buffered data is persisted and the WAL drained.
    #[tokio::test]
    async fn test_graceful_shutdown() {
        let ingest_state = Arc::new(IngestState::default());
        let (_tempdir, wal) = new_wal().await;
        let (wal_reference_handle, wal_reference_actor) =
            WalReferenceHandle::new(Arc::clone(&wal), &metric::Registry::default());
        let persist = Arc::new(MockPersistQueue::new_with_observer(
            wal_reference_handle.clone(),
        ));
        tokio::spawn(wal_reference_actor.run());
        // Ensure there is always more than 1 segment in the test, but notify the ref tracker.
        let (closed_segment, set) = wal.rotate().expect("failed to rotate WAL");
        wal_reference_handle
            .enqueue_rotated_file(closed_segment.id(), set)
            .await;
        let partition = new_partition();
        let rpc_stop = CancellationToken::new();
        let (tx, rx) = oneshot::channel();
        graceful_shutdown_handler(
            ready(rpc_stop.clone()),
            tx,
            ingest_state,
            vec![Arc::clone(&partition)],
            Arc::clone(&persist),
            Arc::clone(&wal),
            wal_reference_handle,
        )
        .await;
        // Wait for the shutdown to complete.
        rx.with_timeout_panic(Duration::from_secs(5))
            .await
            .expect("shutdown task panicked");
        assert!(rpc_stop.is_cancelled());
        // Assert the data was persisted
        let persist_calls = persist.calls();
        assert_matches!(&*persist_calls, [p] => {
            assert!(Arc::ptr_eq(p, &partition));
        });
        // Assert there are now no WAL segment files that will be replayed
        assert!(wal.closed_segments().is_empty());
    }
    // Shutdown must wait for an already-running persist job to finish.
    #[tokio::test]
    async fn test_graceful_shutdown_concurrent_persist() {
        let ingest_state = Arc::new(IngestState::default());
        let (_tempdir, wal) = new_wal().await;
        let (wal_reference_handle, wal_reference_actor) =
            WalReferenceHandle::new(Arc::clone(&wal), &metric::Registry::default());
        let persist = Arc::new(MockPersistQueue::new_with_observer(
            wal_reference_handle.clone(),
        ));
        tokio::spawn(wal_reference_actor.run());
        // Ensure there is always more than 1 segment in the test, but notify the ref tracker.
        let (closed_segment, set) = wal.rotate().expect("failed to rotate WAL");
        wal_reference_handle
            .enqueue_rotated_file(closed_segment.id(), set)
            .await;
        let partition = new_partition();
        // Mark the partition as persisting
        let persist_job = partition
            .lock()
            .mark_persisting()
            .expect("non-empty partition should begin persisting");
        // Start the graceful shutdown job in another thread, as it SHOULD block
        // until the persist job is marked as complete.
        let rpc_stop = CancellationToken::new();
        let (tx, rx) = oneshot::channel();
        let handle = tokio::spawn(graceful_shutdown_handler(
            ready(rpc_stop.clone()),
            tx,
            ingest_state,
            vec![Arc::clone(&partition)],
            Arc::clone(&persist),
            Arc::clone(&wal),
            wal_reference_handle,
        ));
        // Wait a small duration of time for the first buffer emptiness check to
        // fire.
        tokio::time::sleep(Duration::from_millis(200)).await;
        // Assert the shutdown hasn't completed.
        //
        // This is racy, but will fail false negative and will not flake in CI.
        // If this fails in CI, it is a legitimate bug (shutdown task should not
        // have stopped).
        let rx = rx.shared();
        assert_matches!(futures::poll!(rx.clone()), Poll::Pending);
        // And because the shutdown is still ongoing, the RPC server must not
        // have been signalled to stop.
        assert!(!rpc_stop.is_cancelled());
        // Mark the persist job as having completed, unblocking the shutdown
        // task.
        partition.lock().mark_persisted(persist_job);
        // Wait for the shutdown to complete.
        rx.with_timeout_panic(Duration::from_secs(5))
            .await
            .expect("shutdown task panicked");
        assert!(rpc_stop.is_cancelled());
        assert!(handle
            .with_timeout_panic(Duration::from_secs(1))
            .await
            .is_ok());
        // Assert the data was not passed to the persist task (it couldn't have
        // been, as this caller held the PersistData)
        assert!(persist.calls().is_empty());
        // Assert there are now no WAL segment files that will be replayed
        assert!(wal.closed_segments().is_empty());
    }
    /// An implementation of [`PartitionIter`] that yields an extra new,
    /// non-empty partition each time [`PartitionIter::partition_iter()`] is
    /// called.
    #[derive(Debug)]
    struct SneakyPartitionBuffer {
        max: usize,
        partitions: Mutex<Vec<Arc<Mutex<PartitionData>>>>,
    }
    impl SneakyPartitionBuffer {
        fn new(max: usize) -> Self {
            Self {
                max,
                partitions: Default::default(),
            }
        }
        fn partitions(&self) -> Vec<Arc<Mutex<PartitionData>>> {
            self.partitions.lock().clone()
        }
    }
    impl PartitionIter for SneakyPartitionBuffer {
        fn partition_iter(&self) -> Box<dyn Iterator<Item = Arc<Mutex<PartitionData>>> + Send> {
            let mut partitions = self.partitions.lock();
            // If this hasn't reached the maximum number of times to be sneaky,
            // add another partition.
            if partitions.len() != self.max {
                partitions.push(new_partition());
            }
            Box::new(partitions.clone().into_iter())
        }
    }
    // Shutdown must also drain writes that arrive mid-shutdown.
    #[tokio::test]
    async fn test_graceful_shutdown_concurrent_new_writes() {
        let ingest_state = Arc::new(IngestState::default());
        let (_tempdir, wal) = new_wal().await;
        let (wal_reference_handle, wal_reference_actor) =
            WalReferenceHandle::new(Arc::clone(&wal), &metric::Registry::default());
        let persist = Arc::new(MockPersistQueue::new_with_observer(
            wal_reference_handle.clone(),
        ));
        tokio::spawn(wal_reference_actor.run());
        // Ensure there is always more than 1 segment in the test, but notify the ref tracker.
        let (closed_segment, set) = wal.rotate().expect("failed to rotate WAL");
        wal_reference_handle
            .enqueue_rotated_file(closed_segment.id(), set)
            .await;
        // Initialise a buffer that keeps yielding more and more newly wrote
        // data, up until the maximum.
        const MAX_NEW_PARTITIONS: usize = 3;
        let buffer = Arc::new(SneakyPartitionBuffer::new(MAX_NEW_PARTITIONS));
        // Start the graceful shutdown job in another thread, as it SHOULD block
        // until the persist job is marked as complete.
        let rpc_stop = CancellationToken::new();
        let (tx, rx) = oneshot::channel();
        let handle = tokio::spawn(graceful_shutdown_handler(
            ready(rpc_stop.clone()),
            tx,
            ingest_state,
            Arc::clone(&buffer),
            Arc::clone(&persist),
            Arc::clone(&wal),
            wal_reference_handle.clone(),
        ));
        // Wait for the shutdown to complete.
        rx.with_timeout_panic(Duration::from_secs(5))
            .await
            .expect("shutdown task panicked");
        assert!(rpc_stop.is_cancelled());
        assert!(handle
            .with_timeout_panic(Duration::from_secs(1))
            .await
            .is_ok());
        // Assert all the data yielded by the sneaky buffer was passed to the
        // persist task.
        let persist_calls = persist.calls();
        let must_have_persisted = |p: &Arc<Mutex<PartitionData>>| {
            for call in &persist_calls {
                if Arc::ptr_eq(call, p) {
                    return true;
                }
            }
            false
        };
        if !buffer.partitions().iter().all(must_have_persisted) {
            panic!("at least one sneaky buffer was not passed to the persist system");
        }
        // Assert there are now no WAL segment files that will be replayed
        assert!(wal.closed_segments().is_empty());
    }
}
|
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{visit_mut::VisitMut, *};
use crate::utils::*;
mod context;
mod expr;
#[cfg(feature = "type_analysis")]
mod type_analysis;
mod visitor;
use self::context::{Context, VisitLastMode, VisitMode, DEFAULT_MARKER};
use self::expr::child_expr;
/// The attribute name.
const NAME: &str = "auto_enum";
/// The annotation for recursively parsing.
const NESTED: &str = "nested";
/// The annotation for skipping branch.
const NEVER: &str = "never";
/// The annotations used by `#[auto_enum]`.
const EMPTY_ATTRS: &[&str] = &[NEVER, NESTED];
/// The old annotation replaced by `#[nested]`.
const NESTED_OLD: &str = "rec";
/// Entry point for the `#[auto_enum]` attribute.
///
/// Delegates to `expand` and converts any failure into a compile error so it
/// surfaces as a diagnostic instead of aborting the macro expansion.
pub(crate) fn attribute(args: TokenStream, input: TokenStream) -> TokenStream {
    match expand(args, input) {
        Ok(tokens) => tokens,
        Err(e) => e.to_compile_error(),
    }
}
/// Expands the attribute over `input`.
///
/// The input is first parsed as a statement; if that fails, it is re-parsed
/// as an expression. All recorded diagnostics are folded into the result.
fn expand(args: TokenStream, input: TokenStream) -> Result<TokenStream> {
    let mut cx = Context::root(input.clone(), args)?;
    let res = match syn::parse2::<Stmt>(input.clone()) {
        Ok(mut stmt) => expand_parent_stmt(&mut stmt, &mut cx).map(|()| stmt.into_token_stream()),
        Err(e) => syn::parse2::<Expr>(input)
            .map_err(|_e| {
                // Record the statement-parse error, then report a generic
                // "unsupported position" error for the failed expression parse.
                cx.diagnostic.error(e);
                error!(cx.span, "may only be used on expression, statement, or function")
            })
            .and_then(|mut expr| {
                expand_parent_expr(&mut expr, &mut cx).map(|()| expr.into_token_stream())
            }),
    };
    match res {
        Err(e) => cx.diagnostic.error(e),
        // Expansion nominally succeeded but diagnostics were recorded along the way.
        Ok(_) if cx.failed() => {}
        Ok(tokens) => return Ok(tokens),
    }
    // All failure paths funnel here; at least one diagnostic exists by now.
    Err(cx.diagnostic.get_inner().unwrap())
}
/// Visits an expression; when "visit last" applies to a closure, descends
/// into the closure body so its result expression is the one rewritten.
fn visit_expr(expr: &mut Expr, cx: &mut Context) -> Result<()> {
    let expr = match expr {
        Expr::Closure(ExprClosure { body, .. }) if cx.visit_last() => {
            // Treat the closure body like a function body: rewrite `return`s
            // and let `find_try` scan it (presumably for `?` handling — see
            // Context::find_try).
            cx.visit_mode = VisitMode::Return;
            cx.visit_last_mode = VisitLastMode::Closure;
            cx.find_try(|v| v.visit_expr_mut(body));
            &mut **body
        }
        _ => expr,
    };
    child_expr(expr, cx).map(|()| cx.visitor(|v| v.visit_expr_mut(expr)))
}
/// Wraps `expr` in a block that first declares the generated enum `item`,
/// i.e. rewrites `<expr>` into `{ enum ...; <expr> }`.
fn build_expr(expr: &mut Expr, item: ItemEnum) {
    replace_expr(expr, |expr| expr_block(block(vec![Stmt::Item(item.into()), Stmt::Expr(expr)])));
}
// =================================================================================================
// Expand statement or expression in which `#[auto_enum]` was directly used.
/// Dispatches expansion based on the statement kind the attribute was applied
/// to. Items other than functions are rejected.
fn expand_parent_stmt(stmt: &mut Stmt, cx: &mut Context) -> Result<()> {
    // A semicolon-terminated statement discards its value, so there is no
    // "last expression" to rewrite.
    if let Stmt::Semi(..) = &stmt {
        cx.visit_last_mode = VisitLastMode::Never;
    }
    match stmt {
        Stmt::Expr(expr) | Stmt::Semi(expr, _) => expand_parent_expr(expr, cx),
        Stmt::Local(local) => expand_parent_local(local, cx),
        Stmt::Item(Item::Fn(item)) => expand_parent_item_fn(item, cx),
        Stmt::Item(item) => {
            Err(error!(item, "may only be used on expression, statement, or function"))
        }
    }
}
/// Expands an expression on which the attribute was used directly.
fn expand_parent_expr(expr: &mut Expr, cx: &mut Context) -> Result<()> {
    if cx.is_dummy() {
        // Expansion is disabled for this invocation: only strip markers.
        cx.dummy(|v| v.visit_expr_mut(expr));
        return Ok(());
    }
    visit_expr(expr, cx)?;
    cx.build(|item| build_expr(expr, item))
}
/// Expands a `let` statement on which the attribute was used.
///
/// The binding must have an initializer; bare `let x;` is rejected.
fn expand_parent_local(local: &mut Local, cx: &mut Context) -> Result<()> {
    #[cfg(feature = "type_analysis")]
    {
        // With type analysis enabled, record traits mentioned in an explicit
        // type annotation on the pattern.
        if let Pat::Type(PatType { ty, .. }) = &mut local.pat {
            cx.collect_trait(&mut *ty);
        }
    }
    if cx.is_dummy() {
        // Expansion is disabled for this invocation: only strip markers.
        cx.dummy(|v| v.visit_local_mut(local));
        return Ok(());
    }
    let expr = if let Some(expr) = local.init.as_mut().map(|(_, expr)| &mut **expr) {
        expr
    } else {
        return Err(error!(
            local,
            "the `#[auto_enum]` attribute is not supported uninitialized let statement"
        ));
    };
    visit_expr(expr, cx)?;
    cx.build(|item| build_expr(expr, item))
}
/// Expands a function item on which the attribute was used.
///
/// A return type of `impl Trait` switches on `return`-rewriting; a return
/// type of `Result<T, impl Trait>` additionally triggers a `find_try` scan
/// of the body (presumably for `?`-expression handling — see Context).
fn expand_parent_item_fn(item: &mut ItemFn, cx: &mut Context) -> Result<()> {
    let ItemFn { sig, block, .. } = item;
    if let ReturnType::Type(_, ty) = &mut sig.output {
        match &**ty {
            // `return`
            Type::ImplTrait(_) if cx.visit_last() => cx.visit_mode = VisitMode::Return,
            // `?` operator
            Type::Path(TypePath { qself: None, path }) if cx.visit_last() => {
                // Only the last path segment matters, e.g. the `Result<..>`
                // of `std::result::Result<..>`.
                let PathSegment { arguments, ident } = &path.segments[path.segments.len() - 1];
                match arguments {
                    // `Result<T, impl Trait>`
                    PathArguments::AngleBracketed(AngleBracketedGenericArguments {
                        colon2_token: None,
                        args,
                        ..
                    }) if args.len() == 2 && ident == "Result" => {
                        if let (
                            GenericArgument::Type(_),
                            GenericArgument::Type(Type::ImplTrait(_)),
                        ) = (&args[0], &args[1])
                        {
                            cx.find_try(|v| v.visit_block_mut(&mut **block));
                        }
                    }
                    _ => {}
                }
            }
            _ => {}
        }
        #[cfg(feature = "type_analysis")]
        cx.collect_trait(&mut *ty);
    }
    if cx.is_dummy() {
        // Expansion is disabled for this invocation: only strip markers.
        cx.dummy(|v| v.visit_item_fn_mut(item));
        return Ok(());
    }
    // Rewrite the trailing expression of the body; empty bodies are rejected.
    match item.block.stmts.last_mut() {
        Some(Stmt::Expr(expr)) => child_expr(expr, cx)?,
        Some(_) => {}
        None => {
            return Err(error!(
                item.block,
                "the `#[auto_enum]` attribute is not supported empty functions"
            ));
        }
    }
    cx.visitor(|v| v.visit_item_fn_mut(item));
    // Insert the generated enum declaration at the top of the function body.
    cx.build(|i| item.block.stmts.insert(0, Stmt::Item(i.into())))
}
|
use proconio::input;
/// Reads an integer and prints its last two digits, zero-padded.
fn main() {
    input! {
        n: usize,
    };
    // `{:02}` of n % 100 is exactly the tens digit followed by the units digit.
    println!("{:02}", n % 100);
}
|
#[cfg(all(not(target_arch = "wasm32"), test))]
mod test;
use liblumen_alloc::erts::exception;
use liblumen_alloc::erts::term::prelude::*;
/// `xor/2` infix operator.
///
/// **NOTE: NOT SHORT-CIRCUITING!**
#[native_implemented::function(erlang:xor/2)]
pub fn result(left_boolean: Term, right_boolean: Term) -> exception::Result<Term> {
    // Delegates to the shared boolean-operator macro with bitwise XOR;
    // both operands are always evaluated (no short-circuiting, as noted above).
    boolean_infix_operator!(left_boolean, right_boolean, ^)
}
|
mod sha256;
mod sha512;
pub use sha256::*;
pub use sha512::*;
|
/// A minimal record holding a title and a status string.
pub struct Base {
    pub title: String,
    pub status: String,
}

impl Base {
    /// Creates a `Base` from the given title and status.
    pub fn new(input_title: String, input_status: String) -> Base {
        // Implicit tail expression instead of an explicit `return`.
        Base {
            title: input_title,
            status: input_status,
        }
    }
}
mod tcp;
pub use self::tcp::{TcpStream,TcpClient};
|
// SPDX-License-Identifier: MIT
// Copyright (c) 2021-2022 brainpower <brainpower at mailbox dot org>
#![feature(assert_matches)]
#[cfg(test)]
mod tests {
    use checkarg::{CheckArg, ValueType, RC};
    use std::assert_matches::assert_matches;

    /// Registers a custom help callback, parses `option`, and verifies the
    /// callback ran exactly once and produced the expected usage text.
    fn triggering_help(option: &str) {
        let argv = vec!["/test01", option];
        let mut cb_ran = 0;
        let mut out = String::new();
        {
            let mut ca = CheckArg::new("test01");
            // the default callback calls exit(0), which is bad here
            // ca.add_autohelp();
            ca.add_cb(
                'h',
                "help",
                "show this help message and exit",
                |cx, _opt, _val| {
                    // Count invocations and capture the generated help text.
                    cb_ran += 1;
                    out = cx.autohelp();
                    Ok(())
                },
                ValueType::None,
                None,
            );
            ca.add(
                'i',
                "input",
                "file to read from",
                ValueType::Required,
                Some(""),
            );
            let rc = ca.parse(&argv);
            assert_matches!(rc, RC::Ok, "parsing failed");
        }
        assert_eq!(cb_ran, 1, "callback did not run");
        assert_eq!(
            out,
            "\
Usage: test01 [options]
Options:
-h, --help show this help message and exit
-i, --input file to read from\n"
        );
    }
    #[test]
    fn triggering_help_long_option() { triggering_help("--help") }
    #[test]
    fn triggering_help_short_option() { triggering_help("-h") }

    /// Builds a `CheckArg` with a single `-i/--input` option that requires a value.
    fn setup_simple_ca() -> CheckArg<'static> {
        let mut ca = CheckArg::new("name");
        ca.add(
            'i',
            "input",
            "file to read from",
            ValueType::Required,
            Some(""),
        );
        ca
    }
    /// A valid option with its value parses and is retrievable.
    #[test]
    fn correct_option_and_value() {
        let mut ca = setup_simple_ca();
        let argv = vec!["/usr/bin/name", "-i", "input.in"];
        let rc = ca.parse(&argv);
        assert_matches!(rc, RC::Ok, "parsing failed");
        assert!(ca.isset("input"));
        assert_matches!(ca.value("input"), Some(input) if input == "input.in");
        assert_eq!(ca.value("input").unwrap(), "input.in");
    }
    /// An unregistered option is reported as `InvOpt`.
    #[test]
    fn invalid_option() {
        let mut ca = setup_simple_ca();
        let argv = vec!["/usr/bin/name", "-x"];
        let rc = ca.parse(&argv);
        assert_matches!(rc, RC::InvOpt, "parse should detect invalid option");
    }
    /// A value-requiring option with no value is reported as `MissVal`.
    #[test]
    fn missing_value() {
        let mut ca = setup_simple_ca();
        let argv = vec!["/usr/bin/name", "-i"];
        let rc = ca.parse(&argv);
        assert_matches!(rc, RC::MissVal, "parse should detect missing value");
    }
    /// `parse` panics when argv lacks even the program name.
    #[test]
    #[should_panic(expected = "argv must have at least one element")]
    fn empty_argv_vector() {
        let mut ca = setup_simple_ca();
        let argv = vec![];
        ca.parse(&argv);
    }
}
|
pub use VkComponentSwizzle::*;
/// Mirror of the Vulkan `VkComponentSwizzle` C enum: selects what each
/// component of an image view reads from. Discriminants match the C API.
#[repr(u32)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VkComponentSwizzle {
    VK_COMPONENT_SWIZZLE_IDENTITY = 0,
    VK_COMPONENT_SWIZZLE_ZERO = 1,
    VK_COMPONENT_SWIZZLE_ONE = 2,
    VK_COMPONENT_SWIZZLE_R = 3,
    VK_COMPONENT_SWIZZLE_G = 4,
    VK_COMPONENT_SWIZZLE_B = 5,
    VK_COMPONENT_SWIZZLE_A = 6,
}
|
use std::cmp::Reverse;
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use itertools::Itertools;
use whiteread::parse_line;
const ten97: usize = 1000000007;
/// Maps an ASCII letter to its 0-based alphabet index:
/// 'a'..='z' -> 0..=25 and 'A'..='Z' -> 0..=25.
/// Panics on any non-ASCII-alphabetic character.
fn alphabet2idx(c: char) -> usize {
    let base = if c.is_ascii_lowercase() {
        'a'
    } else if c.is_ascii_uppercase() {
        'A'
    } else {
        panic!("wtf")
    };
    (c as u8 - base as u8) as usize
}
// SCC: 強連結成分
// https://hkawabata.github.io/technical-note/note/Algorithm/graph/scc.html
/// Reads a directed graph (n nodes, m edges) from stdin and prints the number
/// of unordered pairs of distinct, mutually reachable nodes.
fn main() {
    let (n, m): (usize, usize) = parse_line().unwrap();
    // 1-indexed adjacency list; index 0 is unused.
    let mut paths: Vec<Vec<usize>> = vec![vec![]; n + 1];
    for _ in 0..m {
        let (a, b): (usize, usize) = parse_line().unwrap();
        paths[a].push(b);
    }
    let groups = scc(n, &paths);
    // How many (x, y) pairs are there in the end? Each strongly connected
    // component of size k contributes k * (k - 1) / 2 pairs.
    let mut ans = 0;
    for g in groups {
        if g.len() > 1 {
            ans += g.len() * (g.len() - 1) / 2;
        }
    }
    println!("{}", ans);
}
/// Decomposes a 1-indexed directed graph into strongly connected components
/// (Kosaraju's two-pass algorithm).
///
/// * `n` - number of nodes (nodes are `1..=n`; index 0 of `paths` is unused)
/// * `paths` - adjacency list: `paths[u]` holds every `v` with an edge `u -> v`
///
/// Returns the components, each as a list of node ids.
fn scc(n: usize, paths: &Vec<Vec<usize>>) -> Vec<Vec<usize>> {
    // First pass: record nodes in DFS finish (post) order.
    fn dfs(
        order: &mut Vec<usize>,
        pos: &mut usize,
        node: usize,
        visited: &mut Vec<bool>,
        paths: &Vec<Vec<usize>>,
    ) {
        for &next in &paths[node] {
            if !visited[next] {
                visited[next] = true;
                dfs(order, pos, next, visited, paths);
                *pos += 1;
                order[*pos] = next;
            }
        }
    }
    let mut visited = vec![false; n + 1];
    let mut order = vec![0usize; n + 1];
    let mut pos = 0usize;
    for start in 1..=n {
        if visited[start] {
            continue;
        }
        visited[start] = true;
        dfs(&mut order, &mut pos, start, &mut visited, paths);
        pos += 1;
        order[pos] = start;
    }
    // Build the reversed graph.
    let mut rev: Vec<Vec<usize>> = vec![vec![]; n + 1];
    for (from, tos) in paths.iter().enumerate() {
        for &to in tos {
            rev[to].push(from);
        }
    }
    // Second pass: walk the reversed graph starting from the latest-finishing
    // nodes; every node reached in one walk belongs to the same component.
    let mut visited = vec![false; n + 1];
    let mut groups: Vec<Vec<usize>> = Vec::new();
    for i in (1..=n).rev() {
        let root = order[i];
        if visited[root] {
            continue;
        }
        visited[root] = true;
        let mut component = Vec::new();
        let mut stack = vec![root];
        while let Some(node) = stack.pop() {
            component.push(node);
            for &prev in &rev[node] {
                if !visited[prev] {
                    visited[prev] = true;
                    stack.push(prev);
                }
            }
        }
        groups.push(component);
    }
    groups
}
|
#[cfg(feature = "client")]
mod playdevice;
#[cfg(feature = "client")]
mod recdevice;
#[cfg(feature = "client")]
pub use self::playdevice::*;
#[cfg(feature = "client")]
pub use self::recdevice::*;
use crate::vars::DEFAULT_SAMPLES_PER_SECOND;
use std::io::Write;
use std::path::Path;
#[cfg(feature = "client")]
use std::io::Cursor;
use log::warn;
use ogg_opus::encode;
use thiserror::Error;
/// Encoded (e.g. Ogg Opus) audio bytes.
#[derive(Debug, Clone)]
struct AudioEncoded {
    data: Vec<u8>,
}
impl AudioEncoded {
    /// Wraps already-encoded bytes.
    fn new(data: Vec<u8>) -> Self {
        Self { data }
    }
    /// Whether the bytes parse as an Ogg Opus stream.
    #[cfg(feature = "client")]
    pub fn is_ogg_opus(&self) -> bool {
        ogg_opus::is_ogg_opus(Cursor::new(&self.data))
    }
    /// Sample rate of the encoded audio.
    // NOTE(review): hard-coded placeholder — real extraction from the stream
    // is not implemented yet, as the warn! below states.
    pub fn get_sps(&self) -> u32 {
        // Just some value, not yet implemented
        // TODO: Finish it
        warn!("AudioEncoded::get_sps not yet implemented");
        48000
    }
}
/// Backing storage for audio: either raw PCM samples or encoded bytes.
#[derive(Debug, Clone)]
enum Data {
    Raw(AudioRaw),
    Encoded(AudioEncoded),
}
impl Data {
    /// Empties the underlying buffer, keeping the variant.
    fn clear(&mut self) {
        match self {
            Data::Raw(raw_data) => raw_data.clear(),
            Data::Encoded(enc_data) => enc_data.data.clear(),
        }
    }
    /// Appends raw samples. Panics on the `Encoded` variant, or (via expect)
    /// if the raw buffer is not at the default sample rate.
    fn append_raw(&mut self, b: &[i16]) {
        match self {
            Data::Raw(data_self) => data_self
                .append_audio(b, DEFAULT_SAMPLES_PER_SECOND)
                .expect("Tried to append but one of the raw data wasn't using default sps"),
            Data::Encoded(_) => std::panic!("Tried to append a raw audio to an encoded audio"),
        }
    }
    /// `true` for the `Raw` variant.
    fn is_raw(&self) -> bool {
        match self {
            Data::Raw(_) => true,
            Data::Encoded(_) => false,
        }
    }
    /// Length in samples (`Raw`) or bytes (`Encoded`).
    fn len(&self) -> usize {
        match self {
            Data::Raw(buffer) => buffer.len(),
            Data::Encoded(buffer) => buffer.data.len(),
        }
    }
    /// Sample rate of the underlying audio.
    fn get_sps(&self) -> u32 {
        match self {
            Data::Raw(_) => DEFAULT_SAMPLES_PER_SECOND,
            Data::Encoded(data) => data.get_sps(),
        }
    }
}
// Audio container: wraps either raw PCM samples or already-encoded bytes.
#[derive(Debug, Clone)]
pub struct Audio {
    buffer: Data,
}
impl Audio {
    /// Creates an empty raw-audio container.
    /// Panics unless `samples_per_second` equals the crate default.
    pub fn new_empty(samples_per_second: u32) -> Self {
        assert_eq!(samples_per_second, DEFAULT_SAMPLES_PER_SECOND);
        Self {
            buffer: Data::Raw(AudioRaw::new_empty(samples_per_second)),
        }
    }
    /// Wraps an existing raw sample buffer.
    /// Panics unless `samples_per_second` equals the crate default.
    pub fn new_raw(buffer: Vec<i16>, samples_per_second: u32) -> Self {
        assert_eq!(samples_per_second, DEFAULT_SAMPLES_PER_SECOND);
        Self {
            buffer: Data::Raw(AudioRaw::new_raw(buffer, samples_per_second)),
        }
    }
    /// Wraps already-encoded bytes.
    pub fn new_encoded(buffer: Vec<u8>) -> Self {
        Self {
            buffer: Data::Encoded(AudioEncoded::new(buffer)),
        }
    }
    /// Appends raw samples; returns `None` if this audio is encoded.
    /// Panics unless `samples_per_second` equals the crate default.
    pub fn append_raw(&mut self, other: &[i16], samples_per_second: u32) -> Option<()> {
        if self.buffer.is_raw() {
            assert_eq!(samples_per_second, DEFAULT_SAMPLES_PER_SECOND);
            self.buffer.append_raw(other);
            Some(())
        } else {
            // Can't join if it's not the same sample rate
            None
        }
    }
    /// Writes the audio as Ogg to `file_path`, encoding raw samples first;
    /// encoded bytes are written verbatim.
    pub fn write_ogg(&self, file_path: &Path) -> Result<(), AudioError> {
        match &self.buffer {
            Data::Raw(audio_raw) => {
                let as_ogg = audio_raw.to_ogg_opus()?;
                let mut file = std::fs::File::create(file_path)?;
                file.write_all(&as_ogg)?;
            }
            Data::Encoded(vec_data) => {
                let mut file = std::fs::File::create(file_path)?;
                file.write_all(&vec_data.data)?;
            }
        }
        Ok(())
    }
    /// Consumes the audio, returning encoded bytes (encoding raw input).
    pub fn into_encoded(self) -> Result<Vec<u8>, AudioError> {
        match self.buffer {
            Data::Raw(audio_raw) => audio_raw.to_ogg_opus(),
            Data::Encoded(vec_data) => Ok(vec_data.data),
        }
    }
    /// Empties the buffer, keeping the raw/encoded variant.
    pub fn clear(&mut self) {
        self.buffer.clear();
    }
    // Length in seconds
    // NOTE(review): for the Encoded variant this divides a byte count by a
    // sample rate, which is only an approximation — confirm intended.
    pub fn len_s(&self) -> f32 {
        let len = self.buffer.len();
        (len as f32) / (self.buffer.get_sps() as f32)
    }
    /// Wraps an existing `AudioRaw`.
    pub fn from_raw(raw: AudioRaw) -> Self {
        Self {
            buffer: Data::Raw(raw),
        }
    }
}
// For managing raw audio, mostly coming from the mic;
// fixed at 16 kHz mono (what most STTs expect).
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct AudioRaw {
    pub buffer: Vec<i16>,
}
impl AudioRaw {
    /// The fixed sample rate this type operates at.
    pub fn get_samples_per_second() -> u32 {
        DEFAULT_SAMPLES_PER_SECOND
    }
    /// Creates an empty buffer.
    /// Panics if `samples_per_second` differs from the fixed default.
    pub fn new_empty(samples_per_second: u32) -> Self {
        assert!(samples_per_second == Self::get_samples_per_second());
        AudioRaw { buffer: Vec::new() }
    }
    /// Wraps an existing sample buffer.
    /// Panics if `samples_per_second` differs from the fixed default.
    pub fn new_raw(buffer: Vec<i16>, samples_per_second: u32) -> Self {
        assert!(samples_per_second == Self::get_samples_per_second());
        AudioRaw { buffer }
    }
    /// Discards all samples.
    pub fn clear(&mut self) {
        self.buffer.clear()
    }
    /// Appends samples recorded at `sps`; fails with `IncompatibleSps`
    /// unless `sps` matches the fixed default.
    pub fn append_audio(&mut self, other: &[i16], sps: u32) -> Result<(), AudioError> {
        if sps == Self::get_samples_per_second() {
            self.buffer.extend(other);
            Ok(())
        } else {
            Err(AudioError::IncompatibleSps)
        }
    }
    /// Number of buffered samples.
    pub fn len(&self) -> usize {
        self.buffer.len()
    }
    /// Root mean square of the buffered samples.
    ///
    /// Returns 0.0 for an empty buffer. (Previously this divided by zero
    /// and returned NaN, which poisons any downstream comparison.)
    pub fn rms(&self) -> f64 {
        if self.buffer.is_empty() {
            return 0.0;
        }
        let sqr_sum = self
            .buffer
            .iter()
            .fold(0i64, |sqr_sum, s| sqr_sum + (*s as i64) * (*s as i64));
        (sqr_sum as f64 / self.buffer.len() as f64).sqrt()
    }
    // Length in seconds
    pub fn len_s(&self) -> f32 {
        let len = self.buffer.len();
        (len as f32) / (Self::get_samples_per_second() as f32)
    }
    /// Encodes to Ogg Opus and writes the result to `path`.
    pub fn save_to_disk(&self, path: &Path) -> Result<(), AudioError> {
        let ogg = self.to_ogg_opus()?;
        let mut file = std::fs::File::create(path)?;
        file.write_all(&ogg)?;
        Ok(())
    }
    /// Encodes the buffer as a mono Ogg Opus stream.
    // NOTE(review): the 16000 const generic must stay in sync with
    // DEFAULT_SAMPLES_PER_SECOND — confirm they match.
    pub fn to_ogg_opus(&self) -> Result<Vec<u8>, AudioError> {
        Ok(encode::<16000, 1>(&self.buffer)?)
    }
}
#[derive(Error, Debug)]
pub enum AudioError {
#[error("Io Error")]
IOError(#[from] std::io::Error),
#[error("Incompatible Samples per seconds")]
IncompatibleSps,
#[error("")]
OggOpusError(#[from] ogg_opus::Error),
}
|
use serde::{Deserialize, Serialize};
/// A routed message: a list of target names plus the message body.
#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub targets: Vec<String>,
    pub message: String,
}

impl Message {
    /// Parses `"a, b: text"` into targets `["a", "b"]` and message `"text"`.
    /// Returns `None` when the input contains no `:` separator.
    pub fn easy_parse(input: &str) -> Option<Message> {
        let (dest, msg) = input.split_once(':')?;
        let targets = dest
            .split(',')
            .map(|name| name.trim().to_string())
            .collect::<Vec<String>>();
        Some(Message {
            targets,
            message: msg.trim().to_string(),
        })
    }
}
|
use crate::error;
use arrow::datatypes::{DataType, TimeUnit};
use datafusion::common::tree_node::{Transformed, TreeNode, VisitRecursion};
use datafusion::common::{DFSchemaRef, Result};
use datafusion::logical_expr::utils::expr_as_column_expr;
use datafusion::logical_expr::{lit, Expr, ExprSchemable, LogicalPlan, Operator};
use datafusion::scalar::ScalarValue;
use influxdb_influxql_parser::expression::BinaryOperator;
use influxdb_influxql_parser::literal::Number;
use influxdb_influxql_parser::string::Regex;
use query_functions::clean_non_meta_escapes;
use query_functions::coalesce_struct::coalesce_struct;
use schema::InfluxColumnType;
use std::sync::Arc;
use super::ir::{DataSourceSchema, Field};
/// Maps an InfluxQL binary operator to the equivalent DataFusion operator.
pub(in crate::plan) fn binary_operator_to_df_operator(op: BinaryOperator) -> Operator {
    match op {
        BinaryOperator::Add => Operator::Plus,
        BinaryOperator::Sub => Operator::Minus,
        BinaryOperator::Mul => Operator::Multiply,
        BinaryOperator::Div => Operator::Divide,
        BinaryOperator::Mod => Operator::Modulo,
        BinaryOperator::BitwiseAnd => Operator::BitwiseAnd,
        BinaryOperator::BitwiseOr => Operator::BitwiseOr,
        BinaryOperator::BitwiseXor => Operator::BitwiseXor,
    }
}
/// Container for the DataFusion schema as well as
/// info on which columns are tags.
pub(in crate::plan) struct IQLSchema<'a> {
    pub(in crate::plan) df_schema: DFSchemaRef,
    // Where the tag information comes from (FROM clause vs. subquery fields).
    tag_info: TagInfo<'a>,
}
impl<'a> IQLSchema<'a> {
    /// Create a new IQLSchema from a [`DataSourceSchema`] from the
    /// FROM clause of a query or subquery.
    pub(in crate::plan) fn new_from_ds_schema(
        df_schema: &DFSchemaRef,
        ds_schema: DataSourceSchema<'a>,
    ) -> Result<Self> {
        Ok(Self {
            df_schema: Arc::clone(df_schema),
            tag_info: TagInfo::DataSourceSchema(ds_schema),
        })
    }
    /// Create a new IQLSchema from a list of [`Field`]s on the SELECT list
    /// of a subquery.
    pub(in crate::plan) fn new_from_fields(
        df_schema: &DFSchemaRef,
        fields: &'a [Field],
    ) -> Result<Self> {
        Ok(Self {
            df_schema: Arc::clone(df_schema),
            tag_info: TagInfo::FieldList(fields),
        })
    }
    /// Returns `true` if the schema contains a tag column with the specified name.
    pub fn is_tag_field(&self, name: &str) -> bool {
        match self.tag_info {
            TagInfo::DataSourceSchema(ref ds_schema) => ds_schema.is_tag_field(name),
            // Subquery: a field is a tag if it carries the Tag column type.
            TagInfo::FieldList(fields) => fields
                .iter()
                .any(|f| f.name == name && f.data_type == Some(InfluxColumnType::Tag)),
        }
    }
    /// Returns `true` if the schema contains a tag column with the specified name.
    /// If the underlying data source is a subquery, it will apply any aliases in the
    /// projection that represents the SELECT list.
    pub fn is_projected_tag_field(&self, name: &str) -> bool {
        match self.tag_info {
            TagInfo::DataSourceSchema(ref ds_schema) => ds_schema.is_projected_tag_field(name),
            _ => self.is_tag_field(name),
        }
    }
}
/// Source of tag-column information: either the FROM-clause data source
/// schema or the projected field list of a subquery.
pub(in crate::plan) enum TagInfo<'a> {
    DataSourceSchema(DataSourceSchema<'a>),
    FieldList(&'a [Field]),
}
/// Sanitize an InfluxQL regular expression and create a compiled [`regex::Regex`].
pub(crate) fn parse_regex(re: &Regex) -> Result<regex::Regex> {
    // Strip escape sequences that the `regex` crate does not understand.
    let pattern = clean_non_meta_escapes(re.as_str());
    regex::Regex::new(&pattern)
        .map_err(|e| error::map::query(format!("invalid regular expression '{re}': {e}")))
}
/// Returns `n` as a scalar value of the specified `data_type`.
fn number_to_scalar(n: &Number, data_type: &DataType) -> Result<ScalarValue> {
    Ok(match (n, data_type) {
        (Number::Integer(v), DataType::Int64) => ScalarValue::from(*v),
        (Number::Integer(v), DataType::Float64) => ScalarValue::from(*v as f64),
        (Number::Integer(v), DataType::UInt64) => ScalarValue::from(*v as u64),
        (Number::Integer(v), DataType::Timestamp(TimeUnit::Nanosecond, tz)) => {
            ScalarValue::TimestampNanosecond(Some(*v), tz.clone())
        }
        // Float-to-integer conversions truncate toward zero (`as` cast).
        (Number::Float(v), DataType::Int64) => ScalarValue::from(*v as i64),
        (Number::Float(v), DataType::Float64) => ScalarValue::from(*v),
        (Number::Float(v), DataType::UInt64) => ScalarValue::from(*v as u64),
        (Number::Float(v), DataType::Timestamp(TimeUnit::Nanosecond, tz)) => {
            ScalarValue::TimestampNanosecond(Some(*v as i64), tz.clone())
        }
        // Structs: apply the same number to every field, recursively.
        (n, DataType::Struct(fields)) => ScalarValue::Struct(
            Some(
                fields
                    .iter()
                    .map(|f| number_to_scalar(n, f.data_type()))
                    .collect::<Result<Vec<_>>>()?,
            ),
            fields.clone(),
        ),
        (_, DataType::Null) => ScalarValue::Null,
        (n, data_type) => {
            // The only output data types expected are Int64, Float64 or UInt64
            return error::internal(format!("no conversion from {n} to {data_type}"));
        }
    })
}
/// Rebuilds an `Expr` as a projection on top of a collection of `Expr`'s.
///
/// For example, the expression `a + b < 1` would require, as input, the 2
/// individual columns, `a` and `b`. But, if the base expressions already
/// contain the `a + b` result, then that may be used in lieu of the `a` and
/// `b` columns.
///
/// This is useful in the context of a query like:
///
/// SELECT a + b < 1 ... GROUP BY a + b
///
/// where post-aggregation, `a + b` need not be a projection against the
/// individual columns `a` and `b`, but rather it is a projection against the
/// `a + b` found in the GROUP BY.
///
/// `fill_if_null` will be used to coalesce any expressions from `NULL`.
/// This is used with the `FILL(<value>)` strategy.
pub(crate) fn rebase_expr(
    expr: &Expr,
    base_exprs: &[Expr],
    fill_if_null: &Option<Number>,
    plan: &LogicalPlan,
) -> Result<Expr> {
    if let Some(value) = fill_if_null {
        // FILL(<value>): each rebased column is wrapped in `coalesce_struct`
        // so NULLs are replaced by `value` coerced to the column's type.
        expr.clone().transform_up(&|nested_expr| {
            Ok(if base_exprs.contains(&nested_expr) {
                let col_expr = expr_as_column_expr(&nested_expr, plan)?;
                let data_type = col_expr.get_type(plan.schema())?;
                Transformed::Yes(coalesce_struct(vec![
                    col_expr,
                    lit(number_to_scalar(value, &data_type)?),
                ]))
            } else {
                Transformed::No(nested_expr)
            })
        })
    } else {
        // No fill: simply replace any base expression with its column reference.
        expr.clone().transform_up(&|nested_expr| {
            Ok(if base_exprs.contains(&nested_expr) {
                Transformed::Yes(expr_as_column_expr(&nested_expr, plan)?)
            } else {
                Transformed::No(nested_expr)
            })
        })
    }
}
/// Returns `true` if `needle` occurs anywhere within `expr` (including
/// `expr` itself), compared by equality.
pub(crate) fn contains_expr(expr: &Expr, needle: &Expr) -> bool {
    let mut found = false;
    expr.apply(&mut |expr| {
        if expr == needle {
            found = true;
            // Match found — skip visiting the rest of the tree.
            Ok(VisitRecursion::Stop)
        } else {
            Ok(VisitRecursion::Continue)
        }
    })
    // The closure only ever returns Ok, so `apply` cannot fail.
    .expect("cannot fail");
    found
}
/// Search the provided `Expr`'s, and all of their nested `Expr`, for any that
/// pass the provided test. The returned `Expr`'s are deduplicated and returned
/// in order of appearance (depth first).
///
/// # NOTE
///
/// Copied from DataFusion
pub(crate) fn find_exprs_in_exprs<F>(exprs: &[Expr], test_fn: &F) -> Vec<Expr>
where
    F: Fn(&Expr) -> bool,
{
    exprs
        .iter()
        .flat_map(|expr| find_exprs_in_expr(expr, test_fn))
        // Deduplicate while preserving first-seen order.
        .fold(vec![], |mut acc, expr| {
            if !acc.contains(&expr) {
                acc.push(expr)
            }
            acc
        })
}
/// Search an `Expr`, and all of its nested `Expr`'s, for any that pass the
/// provided test. The returned `Expr`'s are deduplicated and returned in order
/// of appearance (depth first).
///
/// # NOTE
///
/// Copied from DataFusion
fn find_exprs_in_expr<F>(expr: &Expr, test_fn: &F) -> Vec<Expr>
where
    F: Fn(&Expr) -> bool,
{
    let mut exprs = vec![];
    expr.apply(&mut |expr| {
        if test_fn(expr) {
            // Record only the first occurrence of each matching expression.
            if !(exprs.contains(expr)) {
                exprs.push(expr.clone())
            }
            // stop recursing down this expr once we find a match
            return Ok(VisitRecursion::Skip);
        }
        Ok(VisitRecursion::Continue)
    })
    // pre_visit always returns OK, so this will always too
    .expect("no way to return error during recursion");
    exprs
}
|
// Copyright (c) 2020 Allen Wild
// SPDX-License-Identifier: MIT OR Apache-2.0
use yall::log_macros::*;
use yall::Logger;
/// Emits `count` info-level log lines; `count` is the first CLI argument
/// (default 100). Panics if the argument is not a valid usize.
fn main() {
    let count: usize = std::env::args()
        .nth(1)
        .map_or(100, |c| c.parse().unwrap());
    Logger::new().init();
    for i in 1..=count {
        info!("info log {}", i);
    }
}
|
use super::*;
use crate::mock::{Currency, ExtBuilder, Faucet, Origin, Test, ALICE, HDX};
use frame_support::traits::OnFinalize;
use frame_support::{assert_noop, assert_ok};
/// In "rampage" mode a signed origin may mint an arbitrary amount.
#[test]
fn rampage_mints() {
    ExtBuilder::default().build_rampage().execute_with(|| {
        assert_ok!(Faucet::rampage_mint(Origin::signed(ALICE), HDX, 1000));
        assert_eq!(Currency::free_balance(HDX, &ALICE), 2000);
    });
}
/// A live-mode mint credits assets 2000 and 3000 but not 4000.
#[test]
fn mints() {
    ExtBuilder::default().build_live().execute_with(|| {
        assert_eq!(Currency::free_balance(2000, &ALICE), 0);
        assert_ok!(Faucet::mint(Origin::signed(ALICE)));
        assert_eq!(Currency::free_balance(2000, &ALICE), 1_000_000_000_000_000);
        assert_eq!(Currency::free_balance(3000, &ALICE), 1_000_000_000_000_000);
        assert_eq!(Currency::free_balance(4000, &ALICE), 0);
    });
}
/// `rampage_mint` is rejected when the chain is built in live mode.
#[test]
fn rampage_disabled() {
    ExtBuilder::default().build_live().execute_with(|| {
        assert_noop!(
            Faucet::rampage_mint(Origin::signed(ALICE), HDX, 1000),
            Error::<Test>::RampageMintNotAllowed
        );
    });
}
/// Five mints succeed, the sixth fails, and `on_finalize` resets the limit.
#[test]
fn mint_limit() {
    ExtBuilder::default().build_live().execute_with(|| {
        assert_ok!(Faucet::mint(Origin::signed(ALICE)));
        assert_ok!(Faucet::mint(Origin::signed(ALICE)));
        assert_ok!(Faucet::mint(Origin::signed(ALICE)));
        assert_ok!(Faucet::mint(Origin::signed(ALICE)));
        assert_ok!(Faucet::mint(Origin::signed(ALICE)));
        assert_noop!(
            Faucet::mint(Origin::signed(ALICE)),
            Error::<Test>::MaximumMintLimitReached
        );
        <Faucet as OnFinalize<u64>>::on_finalize(1);
        assert_ok!(Faucet::mint(Origin::signed(ALICE)));
        assert_eq!(Currency::free_balance(2000, &ALICE), 6_000_000_000_000_000);
    });
}
|
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
/// Demonstrates shared-map access: each thread inserts its own key into a
/// `RwLock<HashMap>` and then reads it back.
fn main() {
    let lotable: Arc<RwLock<HashMap<String, u64>>> = Arc::new(RwLock::new(HashMap::default()));
    // R/W from `thread_count` threads concurrently.
    let thread_count = 8;
    let mut threads = vec![];
    for thread_no in 0..thread_count {
        // Clone the Arc so each thread owns a handle to the shared map.
        let lotable = lotable.clone();
        let t = std::thread::Builder::new()
            .name(format!("t_{}", thread_no))
            .spawn(move || {
                let key = format!("{}", thread_no);
                let mut loguard = lotable.write().unwrap();
                loguard.insert(key.clone(), thread_no);
                // Release the write lock before taking the read lock.
                drop(loguard);
                let loguard = lotable.read().unwrap();
                let _ = loguard.get(&key).unwrap();
            })
            .unwrap();
        threads.push(t);
    }
    for t in threads.into_iter() {
        t.join().unwrap();
    }
}
|
use std::fmt;
/// A 24-hour wall clock holding normalized hours (0..24) and minutes (0..60).
#[derive(Debug, PartialEq)]
pub struct Clock {
    hours: i32,
    minutes: i32,
}

impl Clock {
    /// Creates a clock from possibly out-of-range (including negative)
    /// hours and minutes, normalizing into 0..24 h and 0..60 min.
    ///
    /// Bug fix: the previous hour-carry computation used truncating division
    /// with a manual `-1` adjustment, which over-corrected for exact negative
    /// multiples of 60 (e.g. `Clock::new(2, -60)` yielded 00:00 instead of
    /// 01:00). Euclidean division handles all signs uniformly.
    pub fn new(hours: i32, minutes: i32) -> Self {
        let total_minutes = hours * 60 + minutes;
        Clock {
            // Euclidean ops always yield non-negative results for positive moduli.
            hours: total_minutes.div_euclid(60).rem_euclid(24),
            minutes: total_minutes.rem_euclid(60),
        }
    }

    /// Returns a new clock advanced (or rewound, when negative) by `minutes`.
    /// (`&mut self` kept for signature compatibility; the receiver is not
    /// actually mutated.)
    pub fn add_minutes(&mut self, minutes: i32) -> Self {
        Clock::new(self.hours, self.minutes + minutes)
    }
}

impl fmt::Display for Clock {
    /// Formats as zero-padded `HH:MM`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:02}:{:02}", self.hours, self.minutes)
    }
}
|
use actix_web::{test, FromRequest, HttpRequest, State};
use bigneon_api::config::{Config, Environment};
use bigneon_api::mail::transports::TestTransport;
use bigneon_api::server::AppState;
/// Test helper bundling an actix-web request with the config that built the
/// application state.
pub struct TestRequest {
    pub request: HttpRequest<AppState>,
    pub config: Config,
}
impl TestRequest {
    /// Returns the configured mail transport downcast to `TestTransport`
    /// for asserting on sent mail. Panics on any other transport type.
    pub fn test_transport(&self) -> &TestTransport {
        self.config
            .mail_transport
            .as_any()
            .downcast_ref::<TestTransport>()
            .unwrap()
    }
    pub fn create() -> TestRequest {
        TestRequest::create_with_uri("/")
    }
    /// Shared test configuration (was duplicated verbatim in both
    /// `create_with_uri` variants).
    fn test_config() -> Config {
        let mut config = Config::new(Environment::Test);
        config.token_secret = "test_secret".into();
        config.token_issuer = "bn-api-test".into();
        config.mail_from_email = "support@bigneon.com".to_string();
        config.mail_from_name = "Big Neon".to_string();
        config
    }
    /// Builds a request for `path` with a fixed `id` route parameter.
    pub fn create_with_uri(path: &str) -> TestRequest {
        let config = Self::test_config();
        let test_request = test::TestRequest::with_state(AppState::new(config.clone()));
        // TODO: actix-web test requests do not allow router customization except
        // within crate. Forcing an ID here so the extractor can still build the
        // parameters wrapped in the Path struct. Should refactor when they settle
        // on a final test request design as the current does not support extractors.
        let request = test_request
            .param("id", "0f85443e-9e70-45ba-bf28-0f59c183856f")
            .uri(path)
            .finish();
        TestRequest { request, config }
    }
    /// Builds a request for `path` with fixed `event_id` and `ticket_type_id`
    /// route parameters (same extractor workaround as `create_with_uri`).
    pub fn create_with_uri_event_ticket(path: &str) -> TestRequest {
        let config = Self::test_config();
        let test_request = test::TestRequest::with_state(AppState::new(config.clone()));
        let request = test_request
            .param("event_id", "0f85443e-9e70-45ba-bf28-0f59c183856f")
            .param("ticket_type_id", "0f85443e-9e70-45ba-bf28-0f59c183856f")
            .uri(path)
            .finish();
        TestRequest { request, config }
    }
    /// Extracts the shared application state from the wrapped request.
    pub fn extract_state(&self) -> State<AppState> {
        State::<AppState>::extract(&self.request)
    }
}
|
#![allow(dead_code)]
use crate::{regex};
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashMap;
lazy_static! {
static ref MEM_REGEX: Regex = regex!(r"^mem\[([0-9]+)\] = ([0-9]+)$");
static ref MASK_REGEX: Regex = regex!(r"^mask = ([X01]+)$");
}
/// Day 15: prints the 2020th (part 1) and 30,000,000th (part 2) spoken
/// numbers for the puzzle input.
pub fn day15() {
    println!("rambunctious_recitation part 1: {:?}", recitation(vec![12,20,0,6,1,17,7], 2020));
    // Bug fix: this line was mislabelled "part 1" — it is the part 2 answer.
    println!("rambunctious_recitation part 2: {:?}", recitation(vec![12,20,0,6,1,17,7], 30000000));
}
/// Plays the "memory game" (AoC 2020 day 15 / Van Eck sequence) and returns
/// the number spoken on turn `max_turn`.
///
/// Each starting number is spoken once; afterwards, a number that was new
/// yields 0, otherwise the gap since it was previously spoken.
pub fn recitation(input: Vec<u64>, max_turn: u64) -> u64 {
    // Maps a number to the most recent turn on which it was spoken.
    let mut seen: HashMap<u64, u64> = HashMap::new();
    let mut turn: u64 = 0;
    let mut prev_seen_turn: u64 = 0;
    let mut current: u64 = 0;
    for &n in &input {
        turn += 1;
        current = n;
        seen.insert(current, turn);
    }
    while turn < max_turn {
        // 0 if the previous number was new; otherwise the gap between its
        // last two occurrences.
        current = match prev_seen_turn {
            0 => 0,
            t => turn - t,
        };
        prev_seen_turn = seen.get(&current).copied().unwrap_or(0);
        turn += 1;
        seen.insert(current, turn);
    }
    current
}
/// Known-good examples from the puzzle description.
#[test]
fn test_day15() {
    assert_eq!(
        recitation(vec![0, 3, 6], 2020), 436
    );
    assert_eq!(
        recitation(vec![3, 1, 2], 2020), 1836
    );
    // NOTE: the 30,000,000-turn case takes noticeable time to run.
    assert_eq!(
        recitation(vec![3, 1, 2], 30000000), 362
    );
}
use types::{int_t};
// C `<ctype.h>`-style character classification. Each function inspects the
// low byte of `c` (`c as u8`) and returns 1 for a match, 0 otherwise, per
// the C convention. Deprecated `...` range patterns replaced with `..=`.

/// Letter or decimal digit.
#[no_mangle]
pub extern fn isalnum(c: int_t) -> int_t {
    match c as u8 as char {
        'a'..='z' | 'A'..='Z' | '0'..='9' => 1,
        _ => 0,
    }
}

/// ASCII letter.
#[no_mangle]
pub extern fn isalpha(c: int_t) -> int_t {
    match c as u8 as char {
        'a'..='z' | 'A'..='Z' => 1,
        _ => 0,
    }
}

/// Space or horizontal tab.
#[no_mangle]
pub extern fn isblank(c: int_t) -> int_t {
    match c as u8 as char {
        ' ' | '\t' => 1,
        _ => 0,
    }
}

/// Control character: 0x00..=0x1F or DEL (0x7F).
/// Bug fix: the range previously ended at '\x19', wrongly excluding
/// 0x1A..=0x1F (e.g. ESC, 0x1B).
#[no_mangle]
pub extern fn iscntrl(c: int_t) -> int_t {
    match c as u8 as char {
        '\x00'..='\x1f' | '\x7f' => 1,
        _ => 0,
    }
}

/// Decimal digit.
#[no_mangle]
pub extern fn isdigit(c: int_t) -> int_t {
    match c as u8 as char {
        '0'..='9' => 1,
        _ => 0,
    }
}

/// Printable character other than space (0x21..=0x7E).
#[no_mangle]
pub extern fn isgraph(c: int_t) -> int_t {
    match c {
        0x21..=0x7e => 1,
        _ => 0,
    }
}

/// Lowercase ASCII letter.
#[no_mangle]
pub extern fn islower(c: int_t) -> int_t {
    match c as u8 as char {
        'a'..='z' => 1,
        _ => 0,
    }
}

/// Printable character including space (0x20..=0x7E).
#[no_mangle]
pub extern fn isprint(c: int_t) -> int_t {
    match c {
        0x20..=0x7e => 1,
        _ => 0,
    }
}

/// Punctuation: printable, not space, and not alphanumeric.
/// Bug fix: previously only letters were excluded (`isalpha`), so digits
/// were misclassified as punctuation; C requires excluding `isalnum`.
#[no_mangle]
pub extern fn ispunct(c: int_t) -> int_t {
    ((isgraph(c) != 0) && (isalnum(c) == 0)) as int_t
}

/// Whitespace: space, \t, \n, vertical tab (0x0B), form feed (0x0C), \r.
#[no_mangle]
pub extern fn isspace(c: int_t) -> int_t {
    match c as u8 as char {
        ' ' | '\t' | '\n' | '\x0b' | '\x0c' | '\r' => 1,
        _ => 0,
    }
}

/// Uppercase ASCII letter.
#[no_mangle]
pub extern fn isupper(c: int_t) -> int_t {
    match c as u8 as char {
        'A'..='Z' => 1,
        _ => 0,
    }
}

/// Hexadecimal digit.
#[no_mangle]
pub extern fn isxdigit(c: int_t) -> int_t {
    match c as u8 as char {
        '0'..='9' | 'A'..='F' | 'a'..='f' => 1,
        _ => 0,
    }
}

/// Converts an uppercase letter to lowercase; other values pass through.
#[no_mangle]
pub extern fn tolower(c: int_t) -> int_t {
    match c as u8 as char {
        'A'..='Z' => c + 0x20,
        _ => c,
    }
}

/// Converts a lowercase letter to uppercase; other values pass through.
#[no_mangle]
pub extern fn toupper(c: int_t) -> int_t {
    match c as u8 as char {
        'a'..='z' => c - 0x20,
        _ => c,
    }
}
|
use anyhow::Result;
use std::rc::Rc;
use std::include_str;
pub use simple_gl::graphics::*;
/// Holds the compiled GL shader program used for cube rendering.
pub struct Graphics {
    pub program: Program
}
impl Graphics {
    /// Compiles the cube vertex/geometry/fragment shaders (embedded at build
    /// time), links them into a program, activates it, and sets the initial
    /// `margin` uniform.
    pub fn new() -> Result<Graphics> {
        let vert_shader = VertexShader::from_source(
            include_str!("../../resources/shaders/cube/cube.vert")
        )?;
        let geom_shader = GeometryShader::from_source(
            include_str!("../../resources/shaders/cube/cube.geom")
        )?;
        let frag_shader = FragmentShader::from_source(
            include_str!("../../resources/shaders/cube/cube.frag")
        )?;
        let mut program = Program::from_shaders(vec![
            Rc::new(vert_shader),
            Rc::new(geom_shader),
            Rc::new(frag_shader),
        ])?;
        program.set_used();
        program.set_uniform("margin", 0.5f32);
        Ok(Graphics {
            program
        })
    }
}
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// membarrier(2) commands, from include/uapi/linux/membarrier.h.
// MEMBARRIER_CMD_QUERY is 0; the remaining commands are distinct single-bit
// values, so a QUERY result can report the supported set as a bitmask.
pub const MEMBARRIER_CMD_QUERY : i32 = 0;
pub const MEMBARRIER_CMD_GLOBAL : i32 = 1 << 0;
pub const MEMBARRIER_CMD_GLOBAL_EXPEDITED : i32 = 1 << 1;
pub const MEMBARRIER_CMD_REGISTER_GLOBAL_EXPEDITED : i32 = 1 << 2;
pub const MEMBARRIER_CMD_PRIVATE_EXPEDITED : i32 = 1 << 3;
pub const MEMBARRIER_CMD_REGISTER_PRIVATE_EXPEDITED : i32 = 1 << 4;
pub const MEMBARRIER_CMD_PRIVATE_EXPEDITED_SYNC_CORE : i32 = 1 << 5;
pub const MEMBARRIER_CMD_REGISTER_PRIVATE_EXPEDITED_SYNC_CORE : i32 = 1 << 6;
pub const MEMBARRIER_CMD_PRIVATE_EXPEDITED_RSEQ : i32 = 1 << 7;
pub const MEMBARRIER_CMD_REGISTER_PRIVATE_EXPEDITED_RSEQ : i32 = 1 << 8;
// membarrier(2) flags, from include/uapi/linux/membarrier.h.
// NOTE(review): declared u32 (unlike the i32 commands above), presumably to
// match the kernel's unsigned flags argument — confirm against the syscall
// wrapper that consumes this constant.
pub const MEMBARRIER_CMD_FLAG_CPU : u32 = 1 << 0; |
use projecteuler::helper;
use projecteuler::modulo;
fn main() {
// Time the solver (helper::check_bench runs and reports on the closure).
helper::check_bench(|| {
solve(1000);
});
// Regression guard for the known answer, then print it (solve is cheap
// enough that recomputing it twice here is fine).
assert_eq!(solve(1000), 9110846700);
dbg!(solve(1000));
}
// Sadly this overflows for the question if only using usize (on a 64-bit machine);
// that's why I also implemented a version using 128-bit numbers :)
/// Returns the sum of `i^i` for `i` in `1..=n`, reduced modulo `10^10`.
///
/// Each term is computed with 128-bit modular exponentiation so the
/// intermediate products cannot overflow.
fn solve(n: usize) -> usize {
    let m: usize = 10_000_000_000;
    (1..=n).fold(0, |sum, i| {
        (sum + modulo::modulo_power(i as u128, i as u128, m as u128) as usize) % m
    })
}
|
use anyhow::Result;
use qapi::qga;
use clap::Parser;
use tokio::time::{Duration, timeout};
use super::{GlobalArgs, QgaStream};
#[derive(Parser, Debug)]
/// Displays information about the guest, and can be used to check that the guest agent is running
pub(crate) struct Info {
// `-O` / `--os`: additionally query and print guest OS details.
// (Plain `//` comment on purpose: a `///` doc comment here would become
// clap-generated help text and change the CLI's output.)
#[clap(short = 'O', long = "os")]
os_info: bool,
}
impl Info {
    /// Fetches and prints the guest agent's info block, and — when `--os`
    /// was given — the guest OS info as well. Returns exit code 0 on success.
    pub async fn run(self, qga: QgaStream, _args: GlobalArgs) -> Result<i32> {
        println!("Guest Info: {:#?}", qga.execute(qga::guest_info { }).await?);
        if self.os_info {
            println!("Guest OS Info: {:#?}", qga.execute(qga::guest_get_osinfo { }).await?);
        }
        Ok(0)
    }
}
#[derive(Parser, Debug)]
/// Pings the guest agent, and can be used to check that the guest agent is running
// Bug fix: the about-text above was copy-pasted from `Info` ("Displays
// information about the guest…"), which produced wrong `--help` output for
// the ping subcommand.
pub(crate) struct Ping {
    /// Keep retrying (re-syncing the stream between attempts) until a ping succeeds
    #[clap(short, long)]
    repeat: bool,
    /// Overall time limit in seconds before giving up
    #[clap(short, long = "timeout")]
    timeout_seconds: Option<u64>,
}
impl Ping {
/// Overall deadline for the whole command, if `--timeout` was given.
fn timeout(&self) -> Option<Duration> {
self.timeout_seconds.map(Duration::from_secs)
}
/// Pings the guest agent once, or — with `--repeat` — keeps retrying until
/// a ping succeeds or the overall timeout expires. Returns exit code 0 on
/// success.
pub async fn run(self, qga: QgaStream, args: GlobalArgs) -> Result<i32> {
let duration = self.timeout();
if self.repeat {
qemucomm::wait(duration, async move {
let mut sync_value = 0;
loop {
// Give each individual ping one second before assuming the
// stream is wedged or out of sync.
match timeout(Duration::from_secs(1), self.ping(&qga, &args)).await {
Err(_) => {
// Ping timed out: best-effort re-synchronization of the
// QGA stream (the result and any error are deliberately
// ignored), with a fresh sync token each attempt, then
// loop and ping again.
let _ = timeout(Duration::from_secs(1), qga.guest_sync(sync_value)).await;
sync_value = sync_value.wrapping_add(1);
},
// A completed ping — success or failure — ends the retry loop.
Ok(Err(e)) => break Err(e),
Ok(Ok(())) => break Ok(()),
}
}
}).await?;
} else {
qemucomm::wait(duration, self.ping(&qga, &args)).await?;
}
Ok(0)
}
/// A single guest-ping round trip.
async fn ping(&self, qga: &QgaStream, _args: &GlobalArgs) -> Result<()> {
qga.execute(qga::guest_ping { }).await?;
Ok(())
}
}
|
use super::{dump::dump_data_frames, read_group_data, run_data_test, InfluxRpcTest};
use async_trait::async_trait;
use futures::{prelude::*, FutureExt};
use generated_types::{
node::Logical, read_response::frame::Data, storage_client::StorageClient, ReadFilterRequest,
};
use influxdb_iox_client::connection::GrpcConnection;
use itertools::Itertools;
use std::sync::Arc;
use test_helpers_end_to_end::{
maybe_skip_integration, DataGenerator, GrpcRequestBuilder, MiniCluster, Step, StepTest,
StepTestState,
};
#[tokio::test]
// End-to-end read_filter: write generated data, query it back filtered to
// host=server01 over the generator's full time range, and compare the dumped
// frames against the expected series.
async fn read_filter() {
let generator = Arc::new(DataGenerator::new());
run_data_test(Arc::clone(&generator), Box::new(move |state: &mut StepTestState| {
let generator = Arc::clone(&generator);
async move {
let mut storage_client = state.cluster().querier_storage_client();
let read_filter_request = GrpcRequestBuilder::new()
.source(state.cluster())
.timestamp_range(generator.min_time(), generator.max_time())
.tag_predicate("host", "server01")
.build_read_filter();
let read_response = storage_client
.read_filter(read_filter_request)
.await
.unwrap();
let responses: Vec<_> = read_response.into_inner().try_collect().await.unwrap();
let frames: Vec<Data> = responses
.into_iter()
.flat_map(|r| r.frames)
.flat_map(|f| f.data)
.collect();
let actual_frames = dump_data_frames(&frames);
// The nsN placeholders below are replaced with the generator's actual
// nanosecond timestamps by substitute_nanos.
let expected_frames = generator.substitute_nanos(&[
"SeriesFrame, tags: _field=value,_measurement=cpu_load_short,host=server01, type: 0",
"FloatPointsFrame, timestamps: [ns1], values: \"27.99\"",
"SeriesFrame, tags: _field=value,_measurement=cpu_load_short,host=server01,region=us-east, type: 0",
"FloatPointsFrame, timestamps: [ns3], values: \"1234567.891011\"",
"SeriesFrame, tags: _field=value,_measurement=cpu_load_short,host=server01,region=us-west, type: 0",
"FloatPointsFrame, timestamps: [ns0, ns4], values: \"0.64,0.000003\"",
"SeriesFrame, tags: _field=in,_measurement=swap,host=server01,name=disk0, type: 1",
"IntegerPointsFrame, timestamps: [ns6], values: \"3\"",
"SeriesFrame, tags: _field=out,_measurement=swap,host=server01,name=disk0, type: 1",
"IntegerPointsFrame, timestamps: [ns6], values: \"4\""
]);
assert_eq!(
expected_frames,
actual_frames,
"Expected:\n{}\nActual:\n{}",
expected_frames.join("\n"),
actual_frames.join("\n")
)
}.boxed()
})).await
}
#[tokio::test]
// Regex tag predicate: only series whose host starts with "b" should match.
pub async fn read_filter_regex_operator() {
do_read_filter_test(
read_group_data(),
GrpcRequestBuilder::new()
.timestamp_range(0, 2001) // include all data
.regex_match_predicate("host", "^b.+"),
vec![
"SeriesFrame, tags: _field=usage_system,_measurement=cpu,cpu=cpu1,host=bar, type: 0",
"FloatPointsFrame, timestamps: [1000, 2000], values: \"20,21\"",
"SeriesFrame, tags: _field=usage_user,_measurement=cpu,cpu=cpu1,host=bar, type: 0",
"FloatPointsFrame, timestamps: [1000, 2000], values: \"81,82\"",
"SeriesFrame, tags: _field=usage_system,_measurement=cpu,cpu=cpu2,host=bar, type: 0",
"FloatPointsFrame, timestamps: [1000, 2000], values: \"40,41\"",
"SeriesFrame, tags: _field=usage_user,_measurement=cpu,cpu=cpu2,host=bar, type: 0",
"FloatPointsFrame, timestamps: [1000, 2000], values: \"51,52\"",
],
)
.await
}
#[tokio::test]
// Equality against the empty string selects rows where the tag is absent.
pub async fn read_filter_empty_tag_eq() {
do_read_filter_test(
vec!["cpu value=1 1000", "cpu,host=server01 value=2 2000"],
GrpcRequestBuilder::new()
.timestamp_range(0, 2001) // include all data
// https://github.com/influxdata/influxdb_iox/issues/3430
// host = '' means where host is not present
.tag_predicate("host", ""),
vec![
"SeriesFrame, tags: _field=value,_measurement=cpu, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"1\"",
],
)
.await
}
#[tokio::test]
// Negated regex also matches rows where the tag is absent entirely.
pub async fn read_filter_empty_tag_not_regex() {
do_read_filter_test(
vec!["cpu value=1 1000", "cpu,host=server01 value=2 2000"],
GrpcRequestBuilder::new()
.timestamp_range(0, 2001) // include all data
// https://github.com/influxdata/influxdb_iox/issues/3434
// host !~ /^server01$/ means where host doesn't start with `server01`
.not_regex_match_predicate("host", "^server01"),
vec![
"SeriesFrame, tags: _field=value,_measurement=cpu, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"1\"",
],
)
.await
}
#[tokio::test]
// `.+` regex requires the tag to be present (at least one character).
pub async fn read_filter_empty_tag_regex() {
do_read_filter_test(
vec!["cpu value=1 1000", "cpu,host=server01 value=2 2000"],
GrpcRequestBuilder::new()
.timestamp_range(0, 2001) // include all data
// host =~ /.+/ means where host is at least one character
.regex_match_predicate("host", ".+"),
vec![
"SeriesFrame, tags: _field=value,_measurement=cpu,host=server01, type: 0",
"FloatPointsFrame, timestamps: [2000], values: \"2\"",
],
)
.await
}
#[tokio::test]
// Measurement/tag/field names containing periods round-trip correctly.
pub async fn read_filter_periods() {
do_read_filter_test(
vec![
"measurement.one,tag.one=foo field.one=1,field.two=100 1000",
"measurement.one,tag.one=bar field.one=2,field.two=200 2000",
],
GrpcRequestBuilder::new().timestamp_range(0, 2001),
vec![
"SeriesFrame, tags: _field=field.one,_measurement=measurement.one,tag.one=bar, type: 0",
"FloatPointsFrame, timestamps: [2000], values: \"2\"",
"SeriesFrame, tags: _field=field.two,_measurement=measurement.one,tag.one=bar, type: 0",
"FloatPointsFrame, timestamps: [2000], values: \"200\"",
"SeriesFrame, tags: _field=field.one,_measurement=measurement.one,tag.one=foo, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"1\"",
"SeriesFrame, tags: _field=field.two,_measurement=measurement.one,tag.one=foo, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"100\"",
],
)
.await
}
#[tokio::test]
// Tag predicate on a period-containing tag key selects the right series.
pub async fn read_filter_periods_tag_predicate() {
do_read_filter_test(
vec![
"measurement.one,tag.one=foo field.one=1,field.two=100 1000",
"measurement.one,tag.one=bar field.one=2,field.two=200 2000",
],
GrpcRequestBuilder::new()
.tag_predicate("tag.one", "foo")
.timestamp_range(0, 2001),
vec![
"SeriesFrame, tags: _field=field.one,_measurement=measurement.one,tag.one=foo, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"1\"",
"SeriesFrame, tags: _field=field.two,_measurement=measurement.one,tag.one=foo, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"100\"",
],
)
.await
}
#[tokio::test]
// `city.state` must not be confused with `state` (or `state.city`).
pub async fn read_filter_periods_multi_tag_predicate() {
do_read_filter_test(
// this setup has tags and fields that would be the same except for the period
vec![
"h2o,state=CA temp=90.0 100",
"h2o,state=CA,city.state=LA temp=90.0 200",
"h2o,state=CA,state.city=LA temp=91.0 300",
],
GrpcRequestBuilder::new()
.tag_predicate("city.state", "LA")
.timestamp_range(0, 2001),
// only the one series (that has city.state), should not get state as well
vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city.state=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [200], values: \"90\"",
],
)
.await
}
#[tokio::test]
// Conversely, a predicate on plain `state` must still match series that
// also carry a period-containing tag.
pub async fn read_filter_periods_multi_tag_predicate2() {
do_read_filter_test(
// this setup has tags and fields that would be the same except for the period
vec![
"h2o,state=CA temp=90.0 100",
"h2o,state=CA,city.state=LA temp=90.0 200",
],
GrpcRequestBuilder::new()
.tag_predicate("state", "CA")
.timestamp_range(0, 2001),
// all the series (should not filter out the city.state one)
vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,state=CA, type: 0",
"FloatPointsFrame, timestamps: [100], values: \"90\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city.state=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [200], values: \"90\"",
],
)
.await
}
#[tokio::test]
// A tag key that *starts* with a period also works.
pub async fn read_filter_periods_multi_tag_predicate3() {
do_read_filter_test(
// this setup has tags and fields that would be the same except for the period
vec!["h2o,.state=CA temp=90.0 100", "h2o,.state=CA temp=90.0 200"],
GrpcRequestBuilder::new()
.tag_predicate(".state", "CA")
.timestamp_range(0, 2001),
// all the series (should not filter out the city.state one)
vec![
"SeriesFrame, tags: .state=CA,_field=temp,_measurement=h2o, type: 0",
"FloatPointsFrame, timestamps: [100, 200], values: \"90,90\"",
],
)
.await
}
#[tokio::test]
// Field predicate on a period-containing field name.
pub async fn read_filter_periods_field_predicate() {
do_read_filter_test(
vec![
"measurement.one,tag.one=foo field.one=1,field.two=100 1000",
"measurement.one,tag.one=bar field.one=2,field.two=200 2000",
],
GrpcRequestBuilder::new()
.field_predicate("field.two")
.timestamp_range(0, 2001),
vec![
"SeriesFrame, tags: _field=field.two,_measurement=measurement.one,tag.one=bar, type: 0",
"FloatPointsFrame, timestamps: [2000], values: \"200\"",
"SeriesFrame, tags: _field=field.two,_measurement=measurement.one,tag.one=foo, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"100\"",
],
)
.await
}
#[tokio::test]
// NOTE(review): "prediate" in the fn name is a typo for "predicate"; left
// as-is to avoid renaming an existing test.
// A predicate on field `temp` must not match `temp.foo` or `foo.temp`.
pub async fn read_filter_periods_multi_field_prediate() {
do_read_filter_test(
// this setup has tags and fields that would be the same except for the period
vec![
"h2o,state=CA temp=90.0 100",
"h2o,state=CA,city.state=LA temp=90.0 200",
"h2o,state=CA,state.city=LA temp=91.0 300",
"h2o,state=CA,state.city=LA temp.foo=92.0 400",
"h2o,state=CA,state.city=LA foo.temp=93.0 500",
],
GrpcRequestBuilder::new()
.field_predicate("temp")
.timestamp_range(0, 2001),
// expect not to see temp.foo
vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,state=CA, type: 0",
"FloatPointsFrame, timestamps: [100], values: \"90\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,state=CA,state.city=LA, type: 0",
"FloatPointsFrame, timestamps: [300], values: \"91\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city.state=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [200], values: \"90\"",
],
)
.await
}
#[tokio::test]
// Conversely, a predicate on `temp.foo` matches only that exact field.
pub async fn read_filter_periods_multi_field_predicate2() {
do_read_filter_test(
// this setup has tags and fields that would be the same except for the period
vec![
"h2o,state=CA temp=90.0 100",
"h2o,state=CA,city.state=LA temp=90.0 200",
"h2o,state=CA,state.city=LA temp=91.0 300",
"h2o,state=CA,state.city=LA temp.foo=92.0 400",
"h2o,state=CA,state.city=LA foo.temp=93.0 500",
],
GrpcRequestBuilder::new()
.field_predicate("temp.foo")
.timestamp_range(0, 2001),
// expect only one series
vec![
"SeriesFrame, tags: _field=temp.foo,_measurement=h2o,state=CA,state.city=LA, type: 0",
"FloatPointsFrame, timestamps: [400], values: \"92\"",
],
)
.await
}
// Test for https://github.com/influxdata/influxdb_iox/issues/7663
#[tokio::test]
pub async fn read_filter_multi_data_frame() {
// there will be 1000 points per frame, so
// this number of points will return 3 frames
// with the last containing just one point.
let num_points: i64 = 2001;
// One point per second (in nanoseconds), starting at t=1s.
let input_lines: Vec<String> = (0..num_points)
.map(|i| {
format!(
"h2o,tagk0=tagv0 f0={} {}",
i,
1_000_000_000 + i * 1_000_000_000
)
})
.collect();
let input_lines: Vec<&str> = input_lines.iter().map(String::as_ref).collect();
let mut expected = vec!["SeriesFrame, tags: _field=f0,_measurement=h2o,tagk0=tagv0, type: 0"];
// Build the expected FloatPointsFrame lines in the same 1000-point chunks
// the server is expected to use.
let ts_vec = (0..num_points)
.map(|i| 1_000_000_000 + i * 1_000_000_000)
.collect::<Vec<_>>();
let values_vec = (0..num_points).collect::<Vec<_>>();
let mut data_frames: Vec<String> = vec![];
for (ts_chunk, v_chunk) in ts_vec.chunks(1000).zip(values_vec.chunks(1000)) {
let ts_str = ts_chunk.iter().map(|ts| ts.to_string()).join(", ");
let v_str = v_chunk.iter().map(|v| v.to_string()).join(",");
data_frames.push(format!(
"FloatPointsFrame, timestamps: [{}], values: \"{}\"",
ts_str, v_str
));
}
data_frames
.iter()
.for_each(|line| expected.push(line.as_ref()));
// response should be broken into three frames
assert_eq!(3, data_frames.len());
do_read_filter_test(
input_lines,
GrpcRequestBuilder::new()
.field_predicate("f0")
// Generously past the last written timestamp, so all points match.
.timestamp_range(0, num_points * 1_000_000_000_000),
expected,
)
.await
}
/// Sends the specified line protocol to a server, issues a read_filter
/// request built from `request_builder` (with the cluster as source), and
/// compares the returned frames against `expected_frames`.
async fn do_read_filter_test(
input_lines: Vec<&str>,
request_builder: GrpcRequestBuilder,
expected_frames: impl IntoIterator<Item = &str>,
) {
test_helpers::maybe_start_logging();
let database_url = maybe_skip_integration!();
// Own the expected strings so they can move into the async step closure.
let expected_frames: Vec<String> = expected_frames.into_iter().map(|s| s.to_string()).collect();
// Set up the cluster ====================================
let mut cluster = MiniCluster::create_shared(database_url).await;
let line_protocol = input_lines.join("\n");
StepTest::new(
&mut cluster,
vec![
Step::WriteLineProtocol(line_protocol),
Step::Custom(Box::new(move |state: &mut StepTestState| {
let request_builder = request_builder.clone();
let expected_frames = expected_frames.clone();
async move {
let mut storage_client = state.cluster().querier_storage_client();
println!("Sending read_filter request with {request_builder:#?}");
let read_filter_request =
request_builder.source(state.cluster()).build_read_filter();
let actual_frames =
do_read_filter_request(&mut storage_client, read_filter_request).await;
assert_eq!(
expected_frames, actual_frames,
"\n\nExpected:\n{expected_frames:#?}\n\nActual:\n{actual_frames:#?}\n\n",
);
}
.boxed()
})),
],
)
.run()
.await
}
/// Makes a read_filter request and returns the results in a comparable
/// (dumped string) format.
/// (The previous doc said "read_group" — that was a copy-paste error; this
/// helper issues read_filter.)
async fn do_read_filter_request(
storage_client: &mut StorageClient<GrpcConnection>,
request: tonic::Request<ReadFilterRequest>,
) -> Vec<String> {
let read_filter_response = storage_client
.read_filter(request)
.await
.expect("successful read_filter call");
let responses: Vec<_> = read_filter_response
.into_inner()
.try_collect()
.await
.unwrap();
// Flatten the streamed responses into the raw frame data.
let frames: Vec<_> = responses
.into_iter()
.flat_map(|r| r.frames)
.flat_map(|f| f.data)
.collect();
dump_data_frames(&frames)
}
#[tokio::test]
// With no predicate at all, every series in the setup comes back.
async fn no_predicate() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new(),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [100, 250], values: \"70.4,72.4\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [200, 350], values: \"90,90\"",
"SeriesFrame, tags: _field=reading,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [100, 250], values: \"50,51\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [100, 250], values: \"50.4,53.4\"",
],
})
.run()
.await;
}
#[tokio::test]
// Timestamp ranges are half-open: [start, end).
async fn exclusive_timestamp_range_predicate() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new()
// should *not* return the 350 row as the predicate is range.start <= ts < range.end
.timestamp_range(349, 350),
expected_results: vec![],
})
.run()
.await;
}
#[tokio::test]
// A range starting exactly on a point's timestamp includes it.
async fn inclusive_timestamp_range_predicate() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new()
// *should* return the 350 row as the predicate is range.start <= ts < range.end
.timestamp_range(350, 351),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [350], values: \"90\"",
],
})
.run()
.await;
}
#[tokio::test]
// A single-timestamp range can match multiple series at once.
async fn exact_timestamp_range_predicate_multiple_results() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new().timestamp_range(250, 251),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"72.4\"",
"SeriesFrame, tags: _field=reading,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"51\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"53.4\"",
],
})
.run()
.await;
}
#[tokio::test]
// A tautological tag = tag comparison behaves like no predicate.
async fn tag_predicate_always_true() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurements",
request: GrpcRequestBuilder::new().tag_to_tag_predicate("region", "region"),
expected_results: vec![
"SeriesFrame, tags: _field=user,_measurement=cpu,region=west, type: 0",
"FloatPointsFrame, timestamps: [100, 150], values: \"23.2,21\"",
"SeriesFrame, tags: _field=bytes,_measurement=disk,region=east, type: 1",
"IntegerPointsFrame, timestamps: [200], values: \"99\"",
],
})
.run()
.await;
}
#[tokio::test]
// A predicate on a column not present anywhere yields no results.
async fn unknown_columns_in_predicate_no_results() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurements",
request: GrpcRequestBuilder::new()
// bar is not a column that appears in the data; produce no results
.tag_predicate("bar", "baz"),
expected_results: vec![],
})
.run()
.await;
}
#[tokio::test]
async fn tag_predicate_containing_field() {
// Columns in the RPC predicate must be treated as tags:
// https://github.com/influxdata/idpe/issues/16238
Arc::new(ReadFilterTest {
setup_name: "StringFieldWithNumericValue",
request: GrpcRequestBuilder::new()
// fld exists in the table, but only as a field, not a tag, so no data should be
// returned.
.tag_predicate("fld", "200"),
expected_results: vec![],
})
.run()
.await;
}
#[tokio::test]
// The same row set is selected by `state = 'CA'` and by `state != 'MA'`.
async fn tag_predicates() {
let expected_results = vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [200], values: \"90\"",
];
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new()
.timestamp_range(200, 300)
// filter to one row in h2o
.tag_predicate("state", "CA"),
expected_results: expected_results.clone(),
})
.run()
.await;
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new()
.timestamp_range(200, 300)
// Same results via a != predicate.
.not_tag_predicate("state", "MA"),
expected_results,
})
.run()
.await;
}
#[tokio::test]
// Combining a field predicate with a tag predicate narrows to one series.
async fn field_and_tag_predicates() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: GrpcRequestBuilder::new()
// filter to one row in h2o
.field_predicate("other_temp")
.tag_predicate("state", "CA"),
expected_results: vec![
"SeriesFrame, tags: _field=other_temp,_measurement=h2o,city=Boston,state=CA, type: 0",
"FloatPointsFrame, timestamps: [350], values: \"72.4\"",
],
})
.run()
.await;
}
#[tokio::test]
// Exact _field match: only the `temp` field, across all measurements.
async fn field_exact_match_predicate() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: GrpcRequestBuilder::new().field_predicate("temp"),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [50, 100000], values: \"70.4,70.4\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,state=CA, type: 0",
"FloatPointsFrame, timestamps: [300], values: \"79\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [50], values: \"53.4\"",
],
})
.run()
.await;
}
#[tokio::test]
// Negated _field match: everything except the `temp` field.
async fn not_field_predicate() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: GrpcRequestBuilder::new().not_field_predicate("temp"),
expected_results: vec![
"SeriesFrame, tags: _field=other_temp,_measurement=h2o,city=Boston,state=CA, type: 0",
"FloatPointsFrame, timestamps: [350], values: \"72.4\"",
"SeriesFrame, tags: _field=moisture,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [100000], values: \"43\"",
"SeriesFrame, tags: _field=other_temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"70.4\"",
"SeriesFrame, tags: _field=reading,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [50], values: \"51\"",
],
})
.run()
.await;
}
#[tokio::test]
// Regex on _field is a substring-style match here: temp and other_temp.
async fn field_regex_match_predicates() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: GrpcRequestBuilder::new().regex_match_predicate("_field", "temp"),
expected_results: vec![
// Should see results for temp and other_temp (but not reading)
"SeriesFrame, tags: _field=other_temp,_measurement=h2o,city=Boston,state=CA, type: 0",
"FloatPointsFrame, timestamps: [350], values: \"72.4\"",
"SeriesFrame, tags: _field=other_temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"70.4\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [50, 100000], values: \"70.4,70.4\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,state=CA, type: 0",
"FloatPointsFrame, timestamps: [300], values: \"79\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [50], values: \"53.4\"",
],
})
.run()
.await;
}
#[tokio::test]
// Field predicate scoped to a single measurement.
async fn field_and_measurement_predicates() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: GrpcRequestBuilder::new()
.field_predicate("temp")
.measurement_predicate("h2o"),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [50, 100000], values: \"70.4,70.4\"",
],
})
.run()
.await;
}
#[tokio::test]
async fn multi_field_and_measurement_predicates() {
// Predicate should pick 'temp' field from h2o and 'other_temp' from o2
//
// (_field = 'other_temp' AND _measurement = 'h2o') OR (_field = 'temp' AND _measurement = 'o2')
let p1 = GrpcRequestBuilder::new()
.field_predicate("other_temp")
.measurement_predicate("h2o");
let p2 = GrpcRequestBuilder::new()
.field_predicate("temp")
.measurement_predicate("o2");
// Extract p2's predicate tree so it can be OR-combined into p1's request.
let node2 = p2.predicate.unwrap().root.unwrap();
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: p1.combine_predicate(Logical::Or, node2),
expected_results: vec![
// SHOULD NOT contain temp from h2o
"SeriesFrame, tags: _field=other_temp,_measurement=h2o,city=Boston,state=CA, type: 0",
"FloatPointsFrame, timestamps: [350], values: \"72.4\"",
"SeriesFrame, tags: _field=other_temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"70.4\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,state=CA, type: 0",
"FloatPointsFrame, timestamps: [300], values: \"79\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [50], values: \"53.4\"",
],
})
.run()
.await;
}
#[tokio::test]
// _measurement predicate combined with a time range.
async fn measurement_predicates() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: GrpcRequestBuilder::new()
// use an expr on table name to pick just the last row from o2
.timestamp_range(200, 400)
.measurement_predicate("o2"),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=o2,state=CA, type: 0",
"FloatPointsFrame, timestamps: [300], values: \"79\"",
],
})
.run()
.await;
}
#[tokio::test]
// A literal-only predicate ('foo' = 'foo') is always true: all data returns.
async fn predicate_no_columns() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurements",
request: GrpcRequestBuilder::new()
// Predicate with no columns, only literals.
.lit_lit_predicate("foo", "foo"),
expected_results: vec![
"SeriesFrame, tags: _field=user,_measurement=cpu,region=west, type: 0",
"FloatPointsFrame, timestamps: [100, 150], values: \"23.2,21\"",
"SeriesFrame, tags: _field=bytes,_measurement=disk,region=east, type: 1",
"IntegerPointsFrame, timestamps: [200], values: \"99\"",
],
})
.run()
.await;
}
#[tokio::test]
// Positive regex match on a tag value.
async fn tag_regex_match_predicates() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new()
.timestamp_range(200, 300)
// will match CA state
.regex_match_predicate("state", "C.*"),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [200], values: \"90\"",
],
})
.run()
.await;
}
#[tokio::test]
// Negated regex match on a tag value.
async fn tag_regex_not_match_predicates() {
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiSeries",
request: GrpcRequestBuilder::new()
.timestamp_range(200, 300)
// will filter out any rows with a state that matches "CA"
.not_regex_match_predicate("state", "C.*"),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"72.4\"",
"SeriesFrame, tags: _field=reading,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"51\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"53.4\"",
],
})
.run()
.await;
}
#[tokio::test]
// InfluxQL-style escaped regex (\: and \.) must be handled.
async fn tag_regex_escaped_predicates() {
Arc::new(ReadFilterTest {
setup_name: "MeasurementStatusCode",
request: GrpcRequestBuilder::new()
// Came from InfluxQL as:
//
// ```text
// SELECT value
// FROM db0.rp0.status_code
// WHERE url =~ /https\:\/\/influxdb\.com/
// ```
.regex_match_predicate("url", r#"https\://influxdb\.com"#),
expected_results: vec![
// expect one series with influxdb.com
"SeriesFrame, tags: _field=value,_measurement=status_code,url=https://influxdb.com, \
type: 0",
"FloatPointsFrame, timestamps: [1527018816000000000], values: \"418\"",
],
})
.run()
.await;
}
#[tokio::test]
// Negated form of the escaped-regex test above.
async fn tag_not_match_regex_escaped_predicates() {
Arc::new(ReadFilterTest {
setup_name: "MeasurementStatusCode",
request: GrpcRequestBuilder::new()
// Came from InfluxQL as:
//
// ```text
// SELECT value
// FROM db0.rp0.status_code
// WHERE url !~ /https\:\/\/influxdb\.com/
// ```
.not_regex_match_predicate("url", r#"https\://influxdb\.com"#),
expected_results: vec![
// expect one series with example.com
"SeriesFrame, tags: _field=value,_measurement=status_code,url=http://www.example.com, \
type: 0",
"FloatPointsFrame, timestamps: [1527018806000000000], values: \"404\"",
],
})
.run()
.await;
}
#[tokio::test]
async fn predicate_unsupported_in_scan() {
// These kinds of predicates can't be pushed down into chunks, but they can be evaluated by the
// general purpose DataFusion plan
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsMultiTagValue",
request: GrpcRequestBuilder::new()
.or_tag_predicates([("state", "CA"), ("city", "Boston")].into_iter()),
expected_results: vec![
// Note these results include data from both o2 and h2o
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [100], values: \"70.4\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=LA,state=CA, type: 0",
"FloatPointsFrame, timestamps: [200], values: \"90\"",
"SeriesFrame, tags: _field=reading,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [100], values: \"50\"",
"SeriesFrame, tags: _field=temp,_measurement=o2,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [100], values: \"50.4\"",
],
})
.run()
.await;
}
#[tokio::test]
async fn multi_negation() {
// reproducer for https://github.com/influxdata/influxdb_iox/issues/4800
Arc::new(ReadFilterTest {
setup_name: "EndToEndTest",
request: GrpcRequestBuilder::new()
.or_tag_predicates([("host", "server01"), ("host", "")].into_iter()),
expected_results: vec![
"SeriesFrame, tags: _field=color,_measurement=attributes, type: 4",
"StringPointsFrame, timestamps: [8000], values: blue",
"SeriesFrame, tags: _field=value,_measurement=cpu_load_short,host=server01, type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"27.99\"",
"SeriesFrame, tags: \
_field=value,_measurement=cpu_load_short,host=server01,region=us-east, type: 0",
"FloatPointsFrame, timestamps: [3000], values: \"1234567.891011\"",
"SeriesFrame, tags: \
_field=value,_measurement=cpu_load_short,host=server01,region=us-west, type: 0",
"FloatPointsFrame, timestamps: [0, 4000], values: \"0.64,0.000003\"",
"SeriesFrame, tags: _field=active,_measurement=status, type: 3",
"BooleanPointsFrame, timestamps: [7000], values: true",
"SeriesFrame, tags: _field=in,_measurement=swap,host=server01,name=disk0, type: 0",
"FloatPointsFrame, timestamps: [6000], values: \"3\"",
"SeriesFrame, tags: _field=out,_measurement=swap,host=server01,name=disk0, type: 0",
"FloatPointsFrame, timestamps: [6000], values: \"4\"",
],
})
.run()
.await;
}
#[tokio::test]
async fn data_plan_order() {
// No predicates at all: every series of the setup comes back. The expected
// list pins the output ordering (state=CA before MA, city Boston before
// Kingston, zz_tag A before B — i.e. sorted tag sets within a measurement).
Arc::new(ReadFilterTest {
setup_name: "MeasurementsSortableTags",
request: GrpcRequestBuilder::new(),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=CA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"70.3\"",
"SeriesFrame, tags: _field=other,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"5\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA, type: 0",
"FloatPointsFrame, timestamps: [250], values: \"70.5\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Boston,state=MA,zz_tag=A, \
type: 0",
"FloatPointsFrame, timestamps: [1000], values: \"70.4\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Kingston,state=MA,zz_tag=A, \
type: 0",
"FloatPointsFrame, timestamps: [800], values: \"70.1\"",
"SeriesFrame, tags: _field=temp,_measurement=h2o,city=Kingston,state=MA,zz_tag=B, \
type: 0",
"FloatPointsFrame, timestamps: [100], values: \"70.2\"",
],
})
.run()
.await;
}
#[tokio::test]
async fn filter_on_value() {
// Combines a field-*value* predicate (== 1.77) with a field-name predicate
// ("load4"); only the load4 points whose value is 1.77 are returned.
Arc::new(ReadFilterTest {
setup_name: "MeasurementsForDefect2845",
request: GrpcRequestBuilder::new()
.field_value_predicate(1.77)
.field_predicate("load4"),
expected_results: vec![
"SeriesFrame, tags: _field=load4,_measurement=system,host=host.local, type: 0",
"FloatPointsFrame, timestamps: [1527018806000000000, 1527018826000000000], \
values: \"1.77,1.77\"",
],
})
.run()
.await;
}
#[tokio::test]
#[should_panic(expected = "Unsupported _field predicate")]
async fn unsupported_field_predicate() {
// Tell the test to panic with the expected message if `TEST_INTEGRATION` isn't set so that
// this still passes
maybe_skip_integration!("Unsupported _field predicate");
// A negated field predicate is expected to panic inside the request; the
// expected_results below are never actually compared.
Arc::new(ReadFilterTest {
setup_name: "TwoMeasurementsManyFields",
request: GrpcRequestBuilder::new()
.not_field_predicate("temp")
.or_tag_predicates([("city", "Boston")].into_iter()),
expected_results: vec![
"SeriesFrame, tags: _field=temp,_measurement=o2,state=CA, type: 0",
"FloatPointsFrame, timestamps: [300], values: \"79\"",
],
})
.run()
.await;
}
#[tokio::test]
async fn periods_in_names() {
// Measurement, tag and field names containing periods must round-trip
// through the storage gRPC API unmangled.
Arc::new(ReadFilterTest {
setup_name: "PeriodsInNames",
request: GrpcRequestBuilder::new().timestamp_range(0, 1_700_000_001_000_000_000),
expected_results: vec![
// Should return all four series: both fields for each of the two tag sets.
"SeriesFrame, tags: \
_field=field.one,_measurement=measurement.one,tag.one=value,tag.two=other, type: 0",
"FloatPointsFrame, timestamps: [1609459201000000001], values: \"1\"",
"SeriesFrame, tags: \
_field=field.two,_measurement=measurement.one,tag.one=value,tag.two=other, type: 3",
"BooleanPointsFrame, timestamps: [1609459201000000001], values: true",
"SeriesFrame, tags: \
_field=field.one,_measurement=measurement.one,tag.one=value2,tag.two=other2, type: 0",
"FloatPointsFrame, timestamps: [1609459201000000002], values: \"1\"",
"SeriesFrame, tags: \
_field=field.two,_measurement=measurement.one,tag.one=value2,tag.two=other2, type: 3",
"BooleanPointsFrame, timestamps: [1609459201000000002], values: false",
],
})
.run()
.await;
}
#[tokio::test]
async fn periods_in_predicates() {
// A tag predicate whose tag *key* contains a period ("tag.one") must still
// filter correctly.
Arc::new(ReadFilterTest {
setup_name: "PeriodsInNames",
request: GrpcRequestBuilder::new()
.timestamp_range(0, 1_700_000_001_000_000_000)
.tag_predicate("tag.one", "value"),
expected_results: vec![
// Should return both series (both fields) of the matching tag set only.
"SeriesFrame, tags: \
_field=field.one,_measurement=measurement.one,tag.one=value,tag.two=other, type: 0",
"FloatPointsFrame, timestamps: [1609459201000000001], values: \"1\"",
"SeriesFrame, tags: \
_field=field.two,_measurement=measurement.one,tag.one=value,tag.two=other, type: 3",
"BooleanPointsFrame, timestamps: [1609459201000000001], values: true",
],
})
.run()
.await;
}
/// See https://github.com/influxdata/influxdb_iox/issues/7848
///
/// Writes one point at time 0 and one at `i64::MAX`, sets a retention
/// period, and checks that read_filter only returns the point that is
/// still inside the retention window.
#[tokio::test]
async fn retention() {
test_helpers::maybe_start_logging();
let database_url = maybe_skip_integration!();
let table_name = "the_table";
let ts_max = i64::MAX;
// Set up the cluster ====================================
let mut cluster = MiniCluster::create_shared_never_persist(database_url).await;
StepTest::new(
&mut cluster,
vec![
Step::WriteLineProtocol(format!(
"{table_name},tag=A val=42i 0\n\
{table_name},tag=A val=43i {ts_max}"
)),
Step::SetRetention(Some(1)),
Step::Custom(Box::new(move |state: &mut StepTestState| {
async move {
let mut storage_client = state.cluster().querier_storage_client();
let read_filter_request = GrpcRequestBuilder::new()
.source(state.cluster())
.tag_predicate("tag", "A")
.build_read_filter();
let read_response = storage_client
.read_filter(read_filter_request)
.await
.unwrap();
let responses: Vec<_> = read_response.into_inner().try_collect().await.unwrap();
// Flatten the streamed responses into the raw data frames.
let frames: Vec<Data> = responses
.into_iter()
.flat_map(|r| r.frames)
.flat_map(|f| f.data)
.collect();
let actual_frames = dump_data_frames(&frames);
// Only the point written at i64::MAX survives retention.
let expected_frames = vec![
"SeriesFrame, tags: _field=val,_measurement=the_table,tag=A, type: 1",
"IntegerPointsFrame, timestamps: [9223372036854775807], values: \"43\"",
];
assert_eq!(
expected_frames,
actual_frames,
"Expected:\n{}\nActual:\n{}",
expected_frames.join("\n"),
actual_frames.join("\n")
)
}
.boxed()
})),
],
)
.run()
.await
}
/// A declarative read_filter test case: load the named setup, send the
/// built gRPC request, and compare the dumped response frames line-by-line.
#[derive(Debug)]
struct ReadFilterTest {
/// Name of the data-loading scenario to run before issuing the request.
setup_name: &'static str,
/// The read_filter request under test; its source is filled in per-cluster.
request: GrpcRequestBuilder,
/// Expected `dump_data_frames` output, one line per frame.
expected_results: Vec<&'static str>,
}
#[async_trait]
impl InfluxRpcTest for ReadFilterTest {
fn setup_name(&self) -> &'static str {
self.setup_name
}
// Issues the configured read_filter gRPC request against the cluster and
// asserts that the dumped response frames match `expected_results` exactly.
async fn request_and_assert(&self, cluster: &MiniCluster) {
let mut storage_client = cluster.querier_storage_client();
let read_filter_request = self.request.clone().source(cluster).build_read_filter();
let read_filter_response = storage_client
.read_filter(read_filter_request)
.await
.expect("successful read_filter call");
let responses: Vec<_> = read_filter_response
.into_inner()
.try_collect()
.await
.unwrap();
// Flatten the streamed responses into the raw data frames.
let frames: Vec<_> = responses
.into_iter()
.flat_map(|r| r.frames)
.flat_map(|f| f.data)
.collect();
let results = dump_data_frames(&frames);
assert_eq!(results, self.expected_results);
}
}
|
#[macro_use]
extern crate clap;
#[macro_use]
extern crate slog;
extern crate slog_term;
use slog::Drain;
use std::process;
use clap::{Arg, ArgMatches, App, SubCommand};
arg_enum! {
// Hash algorithms accepted by the `verify` subcommand's `-a` flag.
// `arg_enum!` lets the value be parsed back via `value_t!` and enumerated
// with `Algorithm::variants()` for clap's `possible_values`.
#[derive(Debug)]
enum Algorithm {
SHA1,
SHA256,
Argon2
}
}
/// Configures slog logging from the parsed CLI arguments and dispatches to
/// the selected subcommand.
///
/// Verbosity: no `-v` => Info, `-v` => Debug, `-vv` or more => Trace.
/// Returns `Ok(())` when no recognized subcommand was given.
fn run(matches: ArgMatches) -> Result<(), String> {
    let min_log_level = match matches.occurrences_of("verbose") {
        0 => slog::Level::Info,
        1 => slog::Level::Debug,
        // Two or more `-v` flags: most verbose. (`2 | _` was redundant —
        // the wildcard alone already covers every remaining count.)
        _ => slog::Level::Trace,
    };
    // Synchronous plain-text log output to stderr, filtered by level.
    let decorator = slog_term::PlainSyncDecorator::new(std::io::stderr());
    let drain = slog_term::FullFormat::new(decorator)
        .build()
        .filter_level(min_log_level)
        .fuse();
    let logger = slog::Logger::root(drain, o!());
    trace!(logger, "app_setup");
    // setting up app...
    debug!(logger, "load_configuration");
    trace!(logger, "app_setup_complete");
    // starting processing...
    info!(logger, "processing_started");
    match matches.subcommand() {
        ("analyse", Some(m)) => run_analyse(m, &logger),
        ("verify", Some(m)) => run_verify(m, &logger),
        _ => Ok(()),
    }
}
/// Handles the `analyse` subcommand: logs the chosen input file and returns.
fn run_analyse(matches: &ArgMatches, parent_logger: &slog::Logger) -> Result<(), String> {
    // Child logger tagging every record with the subcommand name.
    let logger = parent_logger.new(o!("command" => "analyse"));
    // `input-file` has a default value, so the lookup always succeeds.
    debug!(
        logger,
        "analysis_started";
        "input_file" => matches.value_of("input-file").unwrap()
    );
    Ok(())
}
/// Handles the `verify` subcommand: logs the requested hash algorithm and returns.
fn run_verify(matches: &ArgMatches, parent_logger: &slog::Logger) -> Result<(), String> {
    // Child logger tagging every record with the subcommand name.
    let logger = parent_logger.new(o!("command" => "verify"));
    // `algorithm` is required and restricted to the enum's variants,
    // so parsing it back into `Algorithm` cannot fail here.
    let algorithm = value_t!(matches.value_of("algorithm"), Algorithm).unwrap();
    let algorithm_name = format!("{:?}", algorithm);
    debug!(logger, "verification_started"; "algorithm" => algorithm_name);
    Ok(())
}
fn main() {
println!("24 Days of Rust vol. 2 - clap");
// Declarative CLI definition: a repeatable -v verbosity flag plus the
// `analyse` and `verify` subcommands handled in `run` above.
let matches = App::new("24daysofrust")
.version("0.1")
.author("Zbigniew Siciarz")
.about("learn you some Rust!")
.arg(Arg::with_name("verbose").short("v").multiple(true).help(
"verbosity level",
))
.subcommand(
SubCommand::with_name("analyse")
.about("Analyses the data from file")
.arg(
Arg::with_name("input-file")
.short("i")
.default_value("default.csv")
.value_name("FILE"),
),
)
.subcommand(
SubCommand::with_name("verify")
.about("Verifies the data")
.arg(
Arg::with_name("algorithm")
.short("a")
.help("Hash algorithm to use")
// Restrict accepted values to the Algorithm enum's variant names.
.possible_values(&Algorithm::variants())
.required(true)
.value_name("ALGORITHM"),
),
)
.get_matches();
if let Err(e) = run(matches) {
println!("Application error: {}", e);
process::exit(1);
}
}
|
// NOTE(review): svd2rust-style generated register accessor — keep in sync
// with the SVD description rather than editing by hand.
#[doc = "Reader of register ITLINE27"]
pub type R = crate::R<u32, super::ITLINE27>;
#[doc = "Reader of field `USART1`"]
pub type USART1_R = crate::R<bool, bool>;
impl R {
#[doc = "Bit 0 - USART1"]
#[inline(always)]
pub fn usart1(&self) -> USART1_R {
// Expose bit 0 of the register value as a boolean field reader.
USART1_R::new((self.bits & 0x01) != 0)
}
}
|
use crate::position::*;
use crate::types::*;
use crate::util::*;
use itertools::Itertools;
use lsp_types::*;
use ropey::{Rope, RopeSlice};
use std::collections::HashSet;
use std::fs::File;
use std::io::{BufReader, BufWriter, Write};
use std::os::unix::io::FromRawFd;
/// Applies `text_edits` to the file referenced by `uri`, atomically:
/// the edited content is written to a `mkstemp` temporary next to the
/// original, then renamed over it, and the original mode bits are restored.
///
/// Assumes the edits are ordered left-to-right and non-overlapping
/// (the write cursor below only ever moves forward) — TODO confirm callers
/// guarantee this.
pub fn apply_text_edits_to_file(
uri: &Url,
text_edits: &[OneOf<TextEdit, AnnotatedTextEdit>],
offset_encoding: OffsetEncoding,
) -> std::io::Result<()> {
let path = uri.to_file_path().unwrap();
let filename = path.to_str().unwrap();
let path = std::ffi::CString::new(filename).unwrap();
// Stat the original up front so its permission bits can be copied onto the
// replacement file after the rename.
let mut stat;
if unsafe {
stat = std::mem::zeroed();
libc::stat(path.as_ptr(), &mut stat)
} != 0
{
return Err(std::io::Error::new(
std::io::ErrorKind::PermissionDenied,
format!("Failed to stat {}", filename),
));
}
let file = File::open(filename)?;
let text = Rope::from_reader(BufReader::new(file))?;
let (temp_path, temp_file) = {
// mkstemp(3) mutates its template in place, so the CString is
// round-tripped through a raw pointer and reclaimed afterwards.
let template = format!("{}.XXXXXX", filename);
let cstr = std::ffi::CString::new(template).unwrap();
let ptr = cstr.into_raw();
let temp_fd = unsafe { libc::mkstemp(ptr) };
let cstr = unsafe { std::ffi::CString::from_raw(ptr) };
let temp_fd = cvt(temp_fd)?;
let temp_path = cstr.into_string().unwrap();
let temp_file = unsafe { File::from_raw_fd(temp_fd) };
(temp_path, temp_file)
};
// Streams `text` into `temp_file`, splicing each edit in at its range.
fn apply_text_edits_to_file_impl(
text: Rope,
temp_file: File,
text_edits: &[OneOf<TextEdit, AnnotatedTextEdit>],
offset_encoding: OffsetEncoding,
) -> Result<(), std::io::Error> {
let mut output = BufWriter::new(temp_file);
let character_to_offset = match offset_encoding {
OffsetEncoding::Utf8 => character_to_offset_utf_8_code_units,
// Not a proper UTF-16 code units handling, but works within BMP
OffsetEncoding::Utf16 => character_to_offset_utf_8_code_points,
};
let text_len_lines = text.len_lines() as u64;
// Char index of the first not-yet-copied character of `text`.
let mut cursor = 0;
for te in text_edits {
let TextEdit {
range: Range { start, end },
new_text,
} = match te {
OneOf::Left(edit) => edit,
OneOf::Right(annotated_edit) => &annotated_edit.text_edit,
};
if start.line as u64 >= text_len_lines || end.line as u64 >= text_len_lines {
return Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Text edit range extends past end of file.",
));
}
let start_offset =
character_to_offset(text.line(start.line as _), start.character as _);
let end_offset = character_to_offset(text.line(end.line as _), end.character as _);
if start_offset.is_none() || end_offset.is_none() {
return Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Text edit range points past end of line.",
));
}
let start_char = text.line_to_char(start.line as _) + start_offset.unwrap();
let end_char = text.line_to_char(end.line as _) + end_offset.unwrap();
// Copy the untouched span before the edit, then the replacement text;
// the replaced span [start_char, end_char) itself is skipped.
for chunk in text.slice(cursor..start_char).chunks() {
output.write_all(chunk.as_bytes())?;
}
output.write_all(new_text.as_bytes())?;
cursor = end_char;
}
// Copy whatever remains after the last edit.
for chunk in text.slice(cursor..).chunks() {
output.write_all(chunk.as_bytes())?;
}
Ok(())
}
// On success: move the temp file into place and restore the mode bits
// (mkstemp creates with 0600). On failure: best-effort cleanup of the temp.
apply_text_edits_to_file_impl(text, temp_file, text_edits, offset_encoding)
.and_then(|_| std::fs::rename(&temp_path, filename))
.map(|_| unsafe {
libc::chmod(path.as_ptr(), stat.st_mode);
})
.map_err(|e| {
let _ = std::fs::remove_file(&temp_path);
e
})
}
// Adapted from std/src/sys/unix/mod.rs.
/// Converts a C-style return code into an `io::Result`, mapping the
/// sentinel `-1` to the current `errno` value.
fn cvt(t: i32) -> std::io::Result<i32> {
    match t {
        -1 => Err(std::io::Error::last_os_error()),
        ok => Ok(ok),
    }
}
/// Interprets `character` as a code-point index into `line` and returns it
/// unchanged, or `None` when it points at or past the end of the line.
fn character_to_offset_utf_8_code_points(line: RopeSlice, character: usize) -> Option<usize> {
    if character >= line.len_chars() {
        return None;
    }
    Some(character)
}
/// Interprets `character` as a UTF-8 byte (code-unit) offset into `line` and
/// converts it to a code-point offset, or `None` when it is out of range.
fn character_to_offset_utf_8_code_units(line: RopeSlice, character: usize) -> Option<usize> {
    if character >= line.len_bytes() {
        return None;
    }
    Some(line.byte_to_char(character))
}
/// Translates LSP text edits into a Kakoune command string that applies them
/// to the buffer for `uri` (or to the current buffer when `uri` is `None` or
/// has no file path). Returns `None` when there is nothing to apply.
pub fn apply_text_edits_to_buffer(
uri: Option<&Url>,
text_edits: &[OneOf<TextEdit, AnnotatedTextEdit>],
text: &Rope,
offset_encoding: OffsetEncoding,
) -> Option<String> {
// Empty text edits processed as a special case because Kakoune's `select` command
// doesn't support empty arguments list.
if text_edits.is_empty() {
return None;
}
let mut edits = text_edits
.iter()
.map(|text_edit| lsp_text_edit_to_kakoune(text_edit, text, offset_encoding))
.collect::<Vec<_>>();
// Adjoin selections detection and Kakoune side editing relies on edits being ordered left to
// right. Language servers usually send them such, but spec doesn't say anything about the order
// hence we ensure it by sorting. It's important to use stable sort to handle properly cases
// like multiple inserts in the same place.
edits.sort_by_key(|x| {
(
x.range.start.line,
x.range.start.column,
x.range.end.line,
x.range.end.column,
)
});
// One Kakoune selection descriptor per distinct edit range.
let select_edits = edits
.iter()
.map(|edit| format!("{}", edit.range))
.dedup()
.join(" ");
// Merged selections require one less selection cycle after the next restore
// to get to the next selection.
let merged_selections = edits
.windows(2)
.enumerate()
.filter_map(|(i, pair)| {
let end = &pair[0].range.end;
let start = &pair[1].range.start;
// Replacing adjoin selection with empty content effectively removes it.
// NOTE(review): `&&` binds tighter than `||`, so the line-wrap disjunct
// below does not require `new_text.is_empty()` — confirm whether the
// intent was `is_empty() && (same_line_adjoin || line_wrap_adjoin)`.
let remove_adjoin = pair[0].new_text.is_empty()
&& (end.line == start.line && end.column + 1 == start.column)
|| (end.line + 1 == start.line && end.column == EOL_OFFSET && start.column == 1);
// Inserting in the same place doesn't produce extra selection.
let insert_the_same = end.line == start.line && end.column == start.column;
if remove_adjoin || insert_the_same {
Some(i)
} else {
None
}
})
.collect::<HashSet<_>>();
// Emit one Kakoune command per edit, cycling (`z` + rotations) through the
// saved selections; merged selections share a rotation index.
let mut selection_index = 0;
let apply_edits = edits
.iter()
.enumerate()
.map(
|(
i,
KakouneTextEdit {
new_text, command, ..
},
)| {
let command = match command {
KakouneTextEditCommand::InsertBefore => "lsp-insert-before-selection",
KakouneTextEditCommand::Replace => "lsp-replace-selection",
};
let command = format!(
"exec 'z{}<space>'
{} {}",
if selection_index > 0 {
format!("{})", selection_index)
} else {
String::new()
},
command,
editor_quote(&new_text)
);
if !merged_selections.contains(&i) {
selection_index += 1;
}
command
},
)
.join("\n");
// Select all edit ranges, save them to register Z, then apply each edit.
let command = format!(
"select {}
exec -save-regs '' Z
{}",
select_edits, apply_edits
);
let command = format!("eval -draft -save-regs '^' {}", editor_quote(&command));
// Target the buffer for `uri` when it maps to a file path; otherwise run
// the command against the current buffer.
uri.and_then(|uri| uri.to_file_path().ok())
.and_then(|path| {
path.to_str().map(|buffile| {
format!(
"eval -buffer {} {}",
editor_quote(buffile),
editor_quote(&command)
)
})
})
.or(Some(command))
}
/// Which Kakoune-side command applies an edit to the active selection.
enum KakouneTextEditCommand {
/// Pure insertion (empty LSP range): insert before the selection.
InsertBefore,
/// Non-empty LSP range: replace the selection's content.
Replace,
}
/// An LSP text edit translated into Kakoune coordinates.
struct KakouneTextEdit {
/// Target range in Kakoune (1-based line/column) coordinates.
range: KakouneRange,
/// Replacement/insertion text.
new_text: String,
/// How to apply `new_text` at `range`.
command: KakouneTextEditCommand,
}
/// Converts an LSP `TextEdit` (plain or annotated) into a `KakouneTextEdit`,
/// choosing the apply command based on whether the edit is a pure insertion
/// (an empty range).
fn lsp_text_edit_to_kakoune(
    text_edit: &OneOf<TextEdit, AnnotatedTextEdit>,
    text: &Rope,
    offset_encoding: OffsetEncoding,
) -> KakouneTextEdit {
    // Annotated edits carry the same payload plus metadata we don't need here.
    let TextEdit { range, new_text } = match text_edit {
        OneOf::Left(edit) => edit,
        OneOf::Right(annotated_edit) => &annotated_edit.text_edit,
    };
    let Range { start, end } = range;
    // An empty range means nothing is replaced: insert before the position.
    let insert = start.line == end.line && start.character == end.character;
    // `range` is already a `&Range` here — the previous `&range` created a
    // needless double reference that only compiled via deref coercion.
    let range = lsp_range_to_kakoune(range, text, offset_encoding);
    let command = if insert {
        KakouneTextEditCommand::InsertBefore
    } else {
        KakouneTextEditCommand::Replace
    };
    KakouneTextEdit {
        range,
        new_text: new_text.to_owned(),
        command,
    }
}
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// #11612
// We weren't updating the auto adjustments with all the resolved
// type information after type check.
// pretty-expanded FIXME #23616
// A trait with only a defaulted method, so any type can implement it with an
// empty impl — as `B` does below. (Part of a compiler run-pass test; keep the
// pre-2018 style intact.)
trait A { fn dummy(&self) { } }
struct B<'a, T:'a> {
f: &'a T
}
impl<'a, T> A for B<'a, T> {}
// `&A` is the pre-2018 bare-trait-object syntax; the test exercises the
// unsized coercion from `&B<G>` to the trait object, both implicit and via
// an explicit cast. Intentionally not modernized to `&dyn A`.
fn foo(_: &A) {}
fn bar<G>(b: &B<G>) {
foo(b); // Coercion should work
foo(b as &A); // Explicit cast should work as well
}
fn main() {}
|
use alloc::collections::VecDeque;
use spin::Mutex;
/// Global log buffer; `None` until `init` installs one.
pub static LOG: Mutex<Option<Log>> = Mutex::new(None);
/// Installs the global log with a 1 MiB buffer, replacing any previous one.
pub fn init() {
    LOG.lock().replace(Log::new(1024 * 1024));
}
/// A bounded in-memory byte log backed by a ring of `VecDeque<u8>`.
pub struct Log {
/// Buffered bytes, oldest at the front.
data: VecDeque<u8>,
/// Capacity bound; `write` evicts from the front to stay below it.
size: usize,
}
impl Log {
    /// Creates a log bounded by `size`. Note the eviction rule in `write`
    /// keeps the buffer strictly below `size` (at most `size - 1` bytes),
    /// so the backing deque never grows past its initial capacity.
    pub fn new(size: usize) -> Log {
        Log {
            data: VecDeque::with_capacity(size),
            size
        }
    }
    /// Returns the buffered bytes as the deque's two contiguous slices,
    /// in order (front slice first).
    pub fn read(&self) -> (&[u8], &[u8]) {
        self.data.as_slices()
    }
    /// Appends `buf`, evicting the oldest bytes to respect the size bound.
    pub fn write(&mut self, buf: &[u8]) {
        // A zero-sized log stores nothing. Without this guard the eviction
        // loop below never terminates for size == 0: `len() + 1 >= 0` is
        // always true and `pop_front` on an empty deque makes no progress.
        if self.size == 0 {
            return;
        }
        for &b in buf {
            while self.data.len() + 1 >= self.size {
                self.data.pop_front();
            }
            self.data.push_back(b);
        }
    }
}
|
//! Worst-case optimal, n-way joins.
//!
//! This is an extended implementation of Delta-BiGJoin, by Ammar, McSherry,
//! Salihoglu, and Joglekar ([paper](https://dl.acm.org/citation.cfm?id=3199520)).
//!
//! The overall structure and the CollectionExtender implementation is adapted from:
//! https://github.com/frankmcsherry/differential-dataflow/tree/master/dogsdogsdogs
use std::collections::{HashMap, HashSet, VecDeque};
use std::hash::Hash;
use std::rc::Rc;
use timely::dataflow::channels::pact::{Exchange, Pipeline};
use timely::dataflow::operators::{Concatenate, Operator, Partition};
use timely::dataflow::scopes::child::Iterative;
use timely::dataflow::Scope;
use timely::order::Product;
use timely::progress::Timestamp;
use timely::worker::AsWorker;
use timely::PartialOrder;
use timely_sort::Unsigned;
use differential_dataflow::lattice::Lattice;
use differential_dataflow::operators::arrange::Arranged;
use differential_dataflow::operators::{Consolidate, Count};
use differential_dataflow::trace::{BatchReader, Cursor, TraceReader};
use differential_dataflow::{AsCollection, Collection, ExchangeData, Hashable};
use crate::binding::{AsBinding, BinaryPredicate, Binding};
use crate::binding::{BinaryPredicateBinding, ConstantBinding};
use crate::domain::Domain;
use crate::logging::DeclarativeEvent;
use crate::plan::{Dependencies, Implementable};
use crate::timestamp::{altneu::AltNeu, Rewind};
use crate::{AsAid, Value, Var};
use crate::{CollectionRelation, Implemented, ShutdownHandle, VariableMap};
/// A boxed, dynamically dispatched `PrefixExtender` turning prefixes of type `P` into extensions of type `V`.
type Extender<'a, S, P, V> = Box<(dyn PrefixExtender<S, Prefix = P, Extension = V> + 'a)>;
/// A type capable of extending a stream of prefixes. Implementors of
/// `PrefixExtension` provide types and methods for extending a
/// differential dataflow collection, via the three methods `count`,
/// `propose`, and `validate`.
trait PrefixExtender<G: Scope> {
/// The required type of prefix to extend.
type Prefix;
/// The type to be produced as extension.
type Extension;
/// Annotates prefixes with the number of extensions the relation would propose.
///
/// The `usize` pair alongside each prefix is presumably (count, index of the
/// proposing extender) as in the dogsdogsdogs implementation this is adapted
/// from — TODO confirm; `index` identifies this extender among its peers.
fn count(
&mut self,
prefixes: &Collection<G, (Self::Prefix, usize, usize)>,
index: usize,
) -> Option<Collection<G, (Self::Prefix, usize, usize)>>;
/// Extends each prefix with corresponding extensions.
fn propose(
&mut self,
prefixes: &Collection<G, Self::Prefix>,
) -> Collection<G, (Self::Prefix, Self::Extension)>;
/// Restricts proposed extensions by those the extender would have proposed.
fn validate(
&mut self,
extensions: &Collection<G, (Self::Prefix, Self::Extension)>,
) -> Collection<G, (Self::Prefix, Self::Extension)>;
}
/// Conversion of a binding into the (possibly empty) set of prefix extenders
/// it contributes, given the shape of the current prefix.
trait IntoExtender<'a, S, V>
where
S: Scope,
S::Timestamp: Timestamp + Lattice,
V: ExchangeData + Hash,
{
/// Returns the extenders this binding offers for prefixes described by `prefix`.
fn into_extender<P: ExchangeData + IndexNode<V>, B: AsBinding + std::fmt::Debug>(
&self,
prefix: &B,
) -> Vec<Extender<'a, S, P, V>>;
}
impl<'a, S> IntoExtender<'a, S, Value> for ConstantBinding
where
S: Scope,
S::Timestamp: Timestamp + Lattice,
{
// A constant binding always contributes exactly one extender, proposing
// its fixed value regardless of the prefix.
fn into_extender<P: ExchangeData + IndexNode<Value>, B: AsBinding + std::fmt::Debug>(
&self,
_prefix: &B,
) -> Vec<Extender<'a, S, P, Value>> {
vec![Box::new(ConstantExtender {
phantom: std::marker::PhantomData,
value: self.value.clone(),
})]
}
}
impl<'a, S, V> IntoExtender<'a, S, V> for BinaryPredicateBinding
where
S: Scope,
S::Timestamp: Timestamp + Lattice,
V: ExchangeData + Hash,
{
// A binary predicate contributes an extender only once the prefix binds
// exactly one of its two variables (see `direction`).
fn into_extender<P: ExchangeData + IndexNode<V>, B: AsBinding + std::fmt::Debug>(
&self,
prefix: &B,
) -> Vec<Extender<'a, S, P, V>> {
match direction(prefix, self.variables) {
Err(_msg) => {
// We won't panic here, this just means the predicate's variables
// aren't sufficiently bound by the prefixes yet.
vec![]
}
Ok(direction) => vec![Box::new(BinaryPredicateExtender {
phantom: std::marker::PhantomData,
predicate: self.predicate.clone(),
direction,
})],
}
}
}
//
// OPERATOR
//
/// A plan stage performing a worst-case optimal n-way join of the given
/// bindings onto the requested variables (Delta-BiGJoin style; see the
/// module docs). `implement` panics when `variables` or `bindings` is empty.
#[derive(Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Serialize, Deserialize)]
pub struct Hector<A: AsAid> {
/// Variables to bind.
pub variables: Vec<Var>,
/// Bindings to join.
pub bindings: Vec<Binding<A>>,
}
/// Which index to use when extending a prefix along an attribute binding.
/// The payload is the prefix offset of the already-bound variable.
enum Direction {
/// The prefix binds the binding's first variable: use the forward index.
Forward(usize),
/// The prefix binds the binding's second variable: use the reverse index.
Reverse(usize),
}
/// Determines whether `prefix` binds the first (forward) or the second
/// (reverse) of the extender's variables, returning the binding offset.
/// Errs when both or neither variable is bound; the "neither" case is also
/// logged, since it usually signals a planning problem upstream.
fn direction<P>(prefix: &P, extender_variables: (Var, Var)) -> Result<Direction, &'static str>
where
    P: AsBinding + std::fmt::Debug,
{
    let first = AsBinding::binds(prefix, extender_variables.0);
    let second = AsBinding::binds(prefix, extender_variables.1);
    match (first, second) {
        // Only the first extender variable is bound: the forward index applies.
        (Some(offset), None) => Ok(Direction::Forward(offset)),
        // Only the second extender variable is bound: the reverse index applies.
        (None, Some(offset)) => Ok(Direction::Reverse(offset)),
        (Some(_), Some(_)) => Err("Both extender variables already bound by prefix."),
        (None, None) => {
            error!(
                "Neither extender variable {:?} bound by prefix {:?}.",
                extender_variables, prefix
            );
            Err("Neither extender variable bound by prefix.")
        }
    }
}
/// Bindings can be in conflict with the source binding of a given
/// delta pipeline. We need to identify them and handle them as
/// special cases, because we always have to start from prefixes of
/// size two.
pub fn source_conflicts<A: AsAid>(
    source_index: usize,
    bindings: &[Binding<A>],
) -> Vec<&Binding<A>> {
    let source = match &bindings[source_index] {
        Binding::Attribute(source) => source,
        _ => panic!("Source must be an AttributeBinding."),
    };
    // The two single-variable prefixes the source binding starts from.
    let prefix_0 = vec![source.variables.0];
    let prefix_1 = vec![source.variables.1];
    // A binding conflicts when it could already extend one of those
    // single-variable prefixes to the source's other variable.
    bindings
        .iter()
        .enumerate()
        .filter(|(index, binding)| {
            *index != source_index
                && (binding.can_extend(&prefix_0, source.variables.1)
                    || binding.can_extend(&prefix_1, source.variables.0))
        })
        .map(|(_, binding)| binding)
        .collect()
}
/// Orders the variables s.t. each has at least one binding from
/// itself to a prior variable. `source_binding` indicates the binding
/// from which we will source the prefixes in the resulting delta
/// pipeline. Returns the chosen variable order and the corresponding
/// binding order.
///
/// (adapted from github.com/frankmcsherry/dataflow-join/src/motif.rs)
pub fn plan_order<A: AsAid>(
source_index: usize,
bindings: &[Binding<A>],
) -> (Vec<Var>, Vec<Binding<A>>) {
// All distinct variables mentioned by any binding.
let mut variables = bindings
.iter()
.flat_map(AsBinding::variables)
.collect::<Vec<Var>>();
variables.sort();
variables.dedup();
// Determine an order on the attributes. The order may not
// introduce a binding until one of its consituents is already
// bound by the prefix. These constraints are captured via the
// `AsBinding::ready_to_extend` method. The order may otherwise be
// arbitrary, for example selecting the most constrained attribute
// first. Presently, we just pick attributes arbitrarily.
let mut prefix: Vec<Var> = Vec::with_capacity(variables.len());
// Seed the prefix with the source attribute's two variables.
match bindings[source_index] {
Binding::Attribute(ref source) => {
prefix.push(source.variables.0);
prefix.push(source.variables.1);
}
_ => panic!("Source binding must be an attribute."),
}
// All non-source bindings that mention `target`.
let candidates_for = |bindings: &[Binding<A>], target: Var| {
bindings
.iter()
.enumerate()
.flat_map(move |(index, other)| {
if index == source_index {
// Ignore the source binding itself.
None
} else if other.binds(target).is_some() {
Some(other.clone())
} else {
// Some bindings might not even talk about the target
// variable.
None
}
})
.collect::<Vec<Binding<A>>>()
};
let mut ordered_bindings = Vec::new();
let mut candidates: Vec<Binding<A>> = prefix
.iter()
.flat_map(|x| candidates_for(&bindings, *x))
.collect();
// Worklist loop: repeatedly admit candidates that are ready to extend the
// current prefix; the rest wait for a later round. Terminates when nothing
// is waiting or every variable has been placed.
loop {
debug!("Candidates: {:?}", candidates);
let mut waiting_candidates = Vec::new();
candidates.sort();
candidates.dedup();
for candidate in candidates.drain(..) {
match candidate.ready_to_extend(&prefix) {
None => {
// Not yet sufficiently bound; retry in a later round.
waiting_candidates.push(candidate);
}
Some(target) => {
if AsBinding::binds(&prefix, target).is_none() {
// `target` is newly bound: pull in the bindings it enables.
prefix.push(target);
for new_candidate in candidates_for(&bindings, target) {
if candidate != new_candidate {
waiting_candidates.push(new_candidate);
}
}
}
ordered_bindings.push(candidate);
}
}
}
if waiting_candidates.is_empty() {
break;
}
for candidate in waiting_candidates.drain(..) {
candidates.push(candidate);
}
if prefix.len() == variables.len() {
break;
}
}
debug!("Candidates: {:?}", candidates);
// Flush any candidates left over after the prefix covered all variables.
for candidate in candidates.drain(..) {
ordered_bindings.push(candidate);
}
(prefix, ordered_bindings)
}
/// Positional access into a tuple-like value, cloning out the element at `index`.
trait IndexNode<V> {
fn index(&self, index: usize) -> V;
}
impl IndexNode<Value> for (Value, Value) {
    /// Clones the pair element at `index`; only 0 and 1 are valid.
    #[inline(always)]
    fn index(&self, index: usize) -> Value {
        assert!(index <= 1);
        match index {
            0 => self.0.clone(),
            _ => self.1.clone(),
        }
    }
}
impl IndexNode<Value> for (&Value, &Value) {
    /// Clones the referenced pair element at `index`; only 0 and 1 are valid.
    #[inline(always)]
    fn index(&self, index: usize) -> Value {
        assert!(index <= 1);
        match index {
            0 => self.0.clone(),
            _ => self.1.clone(),
        }
    }
}
impl IndexNode<Value> for Vec<Value> {
// Clones the element at `index`; panics when out of bounds, like `Vec` indexing.
#[inline(always)]
fn index(&self, index: usize) -> Value {
self[index].clone()
}
}
impl<A: AsAid> Hector<A> {
// @TODO pass single binding as argument?
// @TODO make these static and take variables as well?
/// Degenerate case of `implement`: with exactly one binding there is
/// nothing to join, so the attribute's forward propose trace is imported
/// and its (e, v) pairs are projected onto the requested variable order.
/// Panics when the single binding is not an attribute binding.
fn implement_single_binding<'b, S>(
&self,
nested: &mut Iterative<'b, S, u64>,
domain: &mut Domain<A, S::Timestamp>,
_local_arrangements: &VariableMap<A, Iterative<'b, S, u64>>,
) -> (Implemented<'b, A, S>, ShutdownHandle)
where
S: Scope,
S::Timestamp: Timestamp + Lattice + Rewind,
{
// With only a single binding given, we don't want to do
// anything fancy (provided the binding is sourceable).
match self.bindings.first().unwrap() {
Binding::Attribute(binding) => {
match domain.forward_propose(&binding.source_attribute) {
None => panic!("Unknown attribute {}", &binding.source_attribute),
Some(forward_trace) => {
let name = format!("Propose({})", &binding.source_attribute);
let (forward, shutdown_forward) =
forward_trace.import_frontier(&nested.parent, &name);
let prefix = binding.variables();
let target_variables = self.variables.clone();
// Re-order each (e, v) tuple to match the requested
// variable order; `binds` gives the tuple offset of
// each target variable.
let tuples = forward.enter(nested).as_collection(move |e, v| {
let tuple = (e, v);
target_variables
.iter()
.flat_map(|x| {
Some(tuple.index(AsBinding::binds(&prefix, *x).unwrap()))
})
.collect()
});
let relation = CollectionRelation {
variables: self.variables.clone(),
tuples,
};
(
Implemented::Collection(relation),
ShutdownHandle::from_button(shutdown_forward),
)
}
}
}
_ => {
panic!("Passed a single, non-sourceable binding.");
}
}
}
// NOTE(review): reference sketch for a dedicated two-binding fast path;
// kept as documentation for the disabled `bindings.len() == 2` branch in
// `implement` below.
// fn two_way<'b, S>(
// nested: &mut Iterative<'b, S, u64>,
// _local_arrangements: &VariableMap<Self::A, Iterative<'b, S, u64>>,
// context: &mut I,
// left: Binding,
// right: Binding,
// ) -> (Implemented<'b, Self::A, S>, ShutdownHandle)
// where
// T: Timestamp + Lattice,
// S: Scope<Timestamp = T>,
// {
// let (source, right) = match left {
// Binding::Attribute(source) => (source, right),
// _ => match right {
// Binding::Attribute(source) => (source, left),
// _ => panic!("At least one binding must be sourceable for Hector::two_way."),
// }
// };
// match right {
// Binding::Constant(constant_binding) => {
// let match_v = constant_binding.value;
// let offset = source.binds(constant_binding.variable)
// .unwrap_or_else(|| panic!("Source doesn't bind constant binding {:?}", constant_binding));
// match offset {
// 0 => {
// // [?a :edge ?b] (constant ?a x) <=> [x :edge ?b]
// let (index, shutdown) = domain.forward_propose(&source.source_attribute)
// .unwrap_or_else(|| panic!("No forward index found for attribute {}", &source.source_attribute))
// .import_core(&nested.parent, &source.source_attribute);
// let frontier: Vec<T> = index.trace.advance_frontier().to_vec();
// index
// .filter(move |e, _v| *e == match_v)
// .enter_at(&nested, move |_, _, time| {
// let mut forwarded = time.clone(); forwarded.advance_by(&frontier);
// Product::new(forwarded, Default::default())
// })
// }
// 1 => {
// // [?a :edge ?b] (constant ?b x) <=> [?a :edge x]
// let (index, shutdown) = domain.reverse_propose(&source.source_attribute)
// .unwrap_or_else(|| panic!("No reverse index found for attribute {}", &source.source_attribute))
// .import_core(&nested.parent, &source.source_attribute);
// let frontier: Vec<T> = index.trace.advance_frontier().to_vec();
// index
// .filter(move |e, _v| *e == match_v)
// .enter_at(&nested, move |_, _, time| {
// let mut forwarded = time.clone(); forwarded.advance_by(&frontier);
// Product::new(forwarded, Default::default())
// })
// }
// other => panic!("Unexpected offset {}", other),
// }
// }
// _ => unimplemented!(),
// }
// }
}
impl<A> Implementable for Hector<A>
where
A: AsAid,
{
type A = A;
fn dependencies(&self) -> Dependencies<A> {
let attributes = self
.bindings
.iter()
.flat_map(|binding| {
if let Binding::Attribute(binding) = binding {
Some(binding.source_attribute.clone())
} else {
None
}
})
.collect::<HashSet<A>>();
Dependencies {
names: HashSet::new(),
attributes,
}
}
fn into_bindings(&self) -> Vec<Binding<Self::A>> {
self.bindings.clone()
}
fn implement<'b, S>(
&self,
nested: &mut Iterative<'b, S, u64>,
domain: &mut Domain<A, S::Timestamp>,
local_arrangements: &VariableMap<A, Iterative<'b, S, u64>>,
) -> (Implemented<'b, Self::A, S>, ShutdownHandle)
where
S: Scope,
S::Timestamp: Timestamp + Lattice + Rewind,
{
if self.bindings.is_empty() {
panic!("No bindings passed.");
} else if self.variables.is_empty() {
panic!("No variables requested.");
} else if self.bindings.len() == 1 {
self.implement_single_binding(nested, domain, local_arrangements)
// } else if self.bindings.len() == 2 {
// Hector::two_way(domain, local_arrangements, self.bindings[0].clone(), self.bindings[1].clone())
} else {
// In order to avoid delta pipelines looking at each
// other's data in naughty ways, we need to run them all
// inside a scope with lexicographic times.
let (joined, shutdown_handle) = nested.scoped::<AltNeu<Product<S::Timestamp,u64>>, _, _>("AltNeu", |inner| {
let scope = inner.clone();
// We cache aggressively, to avoid importing and
// wrapping things more than once.
let mut shutdown_handle = ShutdownHandle::empty();
let mut forward_counts = HashMap::new();
let mut forward_proposes = HashMap::new();
let mut forward_validates = HashMap::new();
let mut reverse_counts = HashMap::new();
let mut reverse_proposes = HashMap::new();
let mut reverse_validates = HashMap::new();
// Attempt to acquire a logger for tuple counts.
let logger = {
let register = scope.parent.log_register();
register.get::<DeclarativeEvent>("declarative")
};
// For each AttributeBinding (only AttributeBindings
// actually experience change), we construct a delta query
// driven by changes to that binding.
let changes = self.bindings.iter().enumerate()
.flat_map(|(idx, delta_binding)| match delta_binding {
Binding::Attribute(delta_binding) => {
// We need to determine an order on the attributes
// that ensures that each is bound by preceeding
// attributes. For now, we will take the requested order.
// @TODO use binding order returned here?
// might be problematic to ensure ordering is maintained?
let (variables, _) = plan_order(idx, &self.bindings);
let mut prefix = Vec::with_capacity(variables.len());
debug!("Source {:?}", delta_binding);
// We would like to avoid starting with single-variable
// (or even empty) prefixes, because the dataflow-y nature
// of this implementation means we will always be starting
// from attributes (which correspond to two-variable prefixes).
//
// But to get away with that we need to check for single-variable
// bindings in conflict with the source binding.
let propose = forward_proposes
.entry(delta_binding.source_attribute.to_string())
.or_insert_with(|| {
let (arranged, shutdown) = domain
.forward_propose(&delta_binding.source_attribute)
.expect("forward propose trace doesn't exist")
.import_frontier(&scope.parent.parent, &format!("Counts({:?})", &delta_binding.source_attribute));
shutdown_handle.add_button(shutdown);
arranged
});
let mut source_conflicts = source_conflicts(idx, &self.bindings);
let mut source = if !source_conflicts.is_empty() {
// @TODO there can be more than one conflict
// @TODO Not just constant bindings can cause issues here!
assert_eq!(source_conflicts.len(), 1);
let conflict = source_conflicts.pop().unwrap();
// for conflict in source_conflicts.drain(..) {
match conflict {
Binding::Constant(constant_binding) => {
prefix.push(constant_binding.variable);
let match_v = constant_binding.value.clone();
// Guaranteed to intersect with offset zero at this point.
match direction(&prefix, delta_binding.variables).unwrap() {
Direction::Forward(_) => {
prefix.push(delta_binding.variables.1);
propose
.filter(move |e, _v| *e == match_v)
.enter(&scope.parent)
.enter(&scope)
.as_collection(|e,v| vec![e.clone(), v.clone()])
}
Direction::Reverse(_) => {
prefix.push(delta_binding.variables.0);
propose
.filter(move |_e, v| *v == match_v)
.enter(&scope.parent)
.enter(&scope)
.as_collection(|v,e| vec![e.clone(), v.clone()])
}
}
}
_ => panic!("Can't resolve conflicts on {:?} bindings", conflict),
// }
}
} else {
prefix.push(delta_binding.variables.0);
prefix.push(delta_binding.variables.1);
propose
.enter(&scope.parent)
.enter(&scope)
.as_collection(|e,v| vec![e.clone(), v.clone()])
};
for target in variables.iter() {
match AsBinding::binds(&prefix, *target) {
Some(_) => { /* already bound */ continue },
None => {
debug!("Extending {:?} to {:?}", prefix, target);
let mut extenders: Vec<Extender<'_, _, Vec<Value>, _>> = vec![];
// Handling AntijoinBinding's requires dealing with recursion,
// because they wrap another binding. We don't actually want to wrap
// all of the below inside of a recursive function, because passing
// all these nested scopes and caches around leads to a world of lifetimes pain.
//
// Therefore we make our own little queue of bindings and process them iteratively.
let mut bindings: VecDeque<(usize, Binding<A>)> = VecDeque::new();
for (idx, binding) in self.bindings.iter().cloned().enumerate() {
if let Binding::Not(antijoin_binding) = binding {
bindings.push_back((idx, (*antijoin_binding.binding).clone()));
bindings.push_back((idx, Binding::Not(antijoin_binding)));
} else {
bindings.push_back((idx, binding));
}
}
while let Some((other_idx, other)) = bindings.pop_front() {
// We need to distinguish between conflicting relations
// that appear before the current one in the sequence (< idx),
// and those that appear afterwards.
// Ignore the current delta source itself.
if other_idx == idx { continue; }
// Ignore any binding not talking about the target variable.
if other.binds(*target).is_none() { continue; }
// Ignore any binding that isn't ready to extend, either
// because it doesn't even talk about the target variable, or
// because none of its dependent variables are bound by the prefix
// yet (relevant for attributes).
if !other.can_extend(&prefix, *target) {
debug!("{:?} can't extend", other);
continue;
}
let is_neu = other_idx >= idx;
debug!("\t...using {:?}", other);
match other {
Binding::Not(_other) => {
// Due to the way we enqueued the bindings above, we can now
// rely on the internal exteneder being available as the last
// extender on the stack.
let internal_extender = extenders.pop().expect("No internal extender available on stack.");
extenders.push(
Box::new(AntijoinExtender {
phantom: std::marker::PhantomData,
extender: internal_extender,
})
);
}
Binding::Constant(other) => {
extenders.append(&mut other.into_extender(&prefix));
}
Binding::BinaryPredicate(other) => {
extenders.append(&mut other.into_extender(&prefix));
}
Binding::Attribute(other) => {
match direction(&prefix, other.variables) {
Err(msg) => panic!(msg),
Ok(direction) => match direction {
Direction::Forward(offset) => {
let count = {
let name = format!("Counts({:?})", &delta_binding.source_attribute);
let count = forward_counts
.entry(other.source_attribute.to_string())
.or_insert_with(|| {
let (arranged, shutdown) =
domain.forward_count(&other.source_attribute)
.expect("forward count doesn't exist")
.import_frontier(&scope.parent.parent, &name);
shutdown_handle.add_button(shutdown);
arranged
});
let neu = is_neu;
count
.enter(&scope.parent)
.enter_at(&scope, move |_,_,t| AltNeu { time: t.clone(), neu })
};
;
let propose = {
let propose = forward_proposes
.entry(other.source_attribute.to_string())
.or_insert_with(|| {
let name = format!("Propose({:?})", &delta_binding.source_attribute);
let (arranged, shutdown) = domain
.forward_propose(&other.source_attribute)
.expect("forward propose doesn't exist")
.import_frontier(&scope.parent.parent, &name);
shutdown_handle.add_button(shutdown);
arranged
});
let neu = is_neu;
propose
.enter(&scope.parent)
.enter_at(&scope, move |_,_,t| AltNeu { time: t.clone(), neu })
};
let validate = {
let validate = forward_validates
.entry(other.source_attribute.to_string())
.or_insert_with(|| {
let name = format!("Validate({:?})", &delta_binding.source_attribute);
let (arranged, shutdown) = domain
.forward_validate(&other.source_attribute)
.expect("forward validate doesn't exist")
.import_frontier(&scope.parent.parent, &name);
shutdown_handle.add_button(shutdown);
arranged
});
let neu = is_neu;
validate
.enter(&scope.parent)
.enter_at(&scope, move |_,_,t| AltNeu { time: t.clone(), neu })
};
extenders.push(
Box::new(CollectionExtender {
phantom: std::marker::PhantomData,
count,
propose,
validate,
key_selector: Rc::new(move |prefix: &Vec<Value>| prefix.index(offset)),
})
);
},
Direction::Reverse(offset) => {
let count = {
let count = reverse_counts
.entry(other.source_attribute.to_string())
.or_insert_with(|| {
let name = format!("_Counts({:?})", &delta_binding.source_attribute);
let (arranged, shutdown) = domain
.reverse_count(&other.source_attribute)
.expect("reverse count doesn't exist")
.import_frontier(&scope.parent.parent, &name);
shutdown_handle.add_button(shutdown);
arranged
});
let neu = is_neu;
count
.enter(&scope.parent)
.enter_at(&scope, move |_,_,t| AltNeu { time: t.clone(), neu })
};
;
let propose = {
let propose = reverse_proposes
.entry(other.source_attribute.to_string())
.or_insert_with(|| {
let name = format!("_Propose({:?})", &delta_binding.source_attribute);
let (arranged, shutdown) = domain
.reverse_propose(&other.source_attribute)
.expect("reverse propose doesn't exist")
.import_frontier(&scope.parent.parent, &name);
shutdown_handle.add_button(shutdown);
arranged
});
let neu = is_neu;
propose
.enter(&scope.parent)
.enter_at(&scope, move |_,_,t| AltNeu { time: t.clone(), neu })
};
let validate = {
let validate = reverse_validates
.entry(other.source_attribute.to_string())
.or_insert_with(|| {
let name = format!("_Validate({:?})", &delta_binding.source_attribute);
let (arranged, shutdown) = domain
.reverse_validate(&other.source_attribute)
.expect("reverse validate doesn't exist")
.import_frontier(&scope.parent.parent, &name);
shutdown_handle.add_button(shutdown);
arranged
});
let neu = is_neu;
validate
.enter(&scope.parent)
.enter_at(&scope, move |_,_,t| AltNeu { time: t.clone(), neu })
};
extenders.push(
Box::new(CollectionExtender {
phantom: std::marker::PhantomData,
count,
propose,
validate,
key_selector: Rc::new(move |prefix: &Vec<Value>| prefix.index(offset)),
})
);
},
}
}
}
}
}
prefix.push(*target);
// @TODO impl ProposeExtensionMethod for Arranged
let extended = source.extend(&mut extenders[..]);
if logger.is_some() {
let worker_index = scope.index();
let source_attribute = delta_binding.source_attribute.to_string();
extended
// .inspect(move |x| { println!("{} extended: {:?}", source_attribute, x); })
.map(|_| ())
.consolidate()
.count()
.map(move |(_, count)| (Value::Eid(worker_index as u64), Value::Number(count as i64)))
.leave()
.leave()
.inspect(move |x| { println!("{}: {:?}", source_attribute, x); });
}
source = extended
.map(|(tuple,v)| {
let mut out = Vec::with_capacity(tuple.len() + 1);
out.append(&mut tuple.clone());
out.push(v);
out
})
}
}
}
if self.variables == prefix {
Some(source.inner)
} else {
let target_variables = self.variables.clone();
Some(source
.map(move |tuple| {
target_variables.iter()
.flat_map(|x| Some(tuple.index(AsBinding::binds(&prefix, *x).unwrap())))
.collect()
})
.inner)
}
}
_ => None
});
(inner.concatenate(changes).as_collection().leave(), shutdown_handle)
});
let relation = CollectionRelation {
variables: self.variables.clone(),
tuples: joined,
};
(Implemented::Collection(relation), shutdown_handle)
}
}
}
//
// GENERIC IMPLEMENTATION
//
/// Worst-case optimal extension of prefix collections.
///
/// Implementors take a set of `PrefixExtender`s and extend each prefix `P`
/// with every extension `E` that all extenders agree on.
trait ProposeExtensionMethod<'a, S: Scope, P: ExchangeData + Ord> {
    /// Extends every prefix with all extensions accepted by all `extenders`.
    fn extend<E: ExchangeData + Ord>(
        &self,
        extenders: &mut [Extender<'a, S, P, E>],
    ) -> Collection<S, (P, E)>;
}
impl<'a, S: Scope, P: ExchangeData + Ord> ProposeExtensionMethod<'a, S, P> for Collection<S, P> {
    /// Extends each prefix with all valid extensions, worst-case-optimal
    /// style: the extender claiming the fewest candidates proposes, and
    /// every other extender validates the proposals.
    ///
    /// # Panics
    ///
    /// Panics when `extenders` is empty.
    fn extend<E: ExchangeData + Ord>(
        &self,
        extenders: &mut [Extender<'a, S, P, E>],
    ) -> Collection<S, (P, E)> {
        if extenders.is_empty() {
            // @TODO don't panic
            panic!("No extenders specified.");
        } else if extenders.len() == 1 {
            // A single extender proposes directly; `propose` only borrows
            // the collection, so the previous `&self.clone()` was a
            // redundant clone.
            extenders[0].propose(self)
        } else {
            // Seed each prefix with a sentinel count (2^31) so any real
            // count reported by an extender is smaller.
            let mut counts = self.map(|p| (p, 1 << 31, 0));
            for (index, extender) in extenders.iter_mut().enumerate() {
                if let Some(new_counts) = extender.count(&counts, index) {
                    counts = new_counts;
                }
            }
            // Route each prefix to the extender that claimed the
            // smallest candidate count.
            let parts = counts
                .inner
                .partition(extenders.len() as u64, |((p, _, i), t, d)| {
                    (i as u64, (p, t, d))
                });
            let mut results = Vec::new();
            for (index, nominations) in parts.into_iter().enumerate() {
                let mut extensions = extenders[index].propose(&nominations.as_collection());
                for other in (0..extenders.len()).filter(|&x| x != index) {
                    extensions = extenders[other].validate(&extensions);
                }
                results.push(extensions.inner); // save extensions
            }
            self.scope().concatenate(results).as_collection()
        }
    }
}
/// A prefix extender that proposes exactly one constant value for every
/// prefix, and validates extensions by equality with that value.
struct ConstantExtender<P, V>
where
    V: ExchangeData + Hash,
{
    // Ties the extender to its prefix type without storing one.
    phantom: std::marker::PhantomData<P>,
    // The constant value proposed for (and validated against) every prefix.
    value: V,
}
impl<'a, S, V, P> PrefixExtender<S> for ConstantExtender<P, V>
where
    S: Scope,
    S::Timestamp: Lattice + ExchangeData,
    V: ExchangeData + Hash,
    P: ExchangeData,
{
    type Prefix = P;
    type Extension = V;

    /// A constant binding always proposes exactly one value, so it claims
    /// a count of 1 whenever the current best count is larger.
    fn count(
        &mut self,
        prefixes: &Collection<S, (P, usize, usize)>,
        index: usize,
    ) -> Option<Collection<S, (P, usize, usize)>> {
        Some(prefixes.map(move |(prefix, old_count, old_index)| {
            // `prefix` is owned inside this closure, so no clone is needed
            // (the previous `prefix.clone()` was redundant).
            if 1 < old_count {
                (prefix, 1, index)
            } else {
                (prefix, old_count, old_index)
            }
        }))
    }

    /// Proposes the constant value for every prefix.
    fn propose(&mut self, prefixes: &Collection<S, P>) -> Collection<S, (P, V)> {
        let value = self.value.clone();
        prefixes.map(move |prefix| (prefix, value.clone()))
    }

    /// Keeps only extensions equal to the constant value.
    fn validate(&mut self, extensions: &Collection<S, (P, V)>) -> Collection<S, (P, V)> {
        let target = self.value.clone();
        extensions.filter(move |(_prefix, extension)| *extension == target)
    }
}
/// A prefix extender that never proposes, but filters extensions by
/// comparing them against a prefix element with a binary predicate.
struct BinaryPredicateExtender<P, V>
where
    V: ExchangeData + Hash,
{
    // Ties the extender to its prefix/extension types without storing them.
    phantom: std::marker::PhantomData<(P, V)>,
    // The comparison to apply (EQ, NEQ, LT, LTE, GT, GTE).
    predicate: BinaryPredicate,
    // Which prefix element to compare against, and in which operand order.
    direction: Direction,
}
impl<'a, S, V, P> PrefixExtender<S> for BinaryPredicateExtender<P, V>
where
    S: Scope,
    S::Timestamp: Lattice + ExchangeData,
    V: ExchangeData + Hash,
    P: ExchangeData + IndexNode<V>,
{
    type Prefix = P;
    type Extension = V;

    /// Predicates never constrain the candidate count, so they opt out of
    /// the counting phase by returning `None`.
    fn count(
        &mut self,
        _prefixes: &Collection<S, (P, usize, usize)>,
        _index: usize,
    ) -> Option<Collection<S, (P, usize, usize)>> {
        None
    }

    /// Predicates cannot enumerate candidates; they only filter, so being
    /// asked to propose is a logic error upstream.
    fn propose(&mut self, prefixes: &Collection<S, P>) -> Collection<S, (P, V)> {
        prefixes.map(|_prefix| panic!("BinaryPredicateExtender should never be asked to propose."))
    }

    /// Filters extensions by comparing against the prefix element at the
    /// stored offset. In the `Reverse` direction the operands are swapped,
    /// so every comparison operator is mirrored relative to `Forward`.
    fn validate(&mut self, extensions: &Collection<S, (P, V)>) -> Collection<S, (P, V)> {
        use self::BinaryPredicate::{EQ, GT, GTE, LT, LTE, NEQ};
        match self.direction {
            Direction::Reverse(offset) => {
                match self.predicate {
                    LT => extensions
                        .filter(move |(prefix, extension)| *extension > prefix.index(offset)),
                    LTE => extensions
                        .filter(move |(prefix, extension)| *extension >= prefix.index(offset)),
                    GT => extensions
                        .filter(move |(prefix, extension)| *extension < prefix.index(offset)),
                    GTE => extensions
                        .filter(move |(prefix, extension)| *extension <= prefix.index(offset)),
                    EQ => extensions
                        .filter(move |(prefix, extension)| *extension == prefix.index(offset)),
                    NEQ => extensions
                        .filter(move |(prefix, extension)| *extension != prefix.index(offset)),
                }
            }
            Direction::Forward(offset) => {
                match self.predicate {
                    LT => extensions
                        .filter(move |(prefix, extension)| *extension < prefix.index(offset)),
                    LTE => extensions
                        .filter(move |(prefix, extension)| *extension <= prefix.index(offset)),
                    GT => extensions
                        .filter(move |(prefix, extension)| *extension > prefix.index(offset)),
                    GTE => extensions
                        .filter(move |(prefix, extension)| *extension >= prefix.index(offset)),
                    EQ => extensions
                        .filter(move |(prefix, extension)| *extension == prefix.index(offset)),
                    NEQ => extensions
                        .filter(move |(prefix, extension)| *extension != prefix.index(offset)),
                }
            }
        }
    }
}
/// A prefix extender backed by three arranged traces: a count trace keyed
/// by `K`, a propose trace mapping `K` to candidate extensions `V`, and a
/// validate trace keyed by `(K, V)` pairs.
struct CollectionExtender<S, K, V, P, F, TrCount, TrPropose, TrValidate>
where
    S: Scope,
    S::Timestamp: Lattice + ExchangeData,
    K: ExchangeData,
    V: ExchangeData,
    F: Fn(&P) -> K,
    TrCount: TraceReader<Key = K, Val = (), Time = S::Timestamp, R = isize> + Clone + 'static,
    TrCount::Batch: BatchReader<TrCount::Key, TrCount::Val, S::Timestamp, TrCount::R> + 'static,
    TrCount::Cursor: Cursor<TrCount::Key, TrCount::Val, S::Timestamp, TrCount::R> + 'static,
    TrPropose: TraceReader<Key = K, Val = V, Time = S::Timestamp, R = isize> + Clone + 'static,
    TrPropose::Batch:
        BatchReader<TrPropose::Key, TrPropose::Val, S::Timestamp, TrPropose::R> + 'static,
    TrPropose::Cursor: Cursor<TrPropose::Key, TrPropose::Val, S::Timestamp, TrPropose::R> + 'static,
    TrValidate:
        TraceReader<Key = (K, V), Val = (), Time = S::Timestamp, R = isize> + Clone + 'static,
    TrValidate::Batch:
        BatchReader<TrValidate::Key, TrValidate::Val, S::Timestamp, TrValidate::R> + 'static,
    TrValidate::Cursor:
        Cursor<TrValidate::Key, TrValidate::Val, S::Timestamp, TrValidate::R> + 'static,
{
    // Ties the extender to its prefix type without storing one.
    phantom: std::marker::PhantomData<P>,
    // Arrangement of candidate counts per key.
    count: Arranged<S, TrCount>,
    // Arrangement of candidate extensions per key.
    propose: Arranged<S, TrPropose>,
    // Arrangement of valid (key, extension) pairs.
    validate: Arranged<S, TrValidate>,
    // Extracts the lookup key from a prefix; shared across the operators.
    key_selector: Rc<F>,
}
impl<'a, S, K, V, P, F, TrCount, TrPropose, TrValidate> PrefixExtender<S>
    for CollectionExtender<S, K, V, P, F, TrCount, TrPropose, TrValidate>
where
    S: Scope,
    S::Timestamp: Lattice + ExchangeData,
    K: ExchangeData + Hash,
    V: ExchangeData + Hash,
    P: ExchangeData,
    F: Fn(&P) -> K + 'static,
    TrCount: TraceReader<Key = K, Val = (), Time = S::Timestamp, R = isize> + Clone + 'static,
    TrCount::Batch: BatchReader<TrCount::Key, TrCount::Val, S::Timestamp, TrCount::R> + 'static,
    TrCount::Cursor: Cursor<TrCount::Key, TrCount::Val, S::Timestamp, TrCount::R> + 'static,
    TrPropose: TraceReader<Key = K, Val = V, Time = S::Timestamp, R = isize> + Clone + 'static,
    TrPropose::Batch:
        BatchReader<TrPropose::Key, TrPropose::Val, S::Timestamp, TrPropose::R> + 'static,
    TrPropose::Cursor: Cursor<TrPropose::Key, TrPropose::Val, S::Timestamp, TrPropose::R> + 'static,
    TrValidate:
        TraceReader<Key = (K, V), Val = (), Time = S::Timestamp, R = isize> + Clone + 'static,
    TrValidate::Batch:
        BatchReader<TrValidate::Key, TrValidate::Val, S::Timestamp, TrValidate::R> + 'static,
    TrValidate::Cursor:
        Cursor<TrValidate::Key, TrValidate::Val, S::Timestamp, TrValidate::R> + 'static,
{
    type Prefix = P;
    type Extension = V;

    /// Replaces each prefix's `(count, index)` annotation with this
    /// extender's count whenever it is strictly smaller, by looking the
    /// key up in the count trace.
    fn count(
        &mut self,
        prefixes: &Collection<S, (P, usize, usize)>,
        index: usize,
    ) -> Option<Collection<S, (P, usize, usize)>> {
        // This method takes a stream of `(prefix, time, diff)`
        // changes, and we want to produce the corresponding stream of
        // `((prefix, count), time, diff)` changes, just by looking up
        // `count` in `count_trace`. We are just doing a stream of
        // changes and a stream of look-ups, no consolidation or any
        // funny business like that. We *could* organize the input
        // differences by key and save some time, or we could skip
        // that.
        let counts = &self.count;
        let mut counts_trace = Some(counts.trace.clone());
        let mut stash = HashMap::new();
        let logic1 = self.key_selector.clone();
        let logic2 = self.key_selector.clone();
        // Route each update to the worker owning its key's hash.
        let exchange = Exchange::new(move |update: &((P, usize, usize), S::Timestamp, isize)| {
            logic1(&(update.0).0).hashed().as_u64()
        });
        let mut buffer1 = Vec::new();
        let mut buffer2 = Vec::new();
        // TODO: This should be a custom operator with no connection from the second input to the output.
        Some(
            prefixes
                .inner
                .binary_frontier(&counts.stream, exchange, Pipeline, "Count", move |_, _| {
                    move |input1, input2, output| {
                        // drain the first input, stashing requests.
                        input1.for_each(|capability, data| {
                            data.swap(&mut buffer1);
                            stash
                                .entry(capability.retain())
                                .or_insert_with(Vec::new)
                                .extend(buffer1.drain(..))
                        });
                        // advance the `distinguish_since` frontier to allow all merges.
                        input2.for_each(|_, batches| {
                            batches.swap(&mut buffer2);
                            for batch in buffer2.drain(..) {
                                if let Some(ref mut trace) = counts_trace {
                                    trace.distinguish_since(batch.upper());
                                }
                            }
                        });
                        if let Some(ref mut trace) = counts_trace {
                            for (capability, prefixes) in stash.iter_mut() {
                                // defer requests at incomplete times.
                                // NOTE: not all updates may be at complete times, but if this test fails then none of them are.
                                if !input2.frontier.less_equal(capability.time()) {
                                    let mut session = output.session(capability);
                                    // sort requests for in-order cursor traversal. could consolidate?
                                    prefixes
                                        .sort_by(|x, y| logic2(&(x.0).0).cmp(&logic2(&(y.0).0)));
                                    let (mut cursor, storage) = trace.cursor();
                                    for &mut (
                                        (ref prefix, old_count, old_index),
                                        ref time,
                                        ref mut diff,
                                    ) in prefixes.iter_mut()
                                    {
                                        // only emit updates whose time is complete.
                                        if !input2.frontier.less_equal(time) {
                                            let key = logic2(prefix);
                                            cursor.seek_key(&storage, &key);
                                            if cursor.get_key(&storage) == Some(&key) {
                                                // accumulate the key's multiplicity up to `time`.
                                                let mut count = 0;
                                                cursor.map_times(&storage, |t, d| {
                                                    if t.less_equal(time) {
                                                        count += d;
                                                    }
                                                });
                                                // assert!(count >= 0);
                                                let count = count as usize;
                                                if count > 0 {
                                                    // keep whichever annotation is smaller.
                                                    if count < old_count {
                                                        session.give((
                                                            (prefix.clone(), count, index),
                                                            time.clone(),
                                                            *diff,
                                                        ));
                                                    } else {
                                                        session.give((
                                                            (prefix.clone(), old_count, old_index),
                                                            time.clone(),
                                                            *diff,
                                                        ));
                                                    }
                                                }
                                            }
                                            // zero the diff so the update is dropped below.
                                            *diff = 0;
                                        }
                                    }
                                    prefixes.retain(|ptd| ptd.2 != 0);
                                }
                            }
                        }
                        // drop fully processed capabilities.
                        stash.retain(|_, prefixes| !prefixes.is_empty());
                        // advance the consolidation frontier (TODO: wierd lexicographic times!)
                        if let Some(trace) = counts_trace.as_mut() {
                            trace.advance_by(&input1.frontier().frontier());
                        }
                        // release the trace handle once no further input can arrive.
                        if input1.frontier().is_empty() && stash.is_empty() {
                            counts_trace = None;
                        }
                    }
                })
                .as_collection(),
        )
    }

    /// Pairs each prefix with every extension the propose trace holds for
    /// its key, at the prefix's time.
    fn propose(&mut self, prefixes: &Collection<S, P>) -> Collection<S, (P, V)> {
        let propose = &self.propose;
        let mut propose_trace = Some(propose.trace.clone());
        let mut stash = HashMap::new();
        let logic1 = self.key_selector.clone();
        let logic2 = self.key_selector.clone();
        let mut buffer1 = Vec::new();
        let mut buffer2 = Vec::new();
        // Route each update to the worker owning its key's hash.
        let exchange = Exchange::new(move |update: &(P, S::Timestamp, isize)| {
            logic1(&update.0).hashed().as_u64()
        });
        prefixes
            .inner
            .binary_frontier(
                &propose.stream,
                exchange,
                Pipeline,
                "Propose",
                move |_, _| {
                    move |input1, input2, output| {
                        // drain the first input, stashing requests.
                        input1.for_each(|capability, data| {
                            data.swap(&mut buffer1);
                            stash
                                .entry(capability.retain())
                                .or_insert_with(Vec::new)
                                .extend(buffer1.drain(..))
                        });
                        // advance the `distinguish_since` frontier to allow all merges.
                        input2.for_each(|_, batches| {
                            batches.swap(&mut buffer2);
                            for batch in buffer2.drain(..) {
                                if let Some(ref mut trace) = propose_trace {
                                    trace.distinguish_since(batch.upper());
                                }
                            }
                        });
                        if let Some(ref mut trace) = propose_trace {
                            for (capability, prefixes) in stash.iter_mut() {
                                // defer requests at incomplete times.
                                // NOTE: not all updates may be at complete times, but if this test fails then none of them are.
                                if !input2.frontier.less_equal(capability.time()) {
                                    let mut session = output.session(capability);
                                    // sort requests for in-order cursor traversal. could consolidate?
                                    prefixes.sort_by(|x, y| logic2(&x.0).cmp(&logic2(&y.0)));
                                    let (mut cursor, storage) = trace.cursor();
                                    for &mut (ref prefix, ref time, ref mut diff) in
                                        prefixes.iter_mut()
                                    {
                                        // only emit updates whose time is complete.
                                        if !input2.frontier.less_equal(time) {
                                            let key = logic2(prefix);
                                            cursor.seek_key(&storage, &key);
                                            if cursor.get_key(&storage) == Some(&key) {
                                                // enumerate every value for this key.
                                                while let Some(value) = cursor.get_val(&storage) {
                                                    let mut count = 0;
                                                    cursor.map_times(&storage, |t, d| {
                                                        if t.less_equal(time) {
                                                            count += d;
                                                        }
                                                    });
                                                    // assert!(count >= 0);
                                                    if count > 0 {
                                                        session.give((
                                                            (prefix.clone(), value.clone()),
                                                            time.clone(),
                                                            *diff,
                                                        ));
                                                    }
                                                    cursor.step_val(&storage);
                                                }
                                                cursor.rewind_vals(&storage);
                                            }
                                            // zero the diff so the update is dropped below.
                                            *diff = 0;
                                        }
                                    }
                                    prefixes.retain(|ptd| ptd.2 != 0);
                                }
                            }
                        }
                        // drop fully processed capabilities.
                        stash.retain(|_, prefixes| !prefixes.is_empty());
                        // advance the consolidation frontier (TODO: wierd lexicographic times!)
                        if let Some(trace) = propose_trace.as_mut() {
                            trace.advance_by(&input1.frontier().frontier());
                        }
                        // release the trace handle once no further input can arrive.
                        if input1.frontier().is_empty() && stash.is_empty() {
                            propose_trace = None;
                        }
                    }
                },
            )
            .as_collection()
    }

    /// Keeps only the `(prefix, extension)` pairs whose `(key, extension)`
    /// is present with positive multiplicity in the validate trace.
    fn validate(&mut self, extensions: &Collection<S, (P, V)>) -> Collection<S, (P, V)> {
        // This method takes a stream of `(prefix, time, diff)` changes, and we want to produce the corresponding
        // stream of `((prefix, count), time, diff)` changes, just by looking up `count` in `count_trace`. We are
        // just doing a stream of changes and a stream of look-ups, no consolidation or any funny business like
        // that. We *could* organize the input differences by key and save some time, or we could skip that.
        let validate = &self.validate;
        let mut validate_trace = Some(validate.trace.clone());
        let mut stash = HashMap::new();
        let logic1 = self.key_selector.clone();
        let logic2 = self.key_selector.clone();
        let mut buffer1 = Vec::new();
        let mut buffer2 = Vec::new();
        // Route each update to the worker owning the (key, extension) hash.
        let exchange = Exchange::new(move |update: &((P, V), S::Timestamp, isize)| {
            (logic1(&(update.0).0).clone(), ((update.0).1).clone())
                .hashed()
                .as_u64()
        });
        extensions
            .inner
            .binary_frontier(
                &validate.stream,
                exchange,
                Pipeline,
                "Validate",
                move |_, _| {
                    move |input1, input2, output| {
                        // drain the first input, stashing requests.
                        input1.for_each(|capability, data| {
                            data.swap(&mut buffer1);
                            stash
                                .entry(capability.retain())
                                .or_insert_with(Vec::new)
                                .extend(buffer1.drain(..))
                        });
                        // advance the `distinguish_since` frontier to allow all merges.
                        input2.for_each(|_, batches| {
                            batches.swap(&mut buffer2);
                            for batch in buffer2.drain(..) {
                                if let Some(ref mut trace) = validate_trace {
                                    trace.distinguish_since(batch.upper());
                                }
                            }
                        });
                        if let Some(ref mut trace) = validate_trace {
                            for (capability, prefixes) in stash.iter_mut() {
                                // defer requests at incomplete times.
                                // NOTE: not all updates may be at complete times, but if this test fails then none of them are.
                                if !input2.frontier.less_equal(capability.time()) {
                                    let mut session = output.session(capability);
                                    // sort requests for in-order cursor traversal. could consolidate?
                                    prefixes.sort_by(|x, y| {
                                        (logic2(&(x.0).0), &((x.0).1))
                                            .cmp(&(logic2(&(y.0).0), &((y.0).1)))
                                    });
                                    let (mut cursor, storage) = trace.cursor();
                                    for &mut (ref prefix, ref time, ref mut diff) in
                                        prefixes.iter_mut()
                                    {
                                        // only emit updates whose time is complete.
                                        if !input2.frontier.less_equal(time) {
                                            let key = (logic2(&prefix.0), (prefix.1).clone());
                                            cursor.seek_key(&storage, &key);
                                            if cursor.get_key(&storage) == Some(&key) {
                                                // accumulate the pair's multiplicity up to `time`.
                                                let mut count = 0;
                                                cursor.map_times(&storage, |t, d| {
                                                    if t.less_equal(time) {
                                                        count += d;
                                                    }
                                                });
                                                // assert!(count >= 0);
                                                if count > 0 {
                                                    session.give((
                                                        prefix.clone(),
                                                        time.clone(),
                                                        *diff,
                                                    ));
                                                }
                                            }
                                            // zero the diff so the update is dropped below.
                                            *diff = 0;
                                        }
                                    }
                                    prefixes.retain(|ptd| ptd.2 != 0);
                                }
                            }
                        }
                        // drop fully processed capabilities.
                        stash.retain(|_, prefixes| !prefixes.is_empty());
                        // advance the consolidation frontier (TODO: wierd lexicographic times!)
                        if let Some(trace) = validate_trace.as_mut() {
                            trace.advance_by(&input1.frontier().frontier());
                        }
                        // release the trace handle once no further input can arrive.
                        if input1.frontier().is_empty() && stash.is_empty() {
                            validate_trace = None;
                        }
                    }
                },
            )
            .as_collection()
    }
}
/// Wraps another extender and negates its validation: extensions the inner
/// extender *accepts* are subtracted from the output.
struct AntijoinExtender<'a, S, V, P>
where
    S: Scope,
    S::Timestamp: Lattice + ExchangeData,
    V: ExchangeData,
{
    // Ties the extender to its prefix type without storing one.
    phantom: std::marker::PhantomData<P>,
    // The inner extender whose accepted extensions are excluded.
    extender: Extender<'a, S, P, V>,
}
impl<'a, S, V, P> PrefixExtender<S> for AntijoinExtender<'a, S, V, P>
where
    S: Scope,
    S::Timestamp: Lattice + ExchangeData,
    V: ExchangeData + Hash,
    P: ExchangeData,
{
    type Prefix = P;
    type Extension = V;

    /// Antijoins never constrain the candidate count, so they opt out of
    /// the counting phase by returning `None`.
    fn count(
        &mut self,
        _prefixes: &Collection<S, (P, usize, usize)>,
        _index: usize,
    ) -> Option<Collection<S, (P, usize, usize)>> {
        None
    }

    /// Antijoins cannot enumerate candidates; they only filter, so being
    /// asked to propose is a logic error upstream.
    fn propose(&mut self, prefixes: &Collection<S, P>) -> Collection<S, (P, V)> {
        prefixes.map(|_prefix| panic!("AntijoinExtender should never be asked to propose."))
    }

    /// Subtracts the inner extender's accepted extensions from the input:
    /// what remains are exactly the extensions the inner extender rejects.
    fn validate(&mut self, extensions: &Collection<S, (P, V)>) -> Collection<S, (P, V)> {
        extensions.concat(&self.extender.validate(extensions).negate())
    }
}
|
use serde_json::{Map, Value};
use std::fmt;
/// Opaque error produced when an incoming message cannot be parsed.
#[derive(Debug)]
pub struct ParseError;
impl fmt::Display for ParseError {
    /// Renders the fixed diagnostic label for this error.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("ParseError")
    }
}
impl From<serde_json::Error> for ParseError {
fn from(_: serde_json::Error) -> Self {
ParseError
}
}
/// A message exchanged with the peer.
///
/// NOTE(review): the variants mirror OCPP-J RPC framing (Call / CallResult /
/// CallError) — confirm against the protocol specification in use.
#[derive(Debug, Clone)]
pub enum Message {
    /// A request initiated by one side.
    Call {
        /// Correlates this call with its eventual result or error.
        unique_id: String,
        /// Name of the remote action being invoked.
        action: String,
        /// Action-specific payload.
        data: Map<String, Value>,
    },
    /// A successful response to a previous `Call` with the same `unique_id`.
    CallResult {
        unique_id: String,
        data: Map<String, Value>,
    },
    /// An error response to a previous `Call` with the same `unique_id`.
    CallError {
        unique_id: String,
        error_code: String,
        error_description: String,
    },
}
impl fmt::Display for Message {
    /// Renders the message in OCPP-J-style wire framing: `[MessageTypeId, ...]`.
    ///
    /// NOTE(review): this is a loose, debug-style rendering (fields are not
    /// JSON-quoted), so the output is not guaranteed to be valid JSON.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match &self {
            Message::Call {
                unique_id,
                action,
                data,
            } => write!(f, "[2, {}, {}, {:?}]", unique_id, action, data),
            Message::CallResult { unique_id, data } => write!(f, "[3, {}, {:?}]", unique_id, data),
            Message::CallError {
                unique_id,
                error_code,
                error_description,
            } => write!(
                f,
                // CallError uses MessageTypeId 4 in OCPP-J; the previous "3"
                // made error frames indistinguishable from CallResult frames.
                "[4, {}, {}, {}]",
                unique_id, error_code, error_description
            ),
        }
    }
}
|
mod pathfinder;
mod printer;
mod renamer;
use printer::Print;
use renamer::Renamer;
use std::io::{stdin, Read};
fn main() {
let mut renamer = Renamer::new();
if let Err(e) = renamer.start() {
Print::error(format!("Project rename failed: {}", e));
}
Print::prompt("Press Enter to exit.");
let _ = stdin().read(&mut [0u8]);
}
|
use crate::{client::StdoutWriter, requests::Request, responses::Response};
use async_trait::async_trait;
/// Trait for a debug adapter.
///
/// Adapters are the main backbone of a debug server. They get a `handle_request`
/// call for each incoming request. Responses are the return values of these calls.
#[async_trait]
pub trait Adapter {
    /// Accept (and take ownership of) an incoming request.
    ///
    /// This is the primary entry point for debug adapters, where deserialized requests
    /// can be processed.
    ///
    /// The `stdout_writer` reference can be used to send events and reverse requests
    /// to the client.
    ///
    /// # Error handling
    ///
    /// This function always returns a valid `Response` object, however, that response
    /// itself may be an error response. As such, implementors should map their errors to
    /// an error response to allow clients to handle them. This is in the interest of users -
    /// the debug adapter is not something that users directly interact with nor something
    /// that they necessarily know about. From the users' perspective, it's an implementation
    /// detail and they are using their editor to debug something.
    async fn handle_request(
        &mut self,
        request: Request,
        stdout_writer: &mut StdoutWriter,
    ) -> Response;
}
|
#![no_std]
extern crate alloc;
use alloc::borrow::ToOwned;
use alloc::boxed::Box;
use alloc::string::ToString;
use alloc::vec;
use alloc::vec::Vec;
use prost::Message;
extern crate tests_infra;
/// Generated code for the `foo.bar_baz` protobuf package (ident-conversion tests).
pub mod foo {
    pub mod bar_baz {
        include!(concat!(env!("OUT_DIR"), "/foo.bar_baz.rs"));
    }
}
/// Generated messages exercising (mutually) recursive message nesting.
pub mod nesting {
    include!(concat!(env!("OUT_DIR"), "/nesting.rs"));
}
/// Generated messages with a oneof that recursively contains its own type.
pub mod recursive_oneof {
    include!(concat!(env!("OUT_DIR"), "/recursive_oneof.rs"));
}
/// This tests the custom attributes support by abusing docs.
///
/// Docs really are full-blown attributes. So we use them to ensure we can place them on everything
/// we need. If they aren't put onto something or allowed not to be there (by the generator),
/// compilation fails.
#[deny(missing_docs)]
pub mod custom_attributes {
    include!(concat!(env!("OUT_DIR"), "/foo.custom.attrs.rs"));
}
/// Also for testing custom attributes, but on oneofs.
///
/// Unfortunately, an OneOf field generates a companion module in the .rs file. There's no
/// reasonable way to place a doc comment on that, so we do the test with `derive(Ord)` and have it
/// in a separate file.
pub mod oneof_attributes {
    include!(concat!(env!("OUT_DIR"), "/foo.custom.one_of_attrs.rs"));
}
/// Issue https://github.com/danburkert/prost/issues/118
///
/// When a message contains an enum field with a default value, we
/// must ensure that the appropriate name conventions are used.
pub mod default_enum_value {
    include!(concat!(env!("OUT_DIR"), "/default_enum_value.rs"));
}
/// Generated messages using proto2 groups, including nested groups.
pub mod groups {
    include!(concat!(env!("OUT_DIR"), "/groups.rs"));
}
use tests_infra::*;
use alloc::collections::{BTreeMap, BTreeSet};
use protobuf::test_messages::proto3::TestAllTypesProto3;
// Tests
mod tests {
use super::*;
    /// Round-trips a set of hand-picked encodings of `TestAllTypesProto3`,
    /// mostly harvested from failed fuzz runs, through decode + re-encode.
    pub fn test_all_types_proto3() {
        // Some selected encoded messages, mostly collected from failed fuzz runs.
        let msgs: &[&[u8]] = &[
            &[0x28, 0x28, 0x28, 0xFF, 0xFF, 0xFF, 0xFF, 0x68],
            &[0x92, 0x01, 0x00, 0x92, 0xF4, 0x01, 0x02, 0x00, 0x00],
            &[0x5d, 0xff, 0xff, 0xff, 0xff, 0x28, 0xff, 0xff, 0x21],
            &[0x98, 0x04, 0x02, 0x08, 0x0B, 0x98, 0x04, 0x02, 0x08, 0x02],
            // optional_int32: -1
            &[0x08, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x08],
            // repeated_bool: [true, true]
            &[0xDA, 0x02, 0x02, 0x2A, 0x03],
            // oneof_double: nan
            &[0xb1, 0x07, 0xf6, 0x3d, 0xf5, 0xff, 0x27, 0x3d, 0xf5, 0xff],
            // optional_float: -0.0
            &[0xdd, 0x00, 0x00, 0x00, 0x00, 0x80],
            // optional_value: nan
            &[
                0xE2, 0x13, 0x1B, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11,
                0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
                0xFF, 0xFF, 0x08, 0xFF, 0x0E,
            ],
        ];
        for msg in msgs {
            roundtrip::<TestAllTypesProto3>(msg).unwrap();
        }
    }
    /// Verifies that protobuf field names which collide with Rust keywords
    /// are generated as raw identifiers (`r#as`, ...) or with a trailing
    /// underscore (`self_`, `super_`, ...), and that the message still
    /// round-trips through encode/decode.
    pub fn test_ident_conversions() {
        let msg = foo::bar_baz::FooBarBaz {
            foo_bar_baz: 42,
            fuzz_busters: vec![foo::bar_baz::foo_bar_baz::FuzzBuster {
                t: BTreeMap::<i32, foo::bar_baz::FooBarBaz>::new(),
                nested_self: None,
            }],
            p_i_e: 0,
            // Every Rust keyword becomes a raw identifier in generated code.
            r#as: 4,
            r#break: 5,
            r#const: 6,
            r#continue: 7,
            r#else: 8,
            r#enum: 9,
            r#false: 10,
            r#fn: 11,
            r#for: 12,
            r#if: 13,
            r#impl: 14,
            r#in: 15,
            r#let: 16,
            r#loop: 17,
            r#match: 18,
            r#mod: 19,
            r#move: 20,
            r#mut: 21,
            r#pub: 22,
            r#ref: 23,
            r#return: 24,
            r#static: 25,
            r#struct: 26,
            r#trait: 27,
            r#true: 28,
            r#type: 29,
            r#unsafe: 30,
            r#use: 31,
            r#where: 32,
            r#while: 33,
            r#dyn: 34,
            r#abstract: 35,
            r#become: 36,
            r#box: 37,
            r#do: 38,
            r#final: 39,
            r#macro: 40,
            r#override: 41,
            r#priv: 42,
            r#typeof: 43,
            r#unsized: 44,
            r#virtual: 45,
            r#yield: 46,
            r#async: 47,
            r#await: 48,
            r#try: 49,
            // Path-segment keywords cannot be raw identifiers, so they get
            // a trailing underscore instead.
            self_: 50,
            super_: 51,
            extern_: 52,
            crate_: 53,
        };
        let _ = foo::bar_baz::foo_bar_baz::Self_ {};
        // Test enum ident conversion.
        let _ = foo::bar_baz::foo_bar_baz::StrawberryRhubarbPie::Foo;
        let _ = foo::bar_baz::foo_bar_baz::StrawberryRhubarbPie::Bar;
        let _ = foo::bar_baz::foo_bar_baz::StrawberryRhubarbPie::FooBar;
        let _ = foo::bar_baz::foo_bar_baz::StrawberryRhubarbPie::FuzzBuster;
        let _ = foo::bar_baz::foo_bar_baz::StrawberryRhubarbPie::NormalRustEnumCase;
        let mut buf = Vec::new();
        msg.encode(&mut buf).expect("encode");
        roundtrip::<foo::bar_baz::FooBarBaz>(&buf).unwrap();
    }
    /// Checks that custom type attributes were applied by the generator:
    /// inserting into a `BTreeSet` requires the `Ord` derive that was
    /// added via custom attributes.
    pub fn test_custom_type_attributes() {
        // We abuse the ident conversion protobuf for the custom attribute additions. We placed
        // `Ord` on the FooBarBaz (which is not implemented by ordinary messages).
        let mut set1 = BTreeSet::new();
        let msg1 = foo::bar_baz::FooBarBaz::default();
        set1.insert(msg1);
        // Similar, but for oneof fields
        let mut set2 = BTreeSet::new();
        let msg2 = oneof_attributes::Msg::default();
        set2.insert(msg2.field);
    }
/// Compile-time check that recursive messages are generated with `Box`
/// for direct self-references, while repeated/map fields hold the message
/// inline (Vec/BTreeMap already provide the indirection).
pub fn test_nesting() {
use crate::nesting::{A, B};
let _ = A {
a: Some(Box::new(A::default())),
repeated_a: Vec::<A>::new(),
map_a: BTreeMap::<i32, A>::new(),
b: Some(Box::new(B::default())),
repeated_b: Vec::<B>::new(),
map_b: BTreeMap::<i32, B>::new(),
};
}
/// Checks the decoder's recursion limit on singly-nested messages:
/// 100 levels decode, 101 levels are rejected.
pub fn test_deep_nesting() {
    // Wraps `A::default()` in `depth` extra levels of `A.a`, encodes the
    // chain, and attempts to decode it back.
    fn build_and_roundtrip(depth: usize) -> Result<(), prost::DecodeError> {
        use crate::nesting::A;
        let msg = (0..depth).fold(Box::new(A::default()), |inner, _| {
            let mut outer = Box::new(A::default());
            outer.a = Some(inner);
            outer
        });
        let mut encoded = Vec::new();
        msg.encode(&mut encoded).unwrap();
        A::decode(&encoded[..]).map(|_| ())
    }
    assert!(build_and_roundtrip(100).is_ok());
    assert!(build_and_roundtrip(101).is_err());
}
/// Checks the decoder's recursion limit through a recursive oneof:
/// 99 wrapping levels decode, 100 are rejected.
pub fn test_deep_nesting_oneof() {
    fn build_and_roundtrip(depth: usize) -> Result<(), prost::DecodeError> {
        use crate::recursive_oneof::{a, A, C};
        // Innermost message carries the terminal `C` variant.
        let leaf = Box::new(A {
            kind: Some(a::Kind::C(C {})),
        });
        // Wrap it `depth` times in the recursive `A` variant.
        let msg = (0..depth).fold(leaf, |inner, _| {
            Box::new(A {
                kind: Some(a::Kind::A(inner)),
            })
        });
        let mut encoded = Vec::new();
        msg.encode(&mut encoded).unwrap();
        A::decode(&encoded[..]).map(|_| ())
    }
    assert!(build_and_roundtrip(99).is_ok());
    assert!(build_and_roundtrip(100).is_err());
}
/// Checks the decoder's recursion limit through nested proto2 groups:
/// 50 levels decode, 51 are rejected (lower than plain messages —
/// presumably each group level consumes extra recursion budget).
pub fn test_deep_nesting_group() {
    fn build_and_roundtrip(depth: usize) -> Result<(), prost::DecodeError> {
        use crate::groups::{nested_group2::OptionalGroup, NestedGroup2};
        let msg = (0..depth).fold(NestedGroup2::default(), |inner, _| NestedGroup2 {
            optionalgroup: Some(Box::new(OptionalGroup {
                nested_group: Some(inner),
            })),
        });
        let mut encoded = Vec::new();
        msg.encode(&mut encoded).unwrap();
        NestedGroup2::decode(&encoded[..]).map(|_| ())
    }
    assert!(build_and_roundtrip(50).is_ok());
    assert!(build_and_roundtrip(51).is_err());
}
/// Checks the decoder's recursion limit through repeated fields:
/// 100 levels of single-element `Vec` nesting decode, 101 are rejected.
pub fn test_deep_nesting_repeated() {
    fn build_and_roundtrip(depth: usize) -> Result<(), prost::DecodeError> {
        use crate::nesting::C;
        // Each level is a `C` whose repeated field holds exactly the
        // previous level.
        let msg = (0..depth).fold(C::default(), |inner, _| {
            let mut outer = C::default();
            outer.r.push(inner);
            outer
        });
        let mut encoded = Vec::new();
        msg.encode(&mut encoded).unwrap();
        C::decode(&encoded[..]).map(|_| ())
    }
    assert!(build_and_roundtrip(100).is_ok());
    assert!(build_and_roundtrip(101).is_err());
}
/// Checks the decoder's recursion limit through map values:
/// 50 levels of single-entry map nesting decode, 51 are rejected
/// (maps, like groups, use more recursion budget than plain messages).
pub fn test_deep_nesting_map() {
    fn build_and_roundtrip(depth: usize) -> Result<(), prost::DecodeError> {
        use crate::nesting::D;
        let msg = (0..depth).fold(D::default(), |inner, _| {
            let mut outer = D::default();
            outer.m.insert("foo".to_owned(), inner);
            outer
        });
        let mut encoded = Vec::new();
        msg.encode(&mut encoded).unwrap();
        D::decode(&encoded[..]).map(|_| ())
    }
    assert!(build_and_roundtrip(50).is_ok());
    assert!(build_and_roundtrip(51).is_err());
}
/// Compile-time check that a recursive oneof is generated with `Box`
/// indirection (both for the oneof variant and the message field),
/// so the types have a finite size.
pub fn test_recursive_oneof() {
use crate::recursive_oneof::{a, A, B, C};
let _ = A {
kind: Some(a::Kind::B(Box::new(B {
a: Some(Box::new(A {
kind: Some(a::Kind::C(C {})),
})),
}))),
};
}
/// Verifies that proto2 `[default = ...]` enum values survive code
/// generation: the getters on a default message must return the declared
/// defaults, including variants whose names needed case normalization.
pub fn test_default_enum() {
let msg = default_enum_value::Test::default();
assert_eq!(msg.privacy_level_1(), default_enum_value::PrivacyLevel::One);
assert_eq!(
msg.privacy_level_3(),
default_enum_value::PrivacyLevel::PrivacyLevelThree
);
assert_eq!(
msg.privacy_level_4(),
default_enum_value::PrivacyLevel::PrivacyLevelprivacyLevelFour
);
}
/// Exercises proto2 group encoding/decoding against hand-written wire
/// bytes: optional groups, skipping unknown groups, and repeated groups.
pub fn test_group() {
// optional group
// Expected wire bytes: 0x0B/0x0C are the start/end-group tags for
// field 1; 0x10 0x20 is int32 field 2 = 32.
let msg1_bytes = &[0x0B, 0x10, 0x20, 0x0C];
let msg1 = groups::Test1 {
groupa: Some(groups::test1::GroupA { i2: Some(32) }),
};
let mut bytes = Vec::new();
msg1.encode(&mut bytes).unwrap();
assert_eq!(&bytes, msg1_bytes);
// skip group while decoding
let data: &[u8] = &[
0x0B, // start group (tag=1)
0x30, 0x01, // unused int32 (tag=6)
0x2B, 0x30, 0xFF, 0x01, 0x2C, // unused group (tag=5)
0x10, 0x20, // int32 (tag=2)
0x0C, // end group (tag=1)
];
// Unknown fields inside the group must be skipped, not rejected.
assert_eq!(groups::Test1::decode(data), Ok(msg1));
// repeated group
let msg2_bytes: &[u8] = &[
0x20, 0x40, 0x2B, 0x30, 0xFF, 0x01, 0x2C, 0x2B, 0x30, 0x01, 0x2C, 0x38, 0x64,
];
let msg2 = groups::Test2 {
i14: Some(64),
groupb: vec![
groups::test2::GroupB { i16: Some(255) },
groups::test2::GroupB { i16: Some(1) },
],
i17: Some(100),
};
// Round-trip both directions against the fixed byte representation.
let mut bytes = Vec::new();
msg2.encode(&mut bytes).unwrap();
assert_eq!(&*bytes, msg2_bytes);
assert_eq!(groups::Test2::decode(msg2_bytes), Ok(msg2));
}
/// Round-trips oneof fields whose variants are proto2 groups, covering a
/// non-group variant, a group with defaulted members, a fully-populated
/// group with a nested optional group, and the all-`None` default.
pub fn test_group_oneof() {
let msg = groups::OneofGroup {
i1: Some(42),
field: Some(groups::oneof_group::Field::S2("foo".to_string())),
};
check_message(&msg);
let msg = groups::OneofGroup {
i1: Some(42),
field: Some(groups::oneof_group::Field::G(groups::oneof_group::G {
i2: None,
s1: "foo".to_string(),
t1: None,
})),
};
check_message(&msg);
let msg = groups::OneofGroup {
i1: Some(42),
field: Some(groups::oneof_group::Field::G(groups::oneof_group::G {
i2: Some(99),
s1: "foo".to_string(),
t1: Some(groups::Test1 {
groupa: Some(groups::test1::GroupA { i2: None }),
}),
})),
};
check_message(&msg);
check_message(&groups::OneofGroup::default());
}
}
/// Runs every test in `mod tests` sequentially; each test panics (via
/// assert/unwrap) on failure, so reaching the end means all passed.
fn main() {
use tests::*;
test_all_types_proto3();
test_ident_conversions();
test_custom_type_attributes();
test_nesting();
test_deep_nesting();
test_deep_nesting_oneof();
test_deep_nesting_group();
test_deep_nesting_repeated();
test_deep_nesting_map();
test_recursive_oneof();
test_default_enum();
test_group();
test_group_oneof();
}
|
/// Embedded C++ "standard prelude" source, emitted verbatim into every
/// generated translation unit. Stored as a byte string; the `\"` sequences
/// are Rust escapes and become plain `"` in the C++ output. The content is
/// include-guarded (`_ENPPSTD_`), so emitting it more than once is
/// harmless. Do NOT reformat this string: every byte is program output.
pub static STDLIB :&[u8] = b"
#ifndef _ENPPSTD_
#define _ENPPSTD_
#include <type_traits>
#include <algorithm>
#include <iostream>
#include <fstream>
#include <numeric>
#include <vector>
#include <string>
#include <thread>
#include <future>
#include <chrono>
#include <regex>
#include <tuple>
#include <map>
#ifdef __cpp_lib_ranges
#include <ranges>
#endif
#define jthread(t) std::thread((t)).join()
#define dthread(t) std::thread((t)).detach()
struct __instead_of_void { friend std::ostream& operator<<(std::ostream& os, __instead_of_void _) { return os << \"void\"; }};
template<class T> struct __gt { using t = T; };
template<> struct __gt<void> { using t = __instead_of_void; };
template<class T> using __gt_t=typename __gt<T>::t;
#define __into_f(e) ([&](){return e;})
#define safe(e) __vr1(__into_f(e))
#define print_safe(e) __pr1(__into_f(e))
template<class T>void _print(T e) {std::cout << print_safe(e);}
template<class F>inline constexpr auto __vr1(const F& e) { if constexpr (std::is_same<decltype(e()), void>::value){e(); return __instead_of_void();} else return e(); }
template <class T>struct __lv {template <class U>constexpr static auto less_than_test(const U u) -> decltype(std::cout << u, char(0)){return 0;}constexpr static int less_than_test(...) {return 0;}
constexpr static const bool value = (sizeof(less_than_test(std::declval<T>())) == 1);};
template <class T>std::string __pa(T& e) { std::ostringstream oss; oss << &e; return oss.str(); }
template <class T>std::string __pa(T&& e) { return \"rvalue\"; }
template<class F>inline constexpr auto __pr1(const F& e) {if
constexpr (std::is_same<decltype(e()), void>::value) { e(); return __instead_of_void(); }else if constexpr (!__lv<decltype(e())>::value) {auto&&v=e();std::ostringstream oss;oss<<\"(\"<<
typeid(e()).name() << \":\" << sizeof(decltype(e())) <<__pa(v)<<\")\";return oss.str();}else return e();}
struct __constructor{__constructor(){std::cout.tie(0);std::ios_base::sync_with_stdio(0);std::cout<<std::boolalpha;}}__Construct;
typedef char i1; typedef short i2; typedef long i4; typedef long long i8;
typedef unsigned char u1; typedef unsigned short u2; typedef unsigned long u4; typedef unsigned long long u8;
typedef float f4; typedef double f8; typedef long double ld;
typedef const char ci1; typedef const unsigned char cu1; typedef const short ci2; typedef const long ci4; typedef const long long ci8;
typedef const unsigned short cu2; typedef const unsigned long cu4; typedef const unsigned long long cu8;
typedef const float cf4; typedef const double cf8; typedef const long double cld;
using std::vector; using std::string; using std::stoi; using namespace std::string_literals; using namespace std::chrono_literals;using std::async;using std::move;
template<class F, class...T>void get_time(F f, T...a) {
auto st = std::chrono::system_clock::now(); f(a...); std::chrono::duration<double>t = std::chrono::system_clock::now() - st; std::cout << t.count() << \" second(s) spent.\" << std::endl;}
std::string input_line(std::string a = \"\") { std::string b; std::cout << a; getline(std::cin, b); return b; }
std::string input(std::string a = \"\") { std::string b; std::cout << a; std::cin >> b; return b; }
std::string static_input(int etag, std::string a = \"\") { static std::map<int, std::string>memoi; if (memoi.count(etag)) { return memoi[etag]; } std::string b; std::cout << a; std::cin >> b; memoi.insert(std::make_pair(etag, b)); return b; }
std::string static_input_line(int etag, std::string a = \"\") { static std::map<int, std::string>memoi; if (memoi.count(etag)) { return memoi[etag]; } std::string b; std::cout << a; getline(std::cin, b); memoi.insert(std::make_pair(etag, b)); return b; }
template<class...T>auto tup(T...arg)->std::tuple<T...> { return std::tuple<T...>(arg...); }
template<class T>class __folder {
public:T c; template<class E>__folder& operator<< (E a) { c.push_back(a); return*this; }};
class __fold_printer {public:template<class E>__fold_printer& operator<< (const E&a) { _print(a);return*this; }};
template<class...T>void print(const T&...arg) { __fold_printer e;(e<< ... <<arg); }
template<class...T>void println(const T&...arg) { __fold_printer e;(e<< ... <<arg); std::cout << std::endl; }
template<class T, class...R>class __gft { public:typedef T CORE; };
template<class...T>std::vector<typename __gft<T...>::CORE> vec(T...arg) { __folder<std::vector<typename __gft<T...>::CORE>> r; return (r << ... << arg).c; }
template<class T>std::string make_string(T a) { std::stringstream k; k << a; return k.str(); }
template<class T, class F>auto map(T c, F f)->std::vector<decltype(f(*c.begin()))> { std::vector<decltype(f(*c.begin()))>g; for (const auto& i : c) { g.push_back(f(i)); }return g; }
template<class T, class F>void each(T c, F f) { std::for_each(c.begin(), c.end(), f); }
template<class T, class F>auto filter(T c, F f)->std::vector<typename T::value_type> { std::vector<typename T::value_type>a; for (const auto& i : c)if (f(i))a.push_back(i); return a; }
template<typename T, typename F>auto integrate(T c, F f)->std::vector<decltype(f(*c.begin(), *c.begin()))> { typedef typename T::value_type vt; auto iter = c.begin(); vt rdc = *iter; std::vector<vt>ret;ret.push_back(rdc); iter++;
for (; iter != c.end(); iter++) { rdc = f(rdc, *iter); ret.push_back(rdc); }return ret; }
template<typename T, typename F, typename vt = typename T::value_type>auto fold(const T & c, const F & f)->decltype(f(*c.begin(), *c.begin())) { auto iter = c.begin(); vt rdc = *iter; iter++; std::vector<vt>ret; for (; iter != c.end(); iter++) { rdc = f(rdc, *iter); }return rdc; }
template<typename T, typename F, typename vt>auto bfold(const vt & d, const T & c, const F & f)->decltype(f(vt(), *c.begin())) { auto iter = c.begin(); vt rdc = d; std::vector<vt>ret; for (; iter != c.end(); iter++) { rdc = f(rdc, *iter); }return rdc; }
template<class T1, class T2>std::vector<typename T1::value_type> cat(T1 a, T2 b) { std::vector<typename T1::value_type> ret(a.begin(), a.end()); ret.insert(ret.end(), b.begin(), b.end()); return ret; }
template<class T, class F = std::less<typename T::value_type>>typename T::iterator max(T&& e, F f = F()) {auto first = e.begin();auto last = e.end();if (first == last) return last;typename T::iterator largest = first;++first;
for (; first != last; ++first) {if (f(*largest, *first)) {largest = first;}}return largest;}
template<class T>typename T::value_type sum(T&& c) {return std::accumulate(c.begin(), c.end(), typename T::value_type());}
template<class T>T wait(T s) { std::this_thread::sleep_for(s); return s; }
class range {private:int start; int End; int diff; public:typedef int value_type;typedef const int& const_reference;typedef int&reference;
range(int _end) { start = 0; End = _end; diff = 1; }
range(int _start, int _end, int _diff = 1) { start = _start; End = _end; diff = _diff; }
class iterator {
private:int _diff; public:int _val; iterator(int v, int d) :_val(v), _diff(d) {}
auto operator++()->iterator& { _val += _diff; return *this; }
inline int operator*() { return _val; }
int operator==(iterator i) { return (i._val == _val); }
int operator!=(iterator i) { return (i._val >= _val + _diff); }};
inline auto begin()->iterator { return iterator(start, diff); }
inline auto end()->iterator { return iterator(End + diff, diff); }};
std::pair<std::string, std::string> pr(const std::string& a, const std::string& b) {return std::pair<std::string, std::string>(a, b);}
inline range until(i4 a, i4 b) { return range(a, b); }
#ifdef __cpp_lib_ranges
namespace srv = std::ranges::views;
namespace sr = std::ranges;
//using namespace srv;
//using namespace sr;
#endif
#endif
";
|
#![no_std]
extern crate rand;
use rand::SeedableRng;
use rand::rngs::SmallRng;
use rand::distributions::{Distribution, Bernoulli};
/// This test should make sure that we don't accidentally have undefined
/// behavior for large probabilities due to
/// https://github.com/rust-lang/rust/issues/10184.
/// Expressions like `1.0*(u64::MAX as f64) as u64` have to be avoided.
#[test]
fn large_probability() {
    // Largest f64 that is still strictly less than 1.0.
    let p = 1. - ::core::f64::EPSILON / 2.;
    assert!(p < 1.);
    let d = Bernoulli::new(p);
    // Fixed seed keeps the test deterministic.
    let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
    let mut rng = SmallRng::from_seed(seed);
    (0..10).for_each(|_| {
        assert!(d.sample(&mut rng), "extremely unlikely to fail by accident");
    });
}
|
use std::io::prelude::*;
use serde::{Serialize, ser, serde_if_integer128};
use std::fmt::Display;
use super::write::*;
/// Serde serializer that writes values in the custom binary wire format
/// via the `write_*` helpers from `super::write`, into any `Write` sink.
pub struct MCProtoSerializer<W: Write> {
// Underlying output sink; all serialized bytes go here.
pub writer: W
}
impl<W: Write> MCProtoSerializer<W> {
    /// Creates a new Serializer with the given `Write`r.
    pub fn new(w: W) -> MCProtoSerializer<W> {
        MCProtoSerializer { writer: w }
    }
}
impl<'a, W: Write> serde::Serializer for &'a mut MCProtoSerializer<W> {
    type Ok = ();
    type Error = crate::error::Error;
    // Sequences, tuples and maps have no wire representation here yet;
    // `Impossible` makes serde reject them.
    type SerializeSeq = ser::Impossible<(), Self::Error>;
    type SerializeTuple = ser::Impossible<(), Self::Error>;
    type SerializeTupleStruct = ser::Impossible<(), Self::Error>;
    type SerializeTupleVariant = ser::Impossible<(), Self::Error>;
    type SerializeMap = ser::Impossible<(), Self::Error>;
    type SerializeStruct = Compound<'a, W>;
    type SerializeStructVariant = Compound<'a, W>;
    fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
        write_bool(&v, &mut self.writer)?;
        Ok(())
    }
    // NOTE: the fixed-width writers below previously discarded their
    // `Result`, silently swallowing I/O errors. They now propagate with
    // `?`, matching `serialize_bool`/`serialize_unit_variant`.
    fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
        write_i8(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
        write_i16(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
        write_i32(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
        write_i64(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
        write_u8(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
        write_u16(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
        write_u32(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
        write_u64(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {
        write_f32(&v, &mut self.writer)?;
        Ok(())
    }
    fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {
        write_f64(&v, &mut self.writer)?;
        Ok(())
    }
    serde_if_integer128! {
        fn serialize_u128(self, v: u128) -> Result<Self::Ok, Self::Error> {
            write_u128(&v, &mut self.writer)?;
            Ok(())
        }
    }
    fn serialize_char(self, _v: char) -> Result<Self::Ok, Self::Error> {
        unimplemented!()
    }
    fn serialize_str(self, val: &str) -> Result<Self::Ok, Self::Error> {
        write_String(&val, &mut self.writer)
    }
    fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> {
        // `write_all` loops until the entire slice is written, fixing the
        // short-write bug of plain `write`. The I/O error itself is still
        // discarded — TODO: propagate once `Error` can be built from
        // `io::Error`.
        let _ = self.writer.write_all(value);
        Ok(())
    }
    fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
        // `None` is encoded as the single sentinel byte 0xff.
        let _ = self.writer.write_all(&[0xff]);
        Ok(()) // TODO: propagate the I/O error
    }
    fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<Self::Ok, Self::Error> where
        T: Serialize {
        unimplemented!()
    }
    fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
        unimplemented!()
    }
    fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
        unimplemented!()
    }
    fn serialize_unit_variant(self, _name: &'static str, variant_index: u32, _variant: &'static str) -> Result<Self::Ok, Self::Error> {
        // Enum variants are identified on the wire by their varint index.
        write_varint(&(variant_index as i32), &mut self.writer)
    }
    fn serialize_newtype_struct<T: ?Sized>(self, _name: &'static str, _value: &T) -> Result<Self::Ok, Self::Error> where
        T: Serialize {
        unimplemented!()
    }
    fn serialize_newtype_variant<T: ?Sized>(self, _name: &'static str, _variant_index: u32, _variant: &'static str, _value: &T) -> Result<Self::Ok, Self::Error> where
        T: Serialize {
        unimplemented!()
    }
    fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        unimplemented!()
    }
    fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
        unimplemented!()
    }
    fn serialize_tuple_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeTupleStruct, Self::Error> {
        unimplemented!()
    }
    fn serialize_tuple_variant(self, _name: &'static str, _variant_index: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeTupleVariant, Self::Error> {
        unimplemented!()
    }
    fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        unimplemented!()
    }
    fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct, Self::Error> {
        // Struct fields are written back-to-back with no framing or names.
        Ok(Compound { ser: self })
    }
    fn serialize_struct_variant(self, _name: &'static str, variant_index: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeStructVariant, Self::Error> {
        // Tag the variant with its varint index, then emit the fields.
        // (The `Result` of `write_varint` was previously dropped.)
        write_varint(&(variant_index as i32), &mut self.writer)?;
        Ok(Compound { ser: self })
    }
    fn collect_str<T: ?Sized>(self, _value: &T) -> Result<Self::Ok, Self::Error> where
        T: Display {
        unimplemented!()
    }
}
/// Sub-serializer for struct (and struct-variant) bodies: borrows the
/// parent serializer and writes each field in declaration order.
pub struct Compound<'a, W: 'a + Write> {
ser: &'a mut MCProtoSerializer<W>,
}
impl<'a, W> serde::ser::SerializeStruct for Compound<'a, W>
where
W: Write
{
type Ok = ();
type Error = crate::error::Error;
#[inline]
// Field names are discarded: the wire format is positional, so only the
// value is serialized.
fn serialize_field<T: ?Sized>(&mut self, _key: &'static str, value: &T) -> Result<(), Self::Error>
where
T: serde::ser::Serialize,
{
value.serialize(&mut *self.ser)
}
#[inline]
// No trailing terminator — struct bodies end implicitly.
fn end(self) -> Result<(), Self::Error> {
Ok(())
}
}
impl<'a, W> serde::ser::SerializeStructVariant for Compound<'a, W>
where
W: Write
{
type Ok = ();
type Error = crate::error::Error;
#[inline]
// Identical to SerializeStruct: positional encoding, key ignored.
fn serialize_field<T: ?Sized>(&mut self, _key: &'static str, value: &T) -> Result<(), Self::Error>
where
T: serde::ser::Serialize,
{
value.serialize(&mut *self.ser)
}
#[inline]
// The variant tag was already written by `serialize_struct_variant`.
fn end(self) -> Result<(), Self::Error> {
Ok(())
}
}
#[cfg(test)]
mod tests {}
|
//#[macro_use] extern crate log;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate serenity;
extern crate requests;
extern crate typemap;
mod commands;
mod shared;
use serenity::framework::standard::{DispatchError, StandardFramework, HelpBehaviour, help_commands};
use serenity::http;
use serenity::prelude::*;
use std::collections::HashMap;
use std::sync::Arc;
use std::collections::HashSet;
use std::env;
use shared::shared_structs::Handler;
use shared::shared_structs::CommandCounter;
use shared::shared_structs::ShardManagerContainer;
lazy_static! {
// Discord bot token, read once (lazily) from the RUSTY_TOKEN environment
// variable. Panics on first access if the variable is unset.
static ref RUSTY_TOKEN: String = env::var("RUSTY_TOKEN").unwrap();
}
/// Bot entry point: builds the serenity client, seeds shared state,
/// registers the command framework (logging hooks, help, command groups),
/// and blocks on the gateway connection.
fn main() {
// Login with a bot token from the environment
let mut client = Client::new(&RUSTY_TOKEN, Handler).expect("Err creating client");
{
// Seed the shared TypeMap: per-command invocation counters and a handle
// to the shard manager (used by commands such as `latency`).
let mut data = client.data.lock();
data.insert::<CommandCounter>(HashMap::default());
data.insert::<ShardManagerContainer>(Arc::clone(&client.shard_manager));
}
// Look up the application owner so `owners`-restricted features work.
let owners = match http::get_current_application_info() {
Ok(info) => {
let mut set = HashSet::new();
set.insert(info.owner.id);
set
}
Err(why) => panic!("Couldn't get application info: {:?}", why),
};
client.with_framework(
StandardFramework::new()
.configure(|c| c.owners(owners).prefix("~"))
// Runs before every command: log it and bump its usage counter.
.before(|ctx, msg, command_name| {
println!(
"Got command '{}' by user '{}'",
command_name, msg.author.name
);
let mut data = ctx.data.lock();
let counter = data.get_mut::<CommandCounter>().unwrap();
let entry = counter.entry(command_name.to_string()).or_insert(0);
*entry += 1;
true // if `before` returns false, command processing doesn't happen.
})
// Runs after every command: report success or the returned error.
.after(|_, _, command_name, error| match error {
Ok(()) => println!("Processed command '{}'", command_name),
Err(why) => println!("Command '{}' returned error {:?}", command_name, why),
})
// Only rate-limit errors get user-visible feedback; other dispatch
// errors are silently ignored.
.on_dispatch_error(|_ctx, msg, error| {
if let DispatchError::RateLimited(seconds) = error {
let _ = msg.channel_id.say(&format!("Try this again in {} seconds.", seconds));
}
})
// Embed-based help; `{}` placeholders are filled in by serenity.
.customised_help(help_commands::with_embeds, |c| {
c.individual_command_tip("Hello! \n\
If you want more information about a specific command, just pass the command as argument.")
.command_not_found_text("Could not {}, I'm sorry : (")
.suggestion_text("How about this command: {}")
.lacking_permissions(HelpBehaviour::Hide)
.lacking_role(HelpBehaviour::Nothing)
.wrong_channel(HelpBehaviour::Strike)
})
.group("Utility", |g| {
g.command("latency", |c| {
c.desc("Responds with the latency of the server.")
.cmd(commands::meta::latency)
})
.command("version", |c| {
c.desc("Responds with the version number of the currently running bot.")
.cmd(commands::meta::version)
})
.command("info", |c| {
c.desc("Responds with the details of the bot.")
.cmd(commands::meta::info)
})
})
.group("World of Warcraft", |g| {
g.command("realm", |c| {
c.desc("Responds with the status of the specified realm.")
.cmd(commands::wow::realm)
})
})
// .group("Owner only", |g| {
// g.owners_only(true).command("quit", |c| {
// c.desc("Shuts down the bot(owner only command).")
// .exec(commands::owner::quit)
// })
// }),
);
// Blocks until the connection ends; any gateway error is fatal.
if let Err(why) = client.start() {
panic!("Client error: {:?}", why);
}
}
|
use std::fmt::Debug;
use std::str::FromStr;
#[allow(dead_code)]
/// Reads a single line from stdin, stripping the trailing newline and any
/// other trailing whitespace. Panics if stdin cannot be read.
fn read_line_from_stdin() -> String {
    let mut buffer = String::new();
    std::io::stdin().read_line(&mut buffer).unwrap();
    // `trim_end` replaces the deprecated `trim_right`, so the
    // `#[allow(deprecated)]` attribute is no longer needed.
    buffer.trim_end().to_owned()
}
#[allow(dead_code)]
/// Reads one line from stdin and parses the whole (trimmed) line as a
/// single `T`. Panics if reading fails or the line does not parse.
fn parse_line_to_single<T>() -> T
where
    T: FromStr,
    T::Err: Debug,
{
    let mut buffer = String::new();
    std::io::stdin().read_line(&mut buffer).unwrap();
    // `trim_end` replaces the deprecated `trim_right`.
    T::from_str(buffer.trim_end()).unwrap()
}
#[allow(dead_code)]
/// Reads one line from stdin and parses every whitespace-separated token
/// as a `T`, returning them in order.
fn parse_line_to_multiple<T>() -> Vec<T>
where
    T: FromStr,
    T::Err: Debug,
{
    let mut buffer = String::new();
    std::io::stdin().read_line(&mut buffer).unwrap();
    buffer
        // `trim_end` replaces the deprecated `trim_right`.
        .trim_end()
        .split_whitespace()
        // NOTE(review): tokens that fail to parse are silently dropped
        // (original behavior, deliberately preserved).
        .flat_map(|s| T::from_str(s))
        .collect()
}
/// Reads one stdin line and parses it into the listed typed variables,
/// e.g. `parse_line![n: usize, k: i64]`. Panics if a token is missing or
/// unparsable; asserts that no extra tokens remain.
macro_rules! parse_line {
    [$( $x:tt : $t:ty ),+] => {
        //declare variables
        $( let $x: $t; )+
        {
            use std::str::FromStr;
            // read
            let mut buf = String::new();
            std::io::stdin().read_line(&mut buf).unwrap();
            // `trim_end` replaces the deprecated `trim_right`, removing
            // the need for `#[allow(deprecated)]`.
            let mut splits = buf.trim_end().split_whitespace();
            // assign each variable
            $(
                $x = splits.next().and_then(|s| <$t>::from_str(s).ok()).unwrap();
            )+
            // all strs should be used for assignment
            assert!(splits.next().is_none());
        }
    };
}
// One purchasable book: its price and the skill points it grants for each
// algorithm (indexed consistently across all books).
#[derive(Debug, Clone, PartialEq)]
struct Book {
// Cost of the book.
price: usize,
// skills[j] = points this book adds to algorithm j.
skills: Vec<usize>,
}
/// Integer exponentiation: returns `base` raised to `power`.
///
/// Fixes an off-by-one in the original loop, which returned `base` for
/// both `power == 0` and `power == 1`; `pow(b, 0)` now correctly yields 1.
fn pow(base: usize, power: usize) -> usize {
    let mut value = 1;
    for _ in 0..power {
        value *= base;
    }
    value
}
/// Brute-force solver: tries every subset of books (bitmask enumeration)
/// and reports the cheapest subset whose summed skill points reach
/// `desired_skill` in every algorithm, or -1 if none does.
fn main() {
parse_line![book_count: usize, algol_count: usize, desired_skill: usize];
// Each input line: price followed by `algol_count` skill values.
let books = {
let mut books = vec![];
for _ in 0..book_count {
let line = parse_line_to_multiple::<usize>();
books.push(Book {
price: line[0],
skills: line[1..].to_vec(),
});
}
books
};
let mut lowest_price = None;
// NOTE(review): the `+ 1` makes the last iteration (i == 2^book_count)
// re-test a mask with no low bits set — a harmless duplicate of the
// empty subset.
for i in 0..pow(2, book_count) + 1 {
// Bit b of `i` decides whether book b is bought.
let is_selecteds = (0..book_count).map(|b| i & (1 << b) != 0);
let selected_books = books
.iter()
.zip(is_selecteds)
.filter(|&(_, is_selected)| is_selected)
.map(|(book, _)| book)
.collect::<Vec<_>>();
// Sum the skill contributions of the chosen books per algorithm.
let mut obtained_skills = vec![0; algol_count];
for book in selected_books.iter() {
for (j, skill) in book.skills.iter().enumerate() {
obtained_skills[j] += *skill;
}
}
let is_achieved = obtained_skills.into_iter().all(|s| s >= desired_skill);
if is_achieved {
// Keep the cheapest qualifying subset seen so far.
let price_sum = selected_books.iter().map(|b| b.price).sum::<usize>();
match lowest_price {
Some(p) if p > price_sum => {
lowest_price = Some(price_sum);
}
None => {
lowest_price = Some(price_sum);
}
_ => {}
}
}
}
match lowest_price {
Some(p) => println!("{}", p),
None => println!("-1",),
}
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// Messages delivered to the Voila application's event loop.
pub enum VoilaMessage {
// Payload is the id/index of the replica whose connectivity state was
// toggled — presumably by a UI action; TODO confirm against the sender.
ReplicaConnectivityToggled(u32),
}
|
#[cfg(unix)]
use std::os::unix::prelude::*;
#[cfg(windows)]
use std::os::windows::prelude::*;
use std::borrow::Cow;
use std::fmt;
use std::fs;
use std::io;
use std::iter;
use std::iter::repeat;
use std::mem;
use std::path::{Component, Path, PathBuf};
use std::str;
use crate::other;
use crate::EntryType;
/// Representation of the header of an entry in an archive
#[repr(C)]
#[allow(missing_docs)]
pub struct Header {
// Raw 512-byte tar header record. Interpreted by reinterpret-casting to
// the OldHeader/UstarHeader/GnuHeader layouts declared below.
bytes: [u8; 512],
}
/// Declares the information that should be included when filling a Header
/// from filesystem metadata.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[non_exhaustive]
pub enum HeaderMode {
/// All supported metadata, including mod/access times and ownership will
/// be included.
Complete,
/// Only metadata that is directly relevant to the identity of a file will
/// be included. In particular, ownership and mod/access times are excluded.
/// Useful for producing reproducible, bit-identical archives.
Deterministic,
}
/// Representation of the header of an entry in an archive
// Original (pre-POSIX) tar layout. Numeric fields (mode/uid/gid/size/
// mtime/cksum) are ASCII octal, NUL- or space-terminated.
#[repr(C)]
#[allow(missing_docs)]
pub struct OldHeader {
pub name: [u8; 100],
pub mode: [u8; 8],
pub uid: [u8; 8],
pub gid: [u8; 8],
pub size: [u8; 12],
pub mtime: [u8; 12],
pub cksum: [u8; 8],
pub linkflag: [u8; 1],
pub linkname: [u8; 100],
// Padding out to the full 512-byte record.
pub pad: [u8; 255],
}
/// Representation of the header of an entry in an archive
// POSIX UStar layout: identified by magic "ustar\0" + version "00".
// The `prefix` field allows path names up to 255 bytes (prefix + '/' + name).
#[repr(C)]
#[allow(missing_docs)]
pub struct UstarHeader {
pub name: [u8; 100],
pub mode: [u8; 8],
pub uid: [u8; 8],
pub gid: [u8; 8],
pub size: [u8; 12],
pub mtime: [u8; 12],
pub cksum: [u8; 8],
pub typeflag: [u8; 1],
pub linkname: [u8; 100],
// UStar format
pub magic: [u8; 6],
pub version: [u8; 2],
pub uname: [u8; 32],
pub gname: [u8; 32],
pub dev_major: [u8; 8],
pub dev_minor: [u8; 8],
pub prefix: [u8; 155],
pub pad: [u8; 12],
}
/// Representation of the header of an entry in an archive
// GNU tar layout: identified by magic "ustar " + version " \0".
// Adds atime/ctime and inline sparse-file bookkeeping.
#[repr(C)]
#[allow(missing_docs)]
pub struct GnuHeader {
pub name: [u8; 100],
pub mode: [u8; 8],
pub uid: [u8; 8],
pub gid: [u8; 8],
pub size: [u8; 12],
pub mtime: [u8; 12],
pub cksum: [u8; 8],
pub typeflag: [u8; 1],
pub linkname: [u8; 100],
// GNU format
pub magic: [u8; 6],
pub version: [u8; 2],
pub uname: [u8; 32],
pub gname: [u8; 32],
pub dev_major: [u8; 8],
pub dev_minor: [u8; 8],
pub atime: [u8; 12],
pub ctime: [u8; 12],
pub offset: [u8; 12],
pub longnames: [u8; 4],
pub unused: [u8; 1],
// First four sparse chunks; more follow in GnuExtSparseHeader entries
// when `isextended` is set.
pub sparse: [GnuSparseHeader; 4],
pub isextended: [u8; 1],
// Actual (logical) file size for sparse files.
pub realsize: [u8; 12],
pub pad: [u8; 17],
}
/// Description of the header of a spare entry.
///
/// Specifies the offset/number of bytes of a chunk of data in octal.
#[repr(C)]
#[allow(missing_docs)]
pub struct GnuSparseHeader {
// Byte offset of this data chunk within the file (ASCII octal).
pub offset: [u8; 12],
// Length in bytes of this data chunk (ASCII octal).
pub numbytes: [u8; 12],
}
/// Representation of the entry found to represent extended GNU sparse files.
///
/// When a `GnuHeader` has the `isextended` flag set to `1` then the contents of
/// the next entry will be one of these headers.
#[repr(C)]
#[allow(missing_docs)]
pub struct GnuExtSparseHeader {
pub sparse: [GnuSparseHeader; 21],
// Nonzero when yet another continuation record follows.
pub isextended: [u8; 1],
pub padding: [u8; 7],
}
impl Header {
/// Creates a new blank GNU header.
///
/// The GNU style header is the default for this library and allows various
/// extensions such as long path names, long link names, and setting the
/// atime/ctime metadata attributes of files.
pub fn new_gnu() -> Header {
let mut header = Header { bytes: [0; 512] };
unsafe {
// SAFETY: `Header` is a #[repr(C)] 512-byte buffer and `GnuHeader`
// is a #[repr(C)] struct of byte arrays occupying the same record;
// `cast_mut` (defined elsewhere in this module) reinterprets one as
// the other so the GNU magic/version can be stamped in place.
let gnu = cast_mut::<_, GnuHeader>(&mut header);
gnu.magic = *b"ustar ";
gnu.version = *b" \0";
}
header.set_mtime(0);
header
}
/// Creates a new blank UStar header.
///
/// The UStar style header is an extension of the original archive header
/// which enables some extra metadata along with storing a longer (but not
/// too long) path name.
///
/// UStar is also the basis used for pax archives.
pub fn new_ustar() -> Header {
let mut header = Header { bytes: [0; 512] };
unsafe {
// SAFETY: same layout reinterpretation as in `new_gnu`, but stamping
// the POSIX UStar magic/version instead.
let gnu = cast_mut::<_, UstarHeader>(&mut header);
gnu.magic = *b"ustar\0";
gnu.version = *b"00";
}
header.set_mtime(0);
header
}
/// Creates a new blank old header.
///
/// This header format is the original archive header format which all other
/// versions are compatible with (e.g. they are a superset). This header
/// format limits the path name limit and isn't able to contain extra
/// metadata like atime/ctime.
pub fn new_old() -> Header {
// No magic/version bytes: an all-zero magic marks the pre-POSIX format.
let mut header = Header { bytes: [0; 512] };
header.set_mtime(0);
header
}
// True when the magic/version bytes match the POSIX UStar format
// ("ustar\0" + "00"), as stamped by `new_ustar`.
fn is_ustar(&self) -> bool {
let ustar = unsafe { cast::<_, UstarHeader>(self) };
ustar.magic[..] == b"ustar\0"[..] && ustar.version[..] == b"00"[..]
}
// True when the magic/version bytes match the GNU format
// ("ustar " + " \0"), as stamped by `new_gnu`. Note the magic/version
// fields live at the same offsets in both layouts, so the UStar cast
// is sufficient for the comparison.
fn is_gnu(&self) -> bool {
let ustar = unsafe { cast::<_, UstarHeader>(self) };
ustar.magic[..] == b"ustar "[..] && ustar.version[..] == b" \0"[..]
}
/// View this archive header as a raw "old" archive header.
///
/// This view will always succeed as all archive header formats will fill
/// out at least the fields specified in the old header format.
pub fn as_old(&self) -> &OldHeader {
// Infallible: the old layout is a common prefix of all formats.
unsafe { cast(self) }
}
/// Same as `as_old`, but the mutable version.
pub fn as_old_mut(&mut self) -> &mut OldHeader {
unsafe { cast_mut(self) }
}
/// View this archive header as a raw UStar archive header.
///
/// The UStar format is an extension to the tar archive format which enables
/// longer pathnames and a few extra attributes such as the group and user
/// name.
///
/// This cast may not succeed as this function will test whether the
/// magic/version fields of the UStar format have the appropriate values,
/// returning `None` if they aren't correct.
pub fn as_ustar(&self) -> Option<&UstarHeader> {
// The magic check above is what makes the reinterpretation meaningful.
if self.is_ustar() {
Some(unsafe { cast(self) })
} else {
None
}
}
/// Same as `as_ustar`, but the mutable version.
pub fn as_ustar_mut(&mut self) -> Option<&mut UstarHeader> {
if self.is_ustar() {
Some(unsafe { cast_mut(self) })
} else {
None
}
}
/// View this archive header as a raw GNU archive header.
///
/// The GNU format is an extension to the tar archive format which enables
/// longer pathnames and a few extra attributes such as the group and user
/// name.
///
/// This cast may not succeed as this function will test whether the
/// magic/version fields of the GNU format have the appropriate values,
/// returning `None` if they aren't correct.
pub fn as_gnu(&self) -> Option<&GnuHeader> {
if self.is_gnu() {
Some(unsafe { cast(self) })
} else {
None
}
}
/// Same as `as_gnu`, but the mutable version.
pub fn as_gnu_mut(&mut self) -> Option<&mut GnuHeader> {
if self.is_gnu() {
Some(unsafe { cast_mut(self) })
} else {
None
}
}
/// Treats the given byte slice as a header.
///
/// Panics if the length of the passed slice is not equal to 512.
pub fn from_byte_slice(bytes: &[u8]) -> &Header {
assert_eq!(bytes.len(), mem::size_of::<Header>());
// Alignment check is trivially satisfied (both are align 1) but kept
// as a guard should the layout ever change.
assert_eq!(mem::align_of_val(bytes), mem::align_of::<Header>());
// SAFETY: length and alignment were just asserted, and `Header` is a
// plain #[repr(C)] byte buffer, so the pointer reinterpretation is
// valid for the lifetime of the borrow.
unsafe { &*(bytes.as_ptr() as *const Header) }
}
/// Returns a view into this header as a byte array.
pub fn as_bytes(&self) -> &[u8; 512] {
&self.bytes
}
/// Returns a mutable view into this header as a byte array.
pub fn as_mut_bytes(&mut self) -> &mut [u8; 512] {
&mut self.bytes
}
/// Blanket sets the metadata in this header from the metadata argument
/// provided.
///
/// This is useful for initializing a `Header` from the OS's metadata from a
/// file. By default, this will use `HeaderMode::Complete` to include all
/// metadata.
pub fn set_metadata(&mut self, meta: &fs::Metadata) {
self.fill_from(meta, HeaderMode::Complete);
}
/// Sets only the metadata relevant to the given `HeaderMode` in this header
/// from the metadata argument provided.
///
/// `HeaderMode::Deterministic` omits or normalizes fields that would make
/// otherwise-identical archives differ between runs.
pub fn set_metadata_in_mode(&mut self, meta: &fs::Metadata, mode: HeaderMode) {
    self.fill_from(meta, mode);
}
/// Returns the size of entry's data this header represents.
///
/// This is different from `Header::size` for sparse files, which have
/// some longer `size()` but shorter `entry_size()`. The `entry_size()`
/// listed here should be the number of bytes in the archive this header
/// describes.
///
/// May return an error if the field is corrupted.
pub fn entry_size(&self) -> io::Result<u64> {
    // The size field may be octal ASCII or base-256 binary; the wrapper
    // dispatches on the encoding. Errors are annotated with the entry path.
    num_field_wrapper_from(&self.as_old().size).map_err(|err| {
        io::Error::new(
            err.kind(),
            format!("{} when getting size for {}", err, self.path_lossy()),
        )
    })
}
/// Returns the file size this header represents.
///
/// For GNU sparse entries this is the "real" (expanded) size stored in the
/// GNU header; for everything else it is the same as `entry_size`.
///
/// May return an error if the field is corrupted.
pub fn size(&self) -> io::Result<u64> {
    if self.entry_type().is_gnu_sparse() {
        self.as_gnu()
            .ok_or_else(|| other("sparse header was not a gnu header"))
            .and_then(|h| h.real_size())
    } else {
        self.entry_size()
    }
}
/// Encodes the `size` argument into the size field of this header.
///
/// Values too large for octal encoding fall back to the binary (base-256)
/// extension via `num_field_wrapper_into`.
pub fn set_size(&mut self, size: u64) {
    num_field_wrapper_into(&mut self.as_old_mut().size, size);
}
/// Returns the raw path name stored in this header.
///
/// This method may fail if the pathname is not valid Unicode and this is
/// called on a Windows platform.
///
/// Note that this function will convert any `\` characters to directory
/// separators.
pub fn path(&self) -> io::Result<Cow<Path>> {
    bytes2path(self.path_bytes())
}
/// Returns the pathname stored in this header as a byte array.
///
/// This function is guaranteed to succeed, but you may wish to call the
/// `path` method to convert to a `Path`.
///
/// Note that this function will convert any `\` characters to directory
/// separators.
pub fn path_bytes(&self) -> Cow<[u8]> {
    // UStar headers may split the path into a prefix + name pair, so defer
    // to their own logic; old-style headers just carry a NUL-padded name.
    match self.as_ustar() {
        Some(ustar) => ustar.path_bytes(),
        None => Cow::Borrowed(truncate(&self.as_old().name)),
    }
}
/// Gets the path in a "lossy" way, used for error reporting ONLY.
///
/// Invalid UTF-8 sequences are replaced, so the result must never be used
/// as an actual path.
fn path_lossy(&self) -> String {
    String::from_utf8_lossy(&self.path_bytes()).to_string()
}
/// Sets the path name for this header.
///
/// This function will set the pathname listed in this header, encoding it
/// in the appropriate format. May fail if the path is too long or if the
/// path specified is not Unicode and this is a Windows platform. Will
/// strip out any "." path component, which signifies the current directory.
///
/// Note: This function does not support names over 100 bytes, or paths
/// over 255 bytes, even for formats that support longer names. Instead,
/// use `Builder` methods to insert a long-name extension at the same time
/// as the file content.
pub fn set_path<P: AsRef<Path>>(&mut self, p: P) -> io::Result<()> {
    // Monomorphization-friendly shim: the real work lives in `_set_path`.
    self._set_path(p.as_ref())
}
// Non-generic core of `set_path`. UStar headers get their own splitting
// logic (prefix + name); old-style headers copy straight into `name`.
fn _set_path(&mut self, path: &Path) -> io::Result<()> {
    if let Some(ustar) = self.as_ustar_mut() {
        return ustar.set_path(path);
    }
    copy_path_into(&mut self.as_old_mut().name, path, false).map_err(|err| {
        io::Error::new(
            err.kind(),
            format!("{} when setting path for {}", err, self.path_lossy()),
        )
    })
}
/// Returns the link name stored in this header, if any is found.
///
/// This method may fail if the pathname is not valid Unicode and this is
/// called on a Windows platform. `Ok(None)` being returned, however,
/// indicates that the link name was not present.
///
/// Note that this function will convert any `\` characters to directory
/// separators.
pub fn link_name(&self) -> io::Result<Option<Cow<Path>>> {
match self.link_name_bytes() {
Some(bytes) => bytes2path(bytes).map(Some),
None => Ok(None),
}
}
/// Returns the link name stored in this header as a byte array, if any.
///
/// This function is guaranteed to succeed, but you may wish to call the
/// `link_name` method to convert to a `Path`.
///
/// Note that this function will convert any `\` characters to directory
/// separators.
pub fn link_name_bytes(&self) -> Option<Cow<[u8]>> {
    let linkname = &self.as_old().linkname;
    // A leading NUL byte means the field is unset.
    match linkname[0] {
        0 => None,
        _ => Some(Cow::Borrowed(truncate(linkname))),
    }
}
/// Sets the link name for this header.
///
/// This function will set the linkname listed in this header, encoding it
/// in the appropriate format. May fail if the link name is too long or if
/// the path specified is not Unicode and this is a Windows platform. Will
/// strip out any "." path component, which signifies the current directory.
///
/// To use GNU long link names, prefer instead [`crate::Builder::append_link`].
pub fn set_link_name<P: AsRef<Path>>(&mut self, p: P) -> io::Result<()> {
    // Monomorphization-friendly shim over `_set_link_name`.
    self._set_link_name(p.as_ref())
}
// Non-generic core of `set_link_name`; `is_link_name = true` relaxes the
// component checks that apply to regular entry paths.
fn _set_link_name(&mut self, path: &Path) -> io::Result<()> {
    copy_path_into(&mut self.as_old_mut().linkname, path, true).map_err(|err| {
        io::Error::new(
            err.kind(),
            format!("{} when setting link name for {}", err, self.path_lossy()),
        )
    })
}
/// Sets the link name for this header without any transformation.
///
/// This function is like [`Self::set_link_name`] but accepts an arbitrary byte array.
/// Hence it will not perform any canonicalization, such as replacing duplicate `//` with `/`.
pub fn set_link_name_literal<P: AsRef<[u8]>>(&mut self, p: P) -> io::Result<()> {
    // Monomorphization-friendly shim over `_set_link_name_literal`.
    self._set_link_name_literal(p.as_ref())
}
// Raw byte copy into the linkname field; errors if the bytes are too long
// or contain a NUL (see `copy_into`).
fn _set_link_name_literal(&mut self, bytes: &[u8]) -> io::Result<()> {
    copy_into(&mut self.as_old_mut().linkname, bytes)
}
/// Returns the mode bits for this file
///
/// May return an error if the field is corrupted.
pub fn mode(&self) -> io::Result<u32> {
    // Mode is always octal ASCII; errors are annotated with the entry path.
    octal_from(&self.as_old().mode)
        .map(|u| u as u32)
        .map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when getting mode for {}", err, self.path_lossy()),
            )
        })
}
/// Encodes the `mode` provided into this header (as octal ASCII).
pub fn set_mode(&mut self, mode: u32) {
    octal_into(&mut self.as_old_mut().mode, mode);
}
/// Returns the value of the owner's user ID field
///
/// May return an error if the field is corrupted.
pub fn uid(&self) -> io::Result<u64> {
    // `num_field_wrapper_from` already yields a `u64`, so no widening cast
    // is needed (the previous `.map(|u| u as u64)` was a no-op).
    num_field_wrapper_from(&self.as_old().uid).map_err(|err| {
        io::Error::new(
            err.kind(),
            format!("{} when getting uid for {}", err, self.path_lossy()),
        )
    })
}
/// Encodes the `uid` provided into this header (octal, or base-256 for
/// values too large for the field).
pub fn set_uid(&mut self, uid: u64) {
    num_field_wrapper_into(&mut self.as_old_mut().uid, uid);
}
/// Returns the value of the group's user ID field
///
/// May return an error if the field is corrupted.
pub fn gid(&self) -> io::Result<u64> {
    // `num_field_wrapper_from` already yields a `u64`, so no widening cast
    // is needed (the previous `.map(|u| u as u64)` was a no-op).
    num_field_wrapper_from(&self.as_old().gid).map_err(|err| {
        io::Error::new(
            err.kind(),
            format!("{} when getting gid for {}", err, self.path_lossy()),
        )
    })
}
/// Encodes the `gid` provided into this header (octal, or base-256 for
/// values too large for the field).
pub fn set_gid(&mut self, gid: u64) {
    num_field_wrapper_into(&mut self.as_old_mut().gid, gid);
}
/// Returns the last modification time in Unix time format
///
/// May return an error if the field is corrupted.
pub fn mtime(&self) -> io::Result<u64> {
    num_field_wrapper_from(&self.as_old().mtime).map_err(|err| {
        io::Error::new(
            err.kind(),
            format!("{} when getting mtime for {}", err, self.path_lossy()),
        )
    })
}
/// Encodes the `mtime` provided into this header.
///
/// Note that this time is typically a number of seconds passed since
/// January 1, 1970.
pub fn set_mtime(&mut self, mtime: u64) {
    num_field_wrapper_into(&mut self.as_old_mut().mtime, mtime);
}
/// Return the user name of the owner of this file.
///
/// A return value of `Ok(Some(..))` indicates that the user name was
/// present and was valid utf-8, `Ok(None)` indicates that the user name is
/// not present in this archive format, and `Err` indicates that the user
/// name was present but was not valid utf-8.
pub fn username(&self) -> Result<Option<&str>, str::Utf8Error> {
    // Validate UTF-8 when present, hoisting the error with `transpose`.
    self.username_bytes().map(str::from_utf8).transpose()
}
/// Returns the user name of the owner of this file, if present.
///
/// A return value of `None` indicates that the user name is not present in
/// this header format (only UStar and GNU headers carry one).
pub fn username_bytes(&self) -> Option<&[u8]> {
    // Try the UStar field first, then fall back to the GNU field.
    if let Some(ustar) = self.as_ustar() {
        return Some(ustar.username_bytes());
    }
    self.as_gnu().map(|gnu| gnu.username_bytes())
}
/// Sets the username inside this header.
///
/// This function will return an error if this header format cannot encode a
/// user name or the name is too long.
pub fn set_username(&mut self, name: &str) -> io::Result<()> {
    // Only UStar and GNU headers have a uname field; old-style headers
    // cannot store one.
    if let Some(ustar) = self.as_ustar_mut() {
        return ustar.set_username(name);
    }
    if let Some(gnu) = self.as_gnu_mut() {
        gnu.set_username(name)
    } else {
        Err(other("not a ustar or gnu archive, cannot set username"))
    }
}
/// Return the group name of the owner of this file.
///
/// A return value of `Ok(Some(..))` indicates that the group name was
/// present and was valid utf-8, `Ok(None)` indicates that the group name is
/// not present in this archive format, and `Err` indicates that the group
/// name was present but was not valid utf-8.
pub fn groupname(&self) -> Result<Option<&str>, str::Utf8Error> {
    // Validate UTF-8 when present, hoisting the error with `transpose`.
    self.groupname_bytes().map(str::from_utf8).transpose()
}
/// Returns the group name of the owner of this file, if present.
///
/// A return value of `None` indicates that the group name is not present in
/// this header format (only UStar and GNU headers carry one).
pub fn groupname_bytes(&self) -> Option<&[u8]> {
    // Try the UStar field first, then fall back to the GNU field.
    if let Some(ustar) = self.as_ustar() {
        return Some(ustar.groupname_bytes());
    }
    self.as_gnu().map(|gnu| gnu.groupname_bytes())
}
/// Sets the group name inside this header.
///
/// This function will return an error if this header format cannot encode a
/// group name or the name is too long.
pub fn set_groupname(&mut self, name: &str) -> io::Result<()> {
    // Only UStar and GNU headers have a gname field; old-style headers
    // cannot store one.
    if let Some(ustar) = self.as_ustar_mut() {
        return ustar.set_groupname(name);
    }
    if let Some(gnu) = self.as_gnu_mut() {
        gnu.set_groupname(name)
    } else {
        Err(other("not a ustar or gnu archive, cannot set groupname"))
    }
}
/// Returns the device major number, if present.
///
/// This field may not be present in all archives, and it may not be
/// correctly formed in all archives. `Ok(Some(..))` means it was present
/// and correctly decoded, `Ok(None)` indicates that this header format does
/// not include the device major number, and `Err` indicates that it was
/// present and failed to decode.
pub fn device_major(&self) -> io::Result<Option<u32>> {
    if let Some(ustar) = self.as_ustar() {
        ustar.device_major().map(Some)
    } else if let Some(gnu) = self.as_gnu() {
        gnu.device_major().map(Some)
    } else {
        // Old-style headers carry no device numbers at all.
        Ok(None)
    }
}
/// Encodes the value `major` into the dev_major field of this header.
///
/// This function will return an error if this header format cannot encode a
/// major device number.
pub fn set_device_major(&mut self, major: u32) -> io::Result<()> {
    if let Some(ustar) = self.as_ustar_mut() {
        ustar.set_device_major(major);
        Ok(())
    } else if let Some(gnu) = self.as_gnu_mut() {
        gnu.set_device_major(major);
        Ok(())
    } else {
        Err(other("not a ustar or gnu archive, cannot set dev_major"))
    }
}
/// Returns the device minor number, if present.
///
/// This field may not be present in all archives, and it may not be
/// correctly formed in all archives. `Ok(Some(..))` means it was present
/// and correctly decoded, `Ok(None)` indicates that this header format does
/// not include the device minor number, and `Err` indicates that it was
/// present and failed to decode.
pub fn device_minor(&self) -> io::Result<Option<u32>> {
    if let Some(ustar) = self.as_ustar() {
        ustar.device_minor().map(Some)
    } else if let Some(gnu) = self.as_gnu() {
        gnu.device_minor().map(Some)
    } else {
        // Old-style headers carry no device numbers at all.
        Ok(None)
    }
}
/// Encodes the value `minor` into the dev_minor field of this header.
///
/// This function will return an error if this header format cannot encode a
/// minor device number.
pub fn set_device_minor(&mut self, minor: u32) -> io::Result<()> {
    if let Some(ustar) = self.as_ustar_mut() {
        ustar.set_device_minor(minor);
        Ok(())
    } else if let Some(gnu) = self.as_gnu_mut() {
        gnu.set_device_minor(minor);
        Ok(())
    } else {
        Err(other("not a ustar or gnu archive, cannot set dev_minor"))
    }
}
/// Returns the type of file described by this header.
///
/// The type is the single `linkflag` byte of the header.
pub fn entry_type(&self) -> EntryType {
    EntryType::new(self.as_old().linkflag[0])
}
/// Sets the type of file that will be described by this header.
pub fn set_entry_type(&mut self, ty: EntryType) {
    self.as_old_mut().linkflag = [ty.as_byte()];
}
/// Returns the checksum field of this header.
///
/// Note that this is the *stored* checksum; compare against
/// `calculate_cksum` to validate it.
///
/// May return an error if the field is corrupted.
pub fn cksum(&self) -> io::Result<u32> {
    octal_from(&self.as_old().cksum)
        .map(|u| u as u32)
        .map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when getting cksum for {}", err, self.path_lossy()),
            )
        })
}
/// Sets the checksum field of this header based on the current fields in
/// this header.
///
/// Call this last, after all other fields have been filled in.
pub fn set_cksum(&mut self) {
    let cksum = self.calculate_cksum();
    octal_into(&mut self.as_old_mut().cksum, cksum);
}
// Computes the header checksum: the sum of all 512 header bytes with the
// cksum field itself treated as if it were filled with ASCII spaces
// (the tar checksum convention).
fn calculate_cksum(&self) -> u32 {
    // Locate the cksum field's byte offset within the header via pointer
    // arithmetic so the substitution below covers exactly that range.
    let old = self.as_old();
    let start = old as *const _ as usize;
    let cksum_start = old.cksum.as_ptr() as *const _ as usize;
    let offset = cksum_start - start;
    let len = old.cksum.len();
    self.bytes[0..offset]
        .iter()
        .chain(iter::repeat(&b' ').take(len))
        .chain(&self.bytes[offset + len..])
        .fold(0, |a, b| a + (*b as u32))
}
// Shared (platform-independent) part of `set_metadata`/`set_metadata_in_mode`:
// delegates OS-specific fields to `fill_platform_from`, then normalizes size
// and device numbers.
fn fill_from(&mut self, meta: &fs::Metadata, mode: HeaderMode) {
    self.fill_platform_from(meta, mode);
    // Set size of directories to zero
    self.set_size(if meta.is_dir() || meta.file_type().is_symlink() {
        0
    } else {
        meta.len()
    });
    // Zero out device numbers for whichever extended format this header is.
    if let Some(ustar) = self.as_ustar_mut() {
        ustar.set_device_major(0);
        ustar.set_device_minor(0);
    }
    if let Some(gnu) = self.as_gnu_mut() {
        gnu.set_device_major(0);
        gnu.set_device_minor(0);
    }
}
// wasm32 has no filesystem metadata to draw from, so this is a hard stub.
#[cfg(target_arch = "wasm32")]
#[allow(unused_variables)]
fn fill_platform_from(&mut self, meta: &fs::Metadata, mode: HeaderMode) {
    unimplemented!();
}
// Unix flavor of `fill_platform_from`: fills mtime/uid/gid/mode and the entry
// type from `MetadataExt` accessors, honoring the requested `HeaderMode`.
#[cfg(unix)]
fn fill_platform_from(&mut self, meta: &fs::Metadata, mode: HeaderMode) {
    match mode {
        HeaderMode::Complete => {
            self.set_mtime(meta.mtime() as u64);
            self.set_uid(meta.uid() as u64);
            self.set_gid(meta.gid() as u64);
            self.set_mode(meta.mode() as u32);
        }
        HeaderMode::Deterministic => {
            // We could in theory set the mtime to zero here, but not all
            // tools seem to behave well when ingesting files with a 0
            // timestamp. For example rust-lang/cargo#9512 shows that lldb
            // doesn't ingest files with a zero timestamp correctly.
            //
            // We just need things to be deterministic here so just pick
            // something that isn't zero. This time, chosen after careful
            // deliberation, corresponds to Jul 23, 2006 -- the date of the
            // first commit for what would become Rust.
            self.set_mtime(1153704088);
            self.set_uid(0);
            self.set_gid(0);
            // Use a default umask value, but propagate the (user) execute bit.
            let fs_mode = if meta.is_dir() || (0o100 & meta.mode() == 0o100) {
                0o755
            } else {
                0o644
            };
            self.set_mode(fs_mode);
        }
    }
    // Note that if we are a GNU header we *could* set atime/ctime, except
    // the `tar` utility doesn't do that by default and it causes problems
    // with 7-zip [1].
    //
    // It's always possible to fill them out manually, so we just don't fill
    // it out automatically here.
    //
    // [1]: https://github.com/alexcrichton/tar-rs/issues/70
    // TODO: need to bind more file types
    self.set_entry_type(entry_type(meta.mode()));
    // Map the S_IFMT file-type bits onto tar entry types; anything
    // unrecognized becomes a blank (space) type byte.
    fn entry_type(mode: u32) -> EntryType {
        match mode as libc::mode_t & libc::S_IFMT {
            libc::S_IFREG => EntryType::file(),
            libc::S_IFLNK => EntryType::symlink(),
            libc::S_IFCHR => EntryType::character_special(),
            libc::S_IFBLK => EntryType::block_special(),
            libc::S_IFDIR => EntryType::dir(),
            libc::S_IFIFO => EntryType::fifo(),
            _ => EntryType::new(b' '),
        }
    }
}
// Windows flavor of `fill_platform_from`. There's no concept of a file mode
// on Windows, so modes are approximated from the directory/readonly bits.
#[cfg(windows)]
fn fill_platform_from(&mut self, meta: &fs::Metadata, mode: HeaderMode) {
    // There's no concept of a file mode on Windows, so do a best approximation here.
    match mode {
        HeaderMode::Complete => {
            self.set_uid(0);
            self.set_gid(0);
            // The dates listed in tarballs are always seconds relative to
            // January 1, 1970. On Windows, however, the timestamps are returned as
            // dates relative to January 1, 1601 (in 100ns intervals), so we need to
            // add in some offset for those dates.
            let mtime = (meta.last_write_time() / (1_000_000_000 / 100)) - 11644473600;
            self.set_mtime(mtime);
            let fs_mode = {
                const FILE_ATTRIBUTE_READONLY: u32 = 0x00000001;
                let readonly = meta.file_attributes() & FILE_ATTRIBUTE_READONLY;
                match (meta.is_dir(), readonly != 0) {
                    (true, false) => 0o755,
                    (true, true) => 0o555,
                    (false, false) => 0o644,
                    (false, true) => 0o444,
                }
            };
            self.set_mode(fs_mode);
        }
        HeaderMode::Deterministic => {
            self.set_uid(0);
            self.set_gid(0);
            // Use the same fixed, non-zero timestamp the unix branch uses so
            // deterministic archives are byte-identical across platforms
            // (previously this was an unrelated 123456789).
            self.set_mtime(1153704088); // see above in unix
            let fs_mode = if meta.is_dir() { 0o755 } else { 0o644 };
            self.set_mode(fs_mode);
        }
    }
    // Map the std file type onto tar entry types; anything unrecognized
    // becomes a blank (space) type byte.
    let ft = meta.file_type();
    self.set_entry_type(if ft.is_dir() {
        EntryType::dir()
    } else if ft.is_file() {
        EntryType::file()
    } else if ft.is_symlink() {
        EntryType::symlink()
    } else {
        EntryType::new(b' ')
    });
}
// Adds every successfully-decoded header field to the debug-struct builder;
// corrupted fields are silently skipped so `Debug` never fails.
fn debug_fields(&self, b: &mut fmt::DebugStruct) {
    if let Ok(entry_size) = self.entry_size() {
        b.field("entry_size", &entry_size);
    }
    if let Ok(size) = self.size() {
        b.field("size", &size);
    }
    if let Ok(path) = self.path() {
        b.field("path", &path);
    }
    if let Ok(link_name) = self.link_name() {
        b.field("link_name", &link_name);
    }
    if let Ok(mode) = self.mode() {
        b.field("mode", &DebugAsOctal(mode));
    }
    if let Ok(uid) = self.uid() {
        b.field("uid", &uid);
    }
    if let Ok(gid) = self.gid() {
        b.field("gid", &gid);
    }
    if let Ok(mtime) = self.mtime() {
        b.field("mtime", &mtime);
    }
    if let Ok(username) = self.username() {
        b.field("username", &username);
    }
    if let Ok(groupname) = self.groupname() {
        b.field("groupname", &groupname);
    }
    if let Ok(device_major) = self.device_major() {
        b.field("device_major", &device_major);
    }
    if let Ok(device_minor) = self.device_minor() {
        b.field("device_minor", &device_minor);
    }
    if let Ok(cksum) = self.cksum() {
        b.field("cksum", &cksum);
        // Also report whether the stored checksum matches a recomputation.
        b.field("cksum_valid", &(cksum == self.calculate_cksum()));
    }
}
}
/// Newtype that renders its inner value in octal when debug-formatted
/// (used for the `mode` field in `debug_fields`).
struct DebugAsOctal<T>(T);
impl<T: fmt::Octal> fmt::Debug for DebugAsOctal<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Octal::fmt(&self.0, f)
    }
}
// Reinterprets a reference to `T` as a reference to `U`.
//
// SAFETY: callers must only use this between the header types, which are all
// plain 512-byte POD layouts; size and alignment equality are asserted here,
// so any bit pattern is a valid `U`.
unsafe fn cast<T, U>(a: &T) -> &U {
    assert_eq!(mem::size_of_val(a), mem::size_of::<U>());
    assert_eq!(mem::align_of_val(a), mem::align_of::<U>());
    &*(a as *const T as *const U)
}
// Mutable variant of `cast`; same safety contract (POD header types with
// identical size/alignment, which are asserted).
unsafe fn cast_mut<T, U>(a: &mut T) -> &mut U {
    assert_eq!(mem::size_of_val(a), mem::size_of::<U>());
    assert_eq!(mem::align_of_val(a), mem::align_of::<U>());
    &mut *(a as *mut T as *mut U)
}
impl Clone for Header {
    // Manual impl: a plain byte-wise copy of the 512-byte buffer.
    fn clone(&self) -> Header {
        Header { bytes: self.bytes }
    }
}
impl fmt::Debug for Header {
    // Delegate to the most specific format view that matches this header so
    // the debug output includes format-specific fields.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(me) = self.as_ustar() {
            me.fmt(f)
        } else if let Some(me) = self.as_gnu() {
            me.fmt(f)
        } else {
            self.as_old().fmt(f)
        }
    }
}
impl OldHeader {
    /// Views this as a normal `Header`
    pub fn as_header(&self) -> &Header {
        // SAFETY: `OldHeader` and `Header` are both 512-byte POD layouts;
        // `cast` asserts matching size/alignment.
        unsafe { cast(self) }
    }
    /// Views this as a normal `Header`, mutably
    pub fn as_header_mut(&mut self) -> &mut Header {
        unsafe { cast_mut(self) }
    }
}
impl fmt::Debug for OldHeader {
    // Render via the shared `Header::debug_fields` helper.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut f = f.debug_struct("OldHeader");
        self.as_header().debug_fields(&mut f);
        f.finish()
    }
}
impl UstarHeader {
    /// See `Header::path_bytes`
    pub fn path_bytes(&self) -> Cow<[u8]> {
        // Fast path: no prefix set and no `\` to normalize, so the name
        // field can be borrowed as-is.
        if self.prefix[0] == 0 && !self.name.contains(&b'\\') {
            Cow::Borrowed(truncate(&self.name))
        } else {
            // Otherwise re-join the UStar prefix and name with a `/`.
            let mut bytes = Vec::new();
            let prefix = truncate(&self.prefix);
            if !prefix.is_empty() {
                bytes.extend_from_slice(prefix);
                bytes.push(b'/');
            }
            bytes.extend_from_slice(truncate(&self.name));
            Cow::Owned(bytes)
        }
    }
    /// Gets the path in a "lossy" way, used for error reporting ONLY.
    fn path_lossy(&self) -> String {
        String::from_utf8_lossy(&self.path_bytes()).to_string()
    }
    /// See `Header::set_path`
    pub fn set_path<P: AsRef<Path>>(&mut self, p: P) -> io::Result<()> {
        self._set_path(p.as_ref())
    }
    // Non-generic core of `set_path`: stores the path in `name`, splitting a
    // `prefix` off when it does not fit.
    fn _set_path(&mut self, path: &Path) -> io::Result<()> {
        // This can probably be optimized quite a bit more, but for now just do
        // something that's relatively easy and readable.
        //
        // First up, if the path fits within `self.name` then we just shove it
        // in there. If not then we try to split it between some existing path
        // components where it can fit in name/prefix. To do that we peel off
        // enough until the path fits in `prefix`, then we try to put both
        // halves into their destination.
        let bytes = path2bytes(path)?;
        let (maxnamelen, maxprefixlen) = (self.name.len(), self.prefix.len());
        if bytes.len() <= maxnamelen {
            copy_path_into(&mut self.name, path, false).map_err(|err| {
                io::Error::new(
                    err.kind(),
                    format!("{} when setting path for {}", err, self.path_lossy()),
                )
            })?;
        } else {
            // Walk up the ancestor chain until the remaining prefix fits in
            // the prefix field; bail out if we run out of ancestors.
            let mut prefix = path;
            let mut prefixlen;
            loop {
                match prefix.parent() {
                    Some(parent) => prefix = parent,
                    None => {
                        return Err(other(&format!(
                            "path cannot be split to be inserted into archive: {}",
                            path.display()
                        )));
                    }
                }
                prefixlen = path2bytes(prefix)?.len();
                if prefixlen <= maxprefixlen {
                    break;
                }
            }
            copy_path_into(&mut self.prefix, prefix, false).map_err(|err| {
                io::Error::new(
                    err.kind(),
                    format!("{} when setting path for {}", err, self.path_lossy()),
                )
            })?;
            // The `+ 1` skips the separator between the prefix and the rest.
            let path = bytes2path(Cow::Borrowed(&bytes[prefixlen + 1..]))?;
            copy_path_into(&mut self.name, &path, false).map_err(|err| {
                io::Error::new(
                    err.kind(),
                    format!("{} when setting path for {}", err, self.path_lossy()),
                )
            })?;
        }
        Ok(())
    }
    /// See `Header::username_bytes`
    pub fn username_bytes(&self) -> &[u8] {
        truncate(&self.uname)
    }
    /// See `Header::set_username`
    pub fn set_username(&mut self, name: &str) -> io::Result<()> {
        copy_into(&mut self.uname, name.as_bytes()).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when setting username for {}", err, self.path_lossy()),
            )
        })
    }
    /// See `Header::groupname_bytes`
    pub fn groupname_bytes(&self) -> &[u8] {
        truncate(&self.gname)
    }
    /// See `Header::set_groupname`
    pub fn set_groupname(&mut self, name: &str) -> io::Result<()> {
        copy_into(&mut self.gname, name.as_bytes()).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when setting groupname for {}", err, self.path_lossy()),
            )
        })
    }
    /// See `Header::device_major`
    pub fn device_major(&self) -> io::Result<u32> {
        octal_from(&self.dev_major)
            .map(|u| u as u32)
            .map_err(|err| {
                io::Error::new(
                    err.kind(),
                    format!(
                        "{} when getting device_major for {}",
                        err,
                        self.path_lossy()
                    ),
                )
            })
    }
    /// See `Header::set_device_major`
    pub fn set_device_major(&mut self, major: u32) {
        octal_into(&mut self.dev_major, major);
    }
    /// See `Header::device_minor`
    pub fn device_minor(&self) -> io::Result<u32> {
        octal_from(&self.dev_minor)
            .map(|u| u as u32)
            .map_err(|err| {
                io::Error::new(
                    err.kind(),
                    format!(
                        "{} when getting device_minor for {}",
                        err,
                        self.path_lossy()
                    ),
                )
            })
    }
    /// See `Header::set_device_minor`
    pub fn set_device_minor(&mut self, minor: u32) {
        octal_into(&mut self.dev_minor, minor);
    }
    /// Views this as a normal `Header`
    pub fn as_header(&self) -> &Header {
        // SAFETY: both types are 512-byte POD layouts; `cast` asserts
        // matching size/alignment.
        unsafe { cast(self) }
    }
    /// Views this as a normal `Header`, mutably
    pub fn as_header_mut(&mut self) -> &mut Header {
        unsafe { cast_mut(self) }
    }
}
impl fmt::Debug for UstarHeader {
    // Render via the shared `Header::debug_fields` helper.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut f = f.debug_struct("UstarHeader");
        self.as_header().debug_fields(&mut f);
        f.finish()
    }
}
impl GnuHeader {
    /// See `Header::username_bytes`
    pub fn username_bytes(&self) -> &[u8] {
        truncate(&self.uname)
    }
    /// Gets the fullname (group:user) in a "lossy" way, used for error reporting ONLY.
    fn fullname_lossy(&self) -> String {
        format!(
            "{}:{}",
            String::from_utf8_lossy(self.groupname_bytes()),
            String::from_utf8_lossy(self.username_bytes()),
        )
    }
    /// See `Header::set_username`
    pub fn set_username(&mut self, name: &str) -> io::Result<()> {
        copy_into(&mut self.uname, name.as_bytes()).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!(
                    "{} when setting username for {}",
                    err,
                    self.fullname_lossy()
                ),
            )
        })
    }
    /// See `Header::groupname_bytes`
    pub fn groupname_bytes(&self) -> &[u8] {
        truncate(&self.gname)
    }
    /// See `Header::set_groupname`
    pub fn set_groupname(&mut self, name: &str) -> io::Result<()> {
        copy_into(&mut self.gname, name.as_bytes()).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!(
                    "{} when setting groupname for {}",
                    err,
                    self.fullname_lossy()
                ),
            )
        })
    }
    /// See `Header::device_major`
    pub fn device_major(&self) -> io::Result<u32> {
        octal_from(&self.dev_major)
            .map(|u| u as u32)
            .map_err(|err| {
                io::Error::new(
                    err.kind(),
                    format!(
                        "{} when getting device_major for {}",
                        err,
                        self.fullname_lossy()
                    ),
                )
            })
    }
    /// See `Header::set_device_major`
    pub fn set_device_major(&mut self, major: u32) {
        octal_into(&mut self.dev_major, major);
    }
    /// See `Header::device_minor`
    pub fn device_minor(&self) -> io::Result<u32> {
        octal_from(&self.dev_minor)
            .map(|u| u as u32)
            .map_err(|err| {
                io::Error::new(
                    err.kind(),
                    format!(
                        "{} when getting device_minor for {}",
                        err,
                        self.fullname_lossy()
                    ),
                )
            })
    }
    /// See `Header::set_device_minor`
    pub fn set_device_minor(&mut self, minor: u32) {
        octal_into(&mut self.dev_minor, minor);
    }
    /// Returns the last access time (atime field) in Unix time format
    pub fn atime(&self) -> io::Result<u64> {
        num_field_wrapper_from(&self.atime).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when getting atime for {}", err, self.fullname_lossy()),
            )
        })
    }
    /// Encodes the `atime` provided into this header.
    ///
    /// Note that this time is typically a number of seconds passed since
    /// January 1, 1970.
    pub fn set_atime(&mut self, atime: u64) {
        num_field_wrapper_into(&mut self.atime, atime);
    }
    /// Returns the status-change time (ctime field) in Unix time format
    pub fn ctime(&self) -> io::Result<u64> {
        num_field_wrapper_from(&self.ctime).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when getting ctime for {}", err, self.fullname_lossy()),
            )
        })
    }
    /// Encodes the `ctime` provided into this header.
    ///
    /// Note that this time is typically a number of seconds passed since
    /// January 1, 1970.
    pub fn set_ctime(&mut self, ctime: u64) {
        num_field_wrapper_into(&mut self.ctime, ctime);
    }
    /// Returns the "real size" of the file this header represents.
    ///
    /// This is applicable for sparse files where the returned size here is the
    /// size of the entire file after the sparse regions have been filled in.
    pub fn real_size(&self) -> io::Result<u64> {
        octal_from(&self.realsize).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!(
                    "{} when getting real_size for {}",
                    err,
                    self.fullname_lossy()
                ),
            )
        })
    }
    /// Indicates whether this header will be followed by additional
    /// sparse-header records.
    ///
    /// Note that this is handled internally by this library, and is likely only
    /// interesting if a `raw` iterator is being used.
    pub fn is_extended(&self) -> bool {
        self.isextended[0] == 1
    }
    /// Views this as a normal `Header`
    pub fn as_header(&self) -> &Header {
        // SAFETY: both types are 512-byte POD layouts; `cast` asserts
        // matching size/alignment.
        unsafe { cast(self) }
    }
    /// Views this as a normal `Header`, mutably
    pub fn as_header_mut(&mut self) -> &mut Header {
        unsafe { cast_mut(self) }
    }
}
impl fmt::Debug for GnuHeader {
    // Renders the common header fields plus the GNU-only atime/ctime,
    // extension flag, and non-empty sparse entries.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut f = f.debug_struct("GnuHeader");
        self.as_header().debug_fields(&mut f);
        if let Ok(atime) = self.atime() {
            f.field("atime", &atime);
        }
        if let Ok(ctime) = self.ctime() {
            f.field("ctime", &ctime);
        }
        f.field("is_extended", &self.is_extended())
            .field("sparse", &DebugSparseHeaders(&self.sparse))
            .finish()
    }
}
/// Debug helper that renders only the non-empty entries of a sparse-header
/// slice as a list.
struct DebugSparseHeaders<'a>(&'a [GnuSparseHeader]);
impl<'a> fmt::Debug for DebugSparseHeaders<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut f = f.debug_list();
        for header in self.0 {
            if !header.is_empty() {
                f.entry(header);
            }
        }
        f.finish()
    }
}
impl GnuSparseHeader {
    /// Returns true if block is empty
    ///
    /// An unused sparse slot has NUL-leading (empty) offset or numbytes
    /// fields.
    pub fn is_empty(&self) -> bool {
        self.offset[0] == 0 || self.numbytes[0] == 0
    }
    /// Offset of the block from the start of the file
    ///
    /// Returns `Err` for a malformed `offset` field.
    pub fn offset(&self) -> io::Result<u64> {
        octal_from(&self.offset).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when getting offset from sparse header", err),
            )
        })
    }
    /// Length of the block
    ///
    /// Returns `Err` for a malformed `numbytes` field.
    pub fn length(&self) -> io::Result<u64> {
        octal_from(&self.numbytes).map_err(|err| {
            io::Error::new(
                err.kind(),
                format!("{} when getting length from sparse header", err),
            )
        })
    }
}
impl fmt::Debug for GnuSparseHeader {
    // Corrupted fields are silently skipped so `Debug` never fails.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut f = f.debug_struct("GnuSparseHeader");
        if let Ok(offset) = self.offset() {
            f.field("offset", &offset);
        }
        if let Ok(length) = self.length() {
            f.field("length", &length);
        }
        f.finish()
    }
}
impl GnuExtSparseHeader {
    /// Creates a new zero'd out sparse header entry.
    pub fn new() -> GnuExtSparseHeader {
        // SAFETY: this type consists only of byte arrays, for which
        // all-zeros is a valid value.
        unsafe { mem::zeroed() }
    }
    /// Returns a view into this header as a byte array.
    pub fn as_bytes(&self) -> &[u8; 512] {
        debug_assert_eq!(mem::size_of_val(self), 512);
        // SAFETY: the header is exactly 512 bytes of plain data
        // (debug-asserted above).
        unsafe { mem::transmute(self) }
    }
    /// Returns a mutable view into this header as a byte array.
    pub fn as_mut_bytes(&mut self) -> &mut [u8; 512] {
        debug_assert_eq!(mem::size_of_val(self), 512);
        unsafe { mem::transmute(self) }
    }
    /// Returns a slice of the underlying sparse headers.
    ///
    /// Some headers may represent empty chunks if both the offset and numbytes
    /// fields are 0.
    pub fn sparse(&self) -> &[GnuSparseHeader; 21] {
        &self.sparse
    }
    /// Indicates if another sparse header should be following this one.
    pub fn is_extended(&self) -> bool {
        self.isextended[0] == 1
    }
}
impl Default for GnuExtSparseHeader {
    // A default extended sparse header is all zeros.
    fn default() -> Self {
        Self::new()
    }
}
/// Decodes a NUL-truncated, whitespace-trimmed octal ASCII field into a
/// `u64`, reporting non-UTF-8 or non-octal contents as errors.
fn octal_from(slice: &[u8]) -> io::Result<u64> {
    let digits = truncate(slice);
    let text = str::from_utf8(digits).map_err(|_| {
        other(&format!(
            "numeric field did not have utf-8 text: {}",
            String::from_utf8_lossy(digits)
        ))
    })?;
    u64::from_str_radix(text.trim(), 8)
        .map_err(|_| other(&format!("numeric field was not a number: {}", text)))
}
/// Encodes `val` as octal ASCII into `dst`, right-aligned and zero-padded,
/// leaving the final byte untouched (the NUL/space terminator position).
/// If the value has more digits than fit, the most significant digits are
/// silently dropped.
fn octal_into<T: fmt::Octal>(dst: &mut [u8], val: T) {
    let digits = format!("{:o}", val).into_bytes();
    if dst.is_empty() {
        return;
    }
    let last = dst.len() - 1;
    let field = &mut dst[..last];
    let keep = digits.len().min(field.len());
    let pad = field.len() - keep;
    for slot in field[..pad].iter_mut() {
        *slot = b'0';
    }
    // Copy the low-order `keep` digits into the tail of the field.
    field[pad..].copy_from_slice(&digits[digits.len() - keep..]);
}
// Wrapper to figure out if we should fill the header field using tar's numeric
// extension (binary) or not (octal).
//
// 8589934592 is 2^33: a 12-byte field holds 11 octal digits (33 bits) plus a
// terminator, so anything >= 2^33 needs binary. 2097152 is 2^21: an 8-byte
// field holds 7 octal digits (21 bits), so smaller fields switch earlier.
fn num_field_wrapper_into(dst: &mut [u8], src: u64) {
    if src >= 8589934592 || (src >= 2097152 && dst.len() == 8) {
        numeric_extended_into(dst, src);
    } else {
        octal_into(dst, src);
    }
}
// Wrapper to figure out if we should read the header field in binary (numeric
// extension) or octal (standard encoding).
//
// A set high bit in the first byte marks the base-256 binary encoding.
fn num_field_wrapper_from(src: &[u8]) -> io::Result<u64> {
    if src[0] & 0x80 != 0 {
        Ok(numeric_extended_from(src))
    } else {
        octal_from(src)
    }
}
// When writing numeric fields in the extended form, the high bit of the
// first byte is set to 1 and the remainder of the field is treated as binary
// instead of octal ascii.
// This handles writing u64 to 8 (uid, gid) or 12 (size, *time) bytes array.
fn numeric_extended_into(dst: &mut [u8], src: u64) {
    let len = dst.len();
    // Zero any leading pad bytes beyond the 8-byte value.
    for pad in dst[..len - 8].iter_mut() {
        *pad = 0;
    }
    // Write the value big-endian into the trailing 8 bytes.
    for (i, slot) in dst[len - 8..].iter_mut().enumerate() {
        *slot = (src >> (8 * (7 - i))) as u8;
    }
    // Flag the field as binary-encoded.
    dst[0] |= 0x80;
}
// Reads a base-256 (binary) numeric field: for an 8-byte field the first
// byte participates with its extension flag stripped; for longer fields only
// the trailing 8 bytes are read big-endian.
fn numeric_extended_from(src: &[u8]) -> u64 {
    let (seed, rest) = if src.len() == 8 {
        // Strip the extension-flag bit from the leading byte.
        ((src[0] ^ 0x80) as u64, &src[1..])
    } else {
        (0, &src[src.len() - 8..])
    };
    rest.iter().fold(seed, |acc, &byte| (acc << 8) | byte as u64)
}
/// Returns the portion of `slice` before the first NUL byte (or the whole
/// slice if no NUL is present).
fn truncate(slice: &[u8]) -> &[u8] {
    let end = slice
        .iter()
        .position(|&byte| byte == 0)
        .unwrap_or(slice.len());
    &slice[..end]
}
/// Copies `bytes` into the `slot` provided, returning an error if the `bytes`
/// array is too long or if it contains any nul bytes.
/// Copies `bytes` into the `slot` provided, returning an error if the `bytes`
/// array is too long or if it contains any nul bytes.
fn copy_into(slot: &mut [u8], bytes: &[u8]) -> io::Result<()> {
    if bytes.len() > slot.len() {
        return Err(other("provided value is too long"));
    }
    if bytes.iter().any(|b| *b == 0) {
        return Err(other("provided value contains a nul byte"));
    }
    // Copy the value followed by a NUL terminator; `zip` stops at the shorter
    // side, so a value exactly filling the slot simply omits the terminator.
    for (dst, src) in slot.iter_mut().zip(bytes.iter().chain(Some(&0))) {
        *dst = *src;
    }
    Ok(())
}
/// Copies `path` into the `slot` provided
///
/// Returns an error if:
///
/// * the path is too long to fit
/// * a nul byte was found
/// * an invalid path component is encountered (e.g. a root path or parent dir)
/// * the path itself is empty
/// Copies `path` into the `slot` provided
///
/// Returns an error if:
///
/// * the path is too long to fit
/// * a nul byte was found
/// * an invalid path component is encountered (e.g. a root path or parent dir)
/// * the path itself is empty
fn copy_path_into(mut slot: &mut [u8], path: &Path, is_link_name: bool) -> io::Result<()> {
    let mut emitted = false;
    let mut needs_slash = false;
    for component in path.components() {
        let bytes = path2bytes(Path::new(component.as_os_str()))?;
        match (component, is_link_name) {
            // Absolute/prefixed paths are only tolerated for link names, where
            // an absolute target is legitimate.
            (Component::Prefix(..), false) | (Component::RootDir, false) => {
                return Err(other("paths in archives must be relative"));
            }
            (Component::ParentDir, false) => {
                return Err(other("paths in archives must not have `..`"));
            }
            // Allow "./" as the path
            (Component::CurDir, false) if path.components().count() == 1 => {}
            (Component::CurDir, false) => continue,
            (Component::Normal(_), _) | (_, true) => {}
        };
        // Join consecutive components with '/' (never emitted before the
        // first component).
        if needs_slash {
            copy(&mut slot, b"/")?;
        }
        if bytes.contains(&b'/') {
            if let Component::Normal(..) = component {
                return Err(other("path component in archive cannot contain `/`"));
            }
        }
        copy(&mut slot, &*bytes)?;
        if &*bytes != b"/" {
            needs_slash = true;
        }
        emitted = true;
    }
    if !emitted {
        return Err(other("paths in archives must have at least one component"));
    }
    // `components()` drops a trailing slash, so re-append it explicitly.
    if ends_with_slash(path) {
        copy(&mut slot, &[b'/'])?;
    }
    return Ok(());

    // Writes `bytes` at the front of `*slot`, then shrinks `*slot` to the
    // remaining tail so successive calls append. `mem::replace` detaches the
    // mutable borrow so the slice can be re-sliced in place.
    fn copy(slot: &mut &mut [u8], bytes: &[u8]) -> io::Result<()> {
        copy_into(*slot, bytes)?;
        let tmp = mem::replace(slot, &mut []);
        *slot = &mut tmp[bytes.len()..];
        Ok(())
    }
}
// Checks for a trailing '/'. On wasm the companion `path2bytes` already
// rejects non-Unicode paths, so the lossy conversion cannot drop a trailing
// separator here.
#[cfg(target_arch = "wasm32")]
fn ends_with_slash(p: &Path) -> bool {
    p.to_string_lossy().ends_with('/')
}
// Checks for a trailing separator; on Windows either '/' or '\\' counts, and
// the path is inspected as UTF-16 code units.
#[cfg(windows)]
fn ends_with_slash(p: &Path) -> bool {
    match p.as_os_str().encode_wide().last() {
        Some(unit) => unit == u16::from(b'/') || unit == u16::from(b'\\'),
        None => false,
    }
}
// Checks for a trailing '/'; unix paths are raw bytes, so inspect the final
// byte directly.
#[cfg(unix)]
fn ends_with_slash(p: &Path) -> bool {
    p.as_os_str().as_bytes().last() == Some(&b'/')
}
/// Converts a path to the byte form stored in archives. Non-Unicode paths are
/// rejected, and Windows-style '\\' separators are normalized to '/'.
#[cfg(any(windows, target_arch = "wasm32"))]
pub fn path2bytes(p: &Path) -> io::Result<Cow<[u8]>> {
    let utf8 = p
        .as_os_str()
        .to_str()
        .ok_or_else(|| other(&format!("path {} was not valid Unicode", p.display())))?;
    let bytes = utf8.as_bytes();
    if !bytes.contains(&b'\\') {
        return Ok(Cow::Borrowed(bytes));
    }
    // Normalize to Unix-style path separators
    let converted: Vec<u8> = bytes
        .iter()
        .map(|&b| if b == b'\\' { b'/' } else { b })
        .collect();
    Ok(Cow::Owned(converted))
}
#[cfg(unix)]
/// On unix this will never fail: the path's raw bytes are borrowed directly.
pub fn path2bytes(p: &Path) -> io::Result<Cow<[u8]>> {
    Ok(Cow::Borrowed(p.as_os_str().as_bytes()))
}
#[cfg(windows)]
/// On windows we cannot accept non-Unicode bytes because it
/// is impossible to convert it to UTF-16.
pub fn bytes2path(bytes: Cow<[u8]>) -> io::Result<Cow<Path>> {
    // Shared error constructor for both branches.
    fn not_unicode(v: &[u8]) -> io::Error {
        other(&format!(
            "only Unicode paths are supported on Windows: {}",
            String::from_utf8_lossy(v)
        ))
    }
    // Preserve borrowed-ness: a borrowed input yields a borrowed path.
    match bytes {
        Cow::Borrowed(b) => str::from_utf8(b)
            .map(|s| Cow::Borrowed(Path::new(s)))
            .map_err(|_| not_unicode(b)),
        Cow::Owned(b) => String::from_utf8(b)
            .map(|s| Cow::Owned(PathBuf::from(s)))
            .map_err(|uerr| not_unicode(&uerr.into_bytes())),
    }
}
#[cfg(unix)]
/// On unix this operation can never fail: paths are arbitrary bytes.
pub fn bytes2path(bytes: Cow<[u8]>) -> io::Result<Cow<Path>> {
    use std::ffi::{OsStr, OsString};
    // Preserve borrowed-ness: borrowed bytes yield a borrowed path.
    let path = match bytes {
        Cow::Borrowed(b) => Cow::Borrowed(Path::new(OsStr::from_bytes(b))),
        Cow::Owned(b) => Cow::Owned(PathBuf::from(OsString::from_vec(b))),
    };
    Ok(path)
}
/// Converts archive bytes back to a path; wasm requires valid UTF-8.
#[cfg(target_arch = "wasm32")]
pub fn bytes2path(bytes: Cow<[u8]>) -> io::Result<Cow<Path>> {
    // Preserve borrowed-ness: borrowed bytes yield a borrowed path.
    match bytes {
        Cow::Borrowed(b) => {
            let s = str::from_utf8(b).map_err(invalid_utf8)?;
            Ok(Cow::Borrowed(Path::new(s)))
        }
        Cow::Owned(b) => {
            let s = String::from_utf8(b).map_err(invalid_utf8)?;
            Ok(Cow::Owned(PathBuf::from(s)))
        }
    }
}
// Maps any UTF-8 decoding error to a generic InvalidData I/O error; the
// original error value is discarded.
#[cfg(target_arch = "wasm32")]
fn invalid_utf8<T>(_: T) -> io::Error {
    io::Error::new(io::ErrorKind::InvalidData, "Invalid utf-8")
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
pub mod psk;
use crate::rsna::{Dot11VerifiedKeyFrame, UpdateSink};
use failure;
use zerocopy::ByteSlice;
/// Supported authentication methods; only PSK exists so far.
#[derive(Debug, PartialEq)]
pub enum Method {
    Psk(psk::Psk),
}
impl Method {
    /// Builds the authentication method described by the configuration.
    pub fn from_config(cfg: Config) -> Result<Method, failure::Error> {
        match cfg {
            Config::ComputedPsk(psk) => Ok(Method::Psk(psk)),
        }
    }
    /// Handles an incoming EAPOL key frame; currently a no-op that always
    /// succeeds without producing updates.
    // Unused as only PSK is supported so far.
    pub fn on_eapol_key_frame<B: ByteSlice>(
        &self,
        _update_sink: &mut UpdateSink,
        _frame: Dot11VerifiedKeyFrame<B>,
    ) -> Result<(), failure::Error> {
        Ok(())
    }
}
/// Configuration from which a [`Method`] is constructed.
#[derive(Debug, PartialEq)]
pub enum Config {
    ComputedPsk(psk::Psk),
}
|
#![feature(num_as_ne_bytes)]
#![feature(option_result_contains)]
pub mod database;
pub mod user;
pub mod chatroom;
pub mod data;
pub mod protocol;
pub mod ui; |
use derive_more::Display;
use std::fmt::{self, Debug, Display, Formatter};
/// Boxed, thread-safe error used as the underlying cause in [`Error`] variants.
type BoxError = Box<dyn std::error::Error + Send + Sync>;
/// A set of errors that can occur during parsing multipart stream and in other operations.
#[derive(Display)]
#[display(fmt = "multer: {}")]
pub enum Error {
    /// An unknown field is detected when multipart [`constraints`](./struct.Constraints.html#method.allowed_fields) are added.
    #[display(
        fmt = "An unknown field is detected: {}",
        "field_name.as_deref().unwrap_or(\"<unknown>\")"
    )]
    UnknownField { field_name: Option<String> },
    /// The field data is found incomplete.
    #[display(
        fmt = "Incomplete field data for field: {}",
        "field_name.as_deref().unwrap_or(\"<unknown>\")"
    )]
    IncompleteFieldData { field_name: Option<String> },
    /// Couldn't read the field headers completely.
    #[display(fmt = "Incomplete headers, couldn't read the field headers completely")]
    IncompleteHeaders,
    /// Failed to read headers.
    #[display(fmt = "Failed to read headers: {}", _0)]
    ReadHeaderFailed(BoxError),
    /// Failed to decode the field's raw header name to [`HeaderName`](https://docs.rs/http/0.2.1/http/header/struct.HeaderName.html) type.
    #[display(fmt = "Failed to decode the field's raw header name: {}", cause)]
    DecodeHeaderName { name: String, cause: BoxError },
    /// Failed to decode the field's raw header value to [`HeaderValue`](https://docs.rs/http/0.2.1/http/header/struct.HeaderValue.html) type.
    #[display(fmt = "Failed to decode the field's raw header value: {}", cause)]
    DecodeHeaderValue { value: Vec<u8>, cause: BoxError },
    /// Multipart stream is incomplete.
    #[display(fmt = "Multipart stream is incomplete")]
    IncompleteStream,
    /// The incoming field size exceeded the maximum limit.
    #[display(
        fmt = "Incoming field size exceeded the maximum limit: {} bytes, field name: {}",
        limit,
        "field_name.as_deref().unwrap_or(\"<unknown>\")"
    )]
    FieldSizeExceeded { limit: u64, field_name: Option<String> },
    /// The incoming stream size exceeded the maximum limit.
    #[display(fmt = "Stream size exceeded the maximum limit: {} bytes", limit)]
    StreamSizeExceeded { limit: u64 },
    /// Stream read failed.
    #[display(fmt = "Stream read failed: {}", _0)]
    StreamReadFailed(BoxError),
    /// Failed to lock the multipart shared state for any changes.
    #[display(fmt = "Couldn't lock the multipart state: {}", _0)]
    LockFailure(BoxError),
    /// The `Content-Type` header is not `multipart/form-data`.
    #[display(fmt = "The Content-Type is not multipart/form-data")]
    NoMultipart,
    /// Failed to convert the `Content-Type` to [`mime::Mime`](https://docs.rs/mime/0.3.16/mime/struct.Mime.html) type.
    #[display(fmt = "Failed to convert the Content-Type to `mime::Mime` type: {}", _0)]
    DecodeContentType(BoxError),
    /// No boundary found in `Content-Type` header.
    #[display(fmt = "No boundary found in Content-Type header")]
    NoBoundary,
    /// Failed to decode the field data as `JSON` in [`field.json()`](./struct.Field.html#method.json) method.
    #[cfg(feature = "json")]
    #[display(fmt = "Failed to decode the field data as JSON: {}", _0)]
    DecodeJson(BoxError),
    // Hidden variant reserving the right to add variants without a breaking
    // change; never construct or match on it.
    #[doc(hidden)]
    __Nonexhaustive,
}
// Debug intentionally mirrors Display so logs show the descriptive message
// rather than a structural dump.
impl Debug for Error {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}
impl std::error::Error for Error {}
// Equality is defined on the rendered message: two errors with different
// payloads but identical Display output compare equal.
impl PartialEq for Error {
    fn eq(&self, other: &Self) -> bool {
        self.to_string().eq(&other.to_string())
    }
}
impl Eq for Error {}
|
use crate::{DocBase, VarType};
// Rendered verbatim into the `description` field of the generated doc entry.
const DESCRIPTION: &'static str = r#"
The dmi function returns the directional movement index.
"#;
// Pine-script usage sample shown verbatim in the documentation.
const EXAMPLE: &'static str = r#"
```pine
study(title="Directional Movement Index", shorttitle="DMI", format=format.price, precision=4)
len = input(17, minval=1, title="DI Length")
lensig = input(14, title="ADX Smoothing", minval=1, maxval=50)
[diplus, diminus, adx] = dmi(len, lensig)
plot(adx, color=color.red, title="ADX")
plot(diplus, color=color.blue, title="+DI")
plot(diminus, color=color.orange, title="-DI")
```
"#;
// Argument table for the documentation entry.
const ARGUMENTS: &'static str = r#"
**diLength (int)** DI Period.
**adxSmoothing (int)** ADX Smoothing Period.
"#;
/// Builds the documentation entries for the `dmi` built-in (a single
/// function entry).
pub fn gen_doc() -> Vec<DocBase> {
    vec![DocBase {
        var_type: VarType::Function,
        name: "dmi",
        signatures: vec![],
        description: DESCRIPTION,
        example: EXAMPLE,
        returns: "Tuple of three DMI series: Positive Directional Movement (+DI), Negative Directional Movement (-DI) and Average Directional Movement Index (ADX).",
        arguments: ARGUMENTS,
        remarks: "",
        links: "[rsi](#fun-rsi) [tsi](#fun-tsi) [mfi](#fun-mfi)",
    }]
}
|
use std::fmt;
/// Side a piece belongs to.
#[derive(PartialEq, Copy, Clone)]
pub enum Color {
    White,
    Black,
}
/// The six standard chess piece kinds.
#[derive(PartialEq, Clone, Debug)]
pub enum PieceType {
    Pawn,
    Rook,
    Knight,
    Bishop,
    King,
    Queen,
}
/// A piece on the board.
#[derive(Clone)]
pub struct Piece {
    pub color: Color,
    pub piece_type: PieceType,
    // Primary step offset plus an optional secondary offset (see
    // `piece_make`) — presumably (file, rank) deltas; TODO confirm against
    // the move generator.
    pub movement: ((isize, isize), Option<(isize, isize)>),
    // Set once via `Piece::moved`; never cleared.
    pub has_moved: bool,
    // True for sliding pieces (rook/bishop/queen) per `piece_make`.
    pub moves_continous: bool,
}
impl fmt::Display for PieceType {
    /// Writes the piece's algebraic letter; pawns render as a space.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{}",
            match self {
                PieceType::Pawn => ' ',
                PieceType::Rook => 'R',
                PieceType::Knight => 'N',
                PieceType::Bishop => 'B',
                PieceType::King => 'K',
                PieceType::Queen => 'Q',
            }
        )
    }
}
impl Piece {
    /// Marks the piece as having moved. Idempotent: the unconditional store
    /// is observably identical to the previous checked write.
    pub fn moved(&mut self) {
        self.has_moved = true;
    }
}
impl fmt::Display for Piece {
    /// Writes the Unicode chess glyph for this piece; the color selects
    /// between the two glyph families, preserving the original mapping.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let symbol: char = match (self.color, &self.piece_type) {
            (Color::White, PieceType::Pawn) => '\u{265F}',
            (Color::White, PieceType::Rook) => '\u{265C}',
            (Color::White, PieceType::Knight) => '\u{265E}',
            (Color::White, PieceType::Bishop) => '\u{265D}',
            (Color::White, PieceType::King) => '\u{265A}',
            (Color::White, PieceType::Queen) => '\u{265B}',
            (Color::Black, PieceType::Pawn) => '\u{2659}',
            (Color::Black, PieceType::Rook) => '\u{2656}',
            (Color::Black, PieceType::Knight) => '\u{2658}',
            (Color::Black, PieceType::Bishop) => '\u{2657}',
            (Color::Black, PieceType::King) => '\u{2654}',
            (Color::Black, PieceType::Queen) => '\u{2655}',
        };
        write!(f, "{}", symbol)
    }
}
/// Constructs a fresh (unmoved) piece of the given color and kind.
pub fn piece_make(color: Color, piece_type: PieceType) -> Piece {
    // Step pattern: a primary offset plus an optional secondary offset.
    let movement = match &piece_type {
        PieceType::Pawn => ((0, 1), Some((1, 1))),
        PieceType::Rook => ((0, 1), None),
        PieceType::Knight => ((1, 2), Some((2, 1))),
        PieceType::Bishop => ((1, 1), None),
        PieceType::King => ((0, 1), Some((1, 1))),
        PieceType::Queen => ((0, 1), Some((1, 1))),
    };
    // Sliding pieces may repeat their step; the rest move exactly once.
    let moves_continous = match &piece_type {
        PieceType::Rook | PieceType::Bishop | PieceType::Queen => true,
        PieceType::Pawn | PieceType::Knight | PieceType::King => false,
    };
    Piece {
        color,
        piece_type,
        movement,
        moves_continous,
        has_moved: false,
    }
}
|
//! A mock state.
#![cfg(test)]
use async_trait::async_trait;
use futures::{
channel::mpsc::{Receiver, Sender},
future::BoxFuture,
SinkExt, StreamExt,
};
use k8s_openapi::{apimachinery::pkg::apis::meta::v1::ObjectMeta, Metadata};
/// The kind of item-scoped operation.
#[derive(Debug, PartialEq, Eq)]
pub enum OpKind {
    /// Item added.
    Add,
    /// Item updated.
    Update,
    /// Item deleted.
    Delete,
}
/// An event that's sent to the test scenario driver for operations flow.
pub enum ScenarioEvent<T>
where
    T: Metadata<Ty = ObjectMeta> + Send,
{
    /// An item-scoped operation, carrying the affected object and what
    /// happened to it.
    Item(T, OpKind),
    /// Resync operation.
    Resync,
    /// Maintenance is performed.
    Maintenance,
}
impl<T> ScenarioEvent<T>
where
    T: Metadata<Ty = ObjectMeta> + Send,
{
    /// Extracts the item and operation kind; panics on non-item events.
    pub fn unwrap_op(self) -> (T, OpKind) {
        if let ScenarioEvent::Item(val, op) = self {
            (val, op)
        } else {
            panic!("unwrap_op on non-item op")
        }
    }
}
/// Mock writer.
///
/// Uses channels to communicate with the test scenario driver.
///
/// When the call is made on the mock - sends an event to the `events_tx` and
/// waits for an action to conduct in response to the event via `actions_rx`.
///
/// Note: the only action available in the [`super::Write`] is to just continue
/// and return.
pub struct Writer<T>
where
    T: Metadata<Ty = ObjectMeta> + Send,
{
    // Operation announcements flow to the scenario driver here.
    events_tx: Sender<ScenarioEvent<T>>,
    // Each operation blocks until the driver sends a unit value here.
    actions_rx: Receiver<()>,
    // Channel pair for the optional maintenance flow; `None` disables
    // maintenance requests (see `MaintainedWrite::maintenance_request`).
    maintenance_request: Option<(Sender<()>, Receiver<()>)>,
}
impl<T> Writer<T>
where
    T: Metadata<Ty = ObjectMeta> + Send,
{
    /// Create a new mock writer.
    /// Takes:
    /// - `events_tx` - a message is sent here at the beginning of the
    /// operation.
    /// - `actions_rx` - a message is read from here before the operation
    /// returns.
    pub fn new(events_tx: Sender<ScenarioEvent<T>>, actions_rx: Receiver<()>) -> Self {
        Self {
            events_tx,
            actions_rx,
            maintenance_request: None,
        }
    }
    /// Create a new mock writer (with maintenance flow).
    /// Takes:
    /// - `events_tx` - a message is sent here at the beginning of the
    /// operation.
    /// - `actions_rx` - a message is read from here before the operation
    /// returns;
    /// - `maintenance_request_events_tx` - a message is sent here at the
    /// beginning of the maintenance request;
    /// - `maintenance_request_actions_rx` - a message is read from here before
    /// the maintenance request returns.
    pub fn new_with_maintenance(
        events_tx: Sender<ScenarioEvent<T>>,
        actions_rx: Receiver<()>,
        maintenance_request_events_tx: Sender<()>,
        maintenance_request_actions_rx: Receiver<()>,
    ) -> Self {
        // Delegate to `new`, then attach the maintenance channel pair.
        let mut writer = Self::new(events_tx, actions_rx);
        writer.maintenance_request = Some((
            maintenance_request_events_tx,
            maintenance_request_actions_rx,
        ));
        writer
    }
}
#[async_trait]
impl<T> super::Write for Writer<T>
where
    T: Metadata<Ty = ObjectMeta> + Send,
{
    type Item = T;
    // Every operation follows the same handshake: announce the event to the
    // scenario driver, then block until the driver releases us via
    // `actions_rx`. The unwraps treat a dropped driver as a test failure.
    async fn add(&mut self, item: Self::Item) {
        self.events_tx
            .send(ScenarioEvent::Item(item, OpKind::Add))
            .await
            .unwrap();
        self.actions_rx.next().await.unwrap();
    }
    async fn update(&mut self, item: Self::Item) {
        self.events_tx
            .send(ScenarioEvent::Item(item, OpKind::Update))
            .await
            .unwrap();
        self.actions_rx.next().await.unwrap();
    }
    async fn delete(&mut self, item: Self::Item) {
        self.events_tx
            .send(ScenarioEvent::Item(item, OpKind::Delete))
            .await
            .unwrap();
        self.actions_rx.next().await.unwrap();
    }
    async fn resync(&mut self) {
        self.events_tx.send(ScenarioEvent::Resync).await.unwrap();
        self.actions_rx.next().await.unwrap();
    }
}
#[async_trait]
impl<T> super::MaintainedWrite for Writer<T>
where
    T: Metadata<Ty = ObjectMeta> + Send,
{
    // Returns a handshake future only when the maintenance channel pair was
    // configured via `new_with_maintenance`; otherwise `None`.
    fn maintenance_request(&mut self) -> Option<BoxFuture<'_, ()>> {
        if let Some((ref mut events_tx, ref mut actions_rx)) = self.maintenance_request {
            Some(Box::pin(async move {
                events_tx.send(()).await.unwrap();
                actions_rx.next().await.unwrap();
            }))
        } else {
            None
        }
    }
    // Announces the maintenance event and blocks until the driver responds,
    // mirroring the item-scoped operations above.
    async fn perform_maintenance(&mut self) {
        self.events_tx
            .send(ScenarioEvent::Maintenance)
            .await
            .unwrap();
        self.actions_rx.next().await.unwrap();
    }
}
|
use anyhow::Context;
use pathfinder_common::{ContractAddress, StorageValue};
use stark_hash::Felt;
use crate::params::{params, RowExt};
/// This migration adds the system contract updates which were mistakenly never inserted.
///
/// Thankfully we can avoid looking these values up in the state trie as the values can
/// be entirely determined from past blocks.
///
/// Each block, the system contract at 0x1 gets a new storage item referencing the block number
/// and hash from 10 blocks in the past.
/// key = block number
/// value = block hash
pub(crate) fn migrate(tx: &rusqlite::Transaction<'_>) -> anyhow::Result<()> {
    // Pair each 0.12.0 block with the block 10 before it; that older block's
    // number/hash are exactly the storage update that was never written.
    let mut select = tx
        .prepare_cached(
            r"SELECT current.number, past.number, past.hash FROM starknet_blocks current
    JOIN starknet_versions ON current.version_id = starknet_versions.id
    JOIN starknet_blocks past ON current.number - 10 = past.number
    WHERE starknet_versions.version = '0.12.0'",
        )
        .context("Preparing select statement")?;
    let rows = select.query_map([], |row| {
        let current = row.get_block_number(0)?;
        let past = row.get_block_number(1)?;
        let hash = row.get_block_hash(2)?;
        Ok((current, past, hash))
    })?;
    let mut insert = tx.prepare_cached(
        "INSERT INTO storage_updates (block_number, contract_address, storage_address, storage_value) VALUES (?, ?, ?, ?)"
    )
    .context("Preparing insert statement")?;
    for result in rows {
        let (current, past, hash) = result?;
        // storage key = past block number (as a felt), value = past block hash,
        // written against the system contract at address 0x1.
        let past = StorageValue(Felt::from(past.get()));
        insert
            .execute(params![&current, &ContractAddress::ONE, &past, &hash])
            .context("Inserting storage update")?;
    }
    Ok(())
}
|
#![allow(unused_imports)]
use ::error::{
RedisError,
RedisErrorKind
};
use std::io;
use std::io::{
Error as IoError,
Cursor
};
use std::sync::Arc;
use std::str;
use std::collections::{
HashMap
};
use std::fmt::{
Write
};
use bytes::{
BytesMut,
BufMut,
Buf
};
use super::types::{
CR,
LF,
NULL,
FrameKind,
Frame,
SlotRange,
REDIS_CLUSTER_SLOTS,
SlaveNodes,
RedisCommandKind
};
use crc16::{
State,
XMODEM
};
use ::types::{
RedisValue
};
use std::rc::Rc;
// sub module so std::io::Read and std::io::BufRead and bytes::Buf traits don't collide on certain methods (take, etc)
mod readers {
    use std::io::prelude::*;
    use std::io::Cursor;
    use bytes::BytesMut;
    use super::{
        CR,
        LF,
        RedisError,
        pop_with_error
    };
    /// Reads an ASCII length prefix up to the next LF, strips the trailing
    /// CRLF, and parses the digits as an isize (RESP uses -1 for null).
    /// Errors if the CRLF terminator is missing or the digits don't parse.
    pub fn read_prefix_len(cursor: &mut Cursor<BytesMut>) -> Result<isize, RedisError> {
        let _guard = flame_start!("redis:read_prefix_len");
        let mut len_buf = Vec::new();
        let _ = cursor.read_until(LF as u8, &mut len_buf)?;
        // pop in reverse order: the buffer ends with CR LF.
        pop_with_error(&mut len_buf, LF)?;
        pop_with_error(&mut len_buf, CR)?;
        let len_str = String::from_utf8(len_buf)?;
        let out = len_str.parse::<isize>()?;
        Ok(out)
    }
    /// Reads bytes up to the next LF and returns the payload with the
    /// trailing CRLF removed.
    pub fn read_to_crlf(cursor: &mut Cursor<BytesMut>) -> Result<Vec<u8>, RedisError> {
        let _guard = flame_start!("redis:read_to_crlf");
        let mut payload = Vec::new();
        cursor.read_until(LF as u8, &mut payload)?;
        // check and remove the last two bytes
        pop_with_error(&mut payload, LF)?;
        pop_with_error(&mut payload, CR)?;
        Ok(payload)
    }
    /// Reads at most `len` bytes into `buf`, returning the number actually
    /// read (may be short if the cursor runs out of data).
    pub fn read_exact(cursor: &mut Cursor<BytesMut>, len: u64, buf: &mut Vec<u8>) -> Result<usize, RedisError> {
        let _guard = flame_start!("redis:read_exact");
        let mut take = cursor.take(len);
        let out = take.read_to_end(buf)?;
        Ok(out)
    }
}
/// Computes the XMODEM CRC-16 of the key's bytes, reduced modulo the cluster
/// slot count.
pub fn crc16_xmodem(key: &str) -> u16 {
    let _guard = flame_start!("redis:crc16_xmodem");
    State::<XMODEM>::calculate(key.as_bytes()) % REDIS_CLUSTER_SLOTS
}
/// Maps a key to its hash slot.
pub fn redis_crc16(key: &str) -> u16 {
let _guard = flame_start!("redis:redis_crc16");
let (mut i, mut j): (Option<usize>, Option<usize>) = (None, None);
for (idx, c) in key.chars().enumerate() {
if c == '{' {
i = Some(idx);
break;
}
}
if i.is_none() || (i.is_some() && i.unwrap() == key.len() - 1) {
return crc16_xmodem(key);
}
let i = i.unwrap();
for (idx, c) in key[i+1..].chars().enumerate() {
if c == '}' {
j = Some(idx);
break;
}
}
if j.is_none() {
return crc16_xmodem(key);
}
let j = j.unwrap();
let out = if i+j == key.len() || j == 0 {
crc16_xmodem(key)
}else{
crc16_xmodem(&key[i+1..i+j+1])
};
out
}
/// Finds the slot range containing `slot`, if any.
///
/// Fixed: the previous closed-interval search computed `slots.len() - 1`
/// (panicking on an empty vector) and `mid - 1` (panicking with a usize
/// underflow when the probe fell below the first range). The half-open
/// `[low, high)` form below has neither failure mode and returns `None`
/// instead.
pub fn binary_search(slots: &Vec<Rc<SlotRange>>, slot: u16) -> Option<Rc<SlotRange>> {
    let _guard = flame_start!("redis:binary_search");
    // NOTE(review): `>` (not `>=`) is preserved from the original; callers
    // that hash via crc16_xmodem never produce REDIS_CLUSTER_SLOTS itself.
    if slot > REDIS_CLUSTER_SLOTS {
        return None;
    }
    let (mut low, mut high) = (0usize, slots.len());
    while low < high {
        let mid = low + (high - low) / 2;
        let range = &slots[mid];
        if slot < range.start {
            high = mid;
        } else if slot > range.end {
            low = mid + 1;
        } else {
            return Some(range.clone());
        }
    }
    None
}
/// Parses the output of a cluster-nodes style status string into a map of
/// server address -> owned slot ranges, with connected replicas attached to
/// their master's ranges.
#[allow(unused_mut)]
pub fn parse_cluster_nodes(status: String) -> Result<HashMap<String, Vec<SlotRange>>, RedisError> {
    let mut out: HashMap<String, Vec<SlotRange>> = HashMap::new();
    // build out the slot ranges for the master nodes
    for line in status.lines() {
        // Fields are space separated: id, address, flags, master-id, ...,
        // link-state, then slot specs from index 8 on — presumably matching
        // CLUSTER NODES output; TODO confirm against the server version used.
        let parts: Vec<&str> = line.split(" ").collect();
        if parts.len() < 8 {
            return Err(RedisError::new(
                RedisErrorKind::ProtocolError, format!("Invalid cluster node status line {}.", line)
            ));
        }
        let id = parts[0].to_owned();
        if parts[2].contains("master") {
            let mut slots: Vec<SlotRange> = Vec::new();
            let server = parts[1];
            for slot in parts[8..].iter() {
                let inner_parts: Vec<&str> = slot.split("-").collect();
                // NOTE(review): a single-slot entry like "5460" (no dash) is
                // rejected here, as are importing/migrating slot markers —
                // confirm whether those can appear in this deployment.
                if inner_parts.len() < 2 {
                    return Err(RedisError::new(
                        RedisErrorKind::ProtocolError, format!("Invalid cluster node hash slot range {}.", slot)
                    ));
                }
                slots.push(SlotRange {
                    start: inner_parts[0].parse::<u16>()?,
                    end: inner_parts[1].parse::<u16>()?,
                    server: server.to_owned(),
                    id: id.clone(),
                    slaves: None
                });
            }
            out.insert(server.to_owned(), slots);
        }
    }
    // attach the slave nodes to the masters from the first loop
    for line in status.lines() {
        let parts: Vec<&str> = line.split(" ").collect();
        if parts.len() < 8 {
            return Err(RedisError::new(
                RedisErrorKind::ProtocolError, format!("Invalid cluster node status line {}.", line)
            ));
        }
        if parts[2].contains("slave") {
            let master_id = parts[3].to_owned();
            // Skip replicas that are not currently connected.
            if parts[7] != "connected" {
                continue;
            }
            // Linear scan for the last range owned by the replica's master.
            let mut master: Option<&mut SlotRange> = None;
            for (_, mut slots) in out.iter_mut() {
                for mut slot in slots.iter_mut() {
                    if slot.id == master_id {
                        master = Some(slot);
                    }
                }
            }
            let master = match master {
                Some(slot) => slot,
                None => return Err(RedisError::new(
                    RedisErrorKind::ProtocolError, format!("Invalid cluster node status line for slave node. (Missing master) {}.", line)
                ))
            };
            let server = parts[1].to_owned();
            let has_slaves = master.slaves.is_some();
            if has_slaves {
                if let Some(ref mut slaves) = master.slaves {
                    slaves.add(server);
                }
            }else{
                master.slaves = Some(SlaveNodes::new(vec![server]));
            }
        }
    }
    Ok(out)
}
// Extracts the first and rest words of a string and returns them in a tuple.
// Extracts the first and rest words of a string and returns them in a tuple.
// The remainder is re-joined with single spaces, collapsing any repeated
// whitespace from the input.
fn extract_first_word(s: String) -> (String, String) {
    let _guard = flame_start!("redis:extract_first_word");
    let mut words = s.split_whitespace();
    let first = words.next().unwrap_or("").to_owned();
    let rest = words.collect::<Vec<&str>>().join(" ");
    (first, rest)
}
/// Classifies a raw Redis error line into a typed `RedisError` by inspecting
/// its leading keyword(s); unrecognized responses keep the full text.
pub fn better_error(resp: String) -> RedisError {
    let _guard = flame_start!("redis:better_error");
    let (first, rest) = extract_first_word(resp.clone());
    match first.as_ref() {
        "" => RedisError::new(RedisErrorKind::Unknown, "No response!"),
        "ERR" => RedisError::new(RedisErrorKind::Unknown, rest),
        "WRONGTYPE" => RedisError::new(RedisErrorKind::InvalidArgument, rest),
        "Invalid" => {
            // A second keyword disambiguates "Invalid ..." messages.
            let (second, remainder) = extract_first_word(rest);
            match second.as_ref() {
                "argument(s)" | "Argument" => RedisError::new(RedisErrorKind::InvalidArgument, remainder),
                "command" | "Command" => RedisError::new(RedisErrorKind::InvalidCommand, remainder),
                _ => RedisError::new(RedisErrorKind::Unknown, resp),
            }
        }
        _ => RedisError::new(RedisErrorKind::Unknown, resp),
    }
}
/// Pops the final element of `d`, turning an empty vector into a protocol
/// error naming the byte that was expected. Note: the popped value itself is
/// returned unchecked.
pub fn pop_with_error<T>(d: &mut Vec<T>, expected: char) -> Result<T, RedisError> {
    d.pop().ok_or_else(|| {
        RedisError::new(
            RedisErrorKind::Unknown, format!("Missing final byte {}.", expected)
        )
    })
}
/// Consumes exactly two bytes from the cursor and verifies they are CR then
/// LF, erroring when fewer than two bytes remain or the bytes differ.
pub fn pop_trailing_crlf(d: &mut Cursor<BytesMut>) -> Result<(), RedisError> {
    let _guard = flame_start!("redis:pop_trailing_crlf");
    if d.remaining() < 2 {
        return Err(RedisError::new(
            RedisErrorKind::Unknown, "Missing final CRLF."
        ));
    }
    // Tuple fields evaluate left to right, preserving read order.
    let (first, second) = (d.get_u8(), d.get_u8());
    if first == CR as u8 && second == LF as u8 {
        Ok(())
    } else {
        Err(RedisError::new(
            RedisErrorKind::Unknown, "Missing final CRLF."
        ))
    }
}
/// Appends the RESP line terminator (CR followed by LF) to the buffer.
pub fn write_crlf(bytes: &mut BytesMut) {
    let _guard = flame_start!("redis:write_crlf");
    for &b in [CR as u8, LF as u8].iter() {
        bytes.put_u8(b);
    }
}
/// Detects MOVED/ASK cluster redirections in an error payload, returning the
/// corresponding frame carrying the redirect target (third token).
pub fn is_cluster_error(payload: &str) -> Option<Frame> {
    let _guard = flame_start!("redis:is_cluster_error");
    // only keep the IP here since this will result in the client's cluster state cache being reset anyways
    if payload.starts_with("MOVED") {
        Some(Frame::Moved(payload.split(" ").nth(2).unwrap().to_owned()))
    } else if payload.starts_with("ASK") {
        Some(Frame::Ask(payload.split(" ").nth(2).unwrap().to_owned()))
    } else {
        None
    }
}
// sure hope we have enough error messages
// sure hope we have enough error messages
/// Converts a pubsub frame (a 3-element array: type, channel, payload) into a
/// `(channel, payload)` pair, validating each element along the way.
pub fn frame_to_pubsub(frame: Frame) -> Result<(String, RedisValue), RedisError> {
    let _guard = flame_start!("redis:frame_to_pubsub");
    let out = if let Frame::Array(mut frames) = frame {
        if frames.len() != 3 {
            return Err(RedisError::new(RedisErrorKind::ProtocolError, "Invalid pubsub message frames."));
        }
        // popped in reverse order: [message_type, channel, payload]
        let payload = frames.pop().unwrap();
        let channel = frames.pop().unwrap();
        let message_type = frames.pop().unwrap();
        let message_type = match message_type.to_string() {
            Some(s) => s,
            None => {
                return Err(RedisError::new(RedisErrorKind::ProtocolError, "Invalid pubsub message type frame."))
            }
        };
        if message_type == "message" {
            let channel = match channel.to_string() {
                Some(c) => c,
                None => {
                    return Err(RedisError::new(RedisErrorKind::ProtocolError, "Invalid pubsub channel frame."))
                }
            };
            // the payload is a bulk string on pubsub messages
            if payload.kind() == FrameKind::BulkString {
                let payload = match payload.into_results() {
                    Ok(mut r) => r.pop(),
                    Err(e) => return Err(e)
                };
                if payload.is_none() {
                    Err(RedisError::new(RedisErrorKind::ProtocolError, "Invalid pubsub channel payload."))
                }else{
                    Ok((channel, payload.unwrap()))
                }
            }else{
                Err(RedisError::new(RedisErrorKind::ProtocolError, "Invalid pubsub payload frame type."))
            }
        }else{
            Err(RedisError::new(RedisErrorKind::ProtocolError, "Invalid pubsub message type."))
        }
    }else{
        Err(RedisError::new(RedisErrorKind::ProtocolError, "Invalid pubsub message frame."))
    };
    out
}
/// Returns whether the buffer ends with a CRLF pair.
///
/// Fixed: the previous implementation computed `bytes.len() - 1` and
/// `bytes.len() - 2` unconditionally, which underflowed (panicked) on empty
/// and some one-byte buffers. Buffers shorter than two bytes now simply
/// return `false`.
pub fn ends_with_crlf(bytes: &BytesMut) -> bool {
    let _guard = flame_start!("redis:ends_with_crlf");
    let len = bytes.len();
    if len < 2 {
        return false;
    }
    bytes.get(len - 2) == Some(&(CR as u8)) && bytes.get(len - 1) == Some(&(LF as u8))
}
/// Builds the extra argument frame for cluster/client/config commands, or
/// `None` for commands that carry no such argument.
pub fn command_args(kind: &RedisCommandKind) -> Option<Frame> {
    let _guard = flame_start!("redis:command_args");
    // Pick the argument string for the command family, if any.
    let arg = if kind.is_cluster_command() {
        kind.cluster_args()
    } else if kind.is_client_command() {
        kind.client_args()
    } else if kind.is_config_command() {
        kind.config_args()
    } else {
        None
    };
    arg.map(|a| Frame::BulkString(a.into_bytes()))
}
/// Validates an announced payload size against the optional configured
/// maximum; `None` means unlimited.
pub fn check_expected_size(expected: usize, max: &Option<usize>) -> Result<(), RedisError> {
    let _guard = flame_start!("redis:check_expected_size");
    match *max {
        Some(limit) if expected > limit => Err(RedisError::new(
            RedisErrorKind::ProtocolError, format!("Max value size exceeded. Actual: {}, Max: {}", expected, limit)
        )),
        _ => Ok(())
    }
}
// Default build: auth errors pass through untouched.
#[cfg(not(feature="ignore-auth-error"))]
fn check_auth_error(frame: Frame) -> Frame {
    frame
}
// https://i.imgur.com/RjpUxK4.png
// With the `ignore-auth-error` feature, the exact error Redis emits when AUTH
// is sent to a password-less server is rewritten into a successful "OK".
#[cfg(feature="ignore-auth-error")]
fn check_auth_error(frame: Frame) -> Frame {
    let is_auth_error = match frame {
        Frame::Error(ref s) => s == "ERR Client sent AUTH, but no password is set",
        _ => false
    };
    if is_auth_error {
        Frame::SimpleString("OK".into())
    }else{
        frame
    }
}
/// Takes in a working buffer of previous bytes, a new set of bytes, and a max_size option.
/// Returns an option with the parsed frame and its size in bytes, including crlf padding and the kind/type byte.
/// `Ok(None)` means the buffer does not yet contain a complete frame.
pub fn bytes_to_frames(buf: &mut BytesMut, max_size: &Option<usize>) -> Result<Option<(Frame, usize)>, RedisError> {
    let _guard = flame_start!("redis:bytes_to_frames");
    let full_len = buf.len();
    // operate on a clone of the bytes so split_off calls dont affect the original buffer
    let mut bytes = buf.clone();
    let mut cursor = Cursor::new(bytes);
    if cursor.remaining() < 1 {
        return Err(RedisError::new(
            RedisErrorKind::ProtocolError, "Empty frame bytes."
        ));
    }
    // The first byte selects the RESP frame type ('+', '-', ':', '$', '*').
    let first_byte = cursor.get_u8();
    let data_type = match FrameKind::from_byte(first_byte) {
        Some(d) => d,
        None => {
            return Err(RedisError::new(
                RedisErrorKind::ProtocolError, format!("Invalid first byte {}.", first_byte)
            ))
        }
    };
    let frame = match data_type {
        FrameKind::BulkString | FrameKind::Null => {
            // A -1 length prefix is the RESP null bulk string.
            let expected_len = readers::read_prefix_len(&mut cursor)?;
            if expected_len == -1 {
                Some((Frame::Null, NULL.len()))
            }else if expected_len >= 0 && cursor.remaining() >= expected_len as usize {
                let _ = check_expected_size(expected_len as usize, max_size)?;
                let mut payload = Vec::with_capacity(expected_len as usize);
                let _ = readers::read_exact(&mut cursor, expected_len as u64, &mut payload)?;
                // there's still trailing CRLF after bulk strings
                pop_trailing_crlf(&mut cursor)?;
                Some((Frame::BulkString(payload), cursor.position() as usize))
            }else{
                // Payload not fully buffered yet.
                None
            }
        },
        FrameKind::Array => {
            let expected_len = readers::read_prefix_len(&mut cursor)?;
            if expected_len == -1 {
                Some((Frame::Null, NULL.len()))
            }else if expected_len >= 0 {
                let _ = check_expected_size(expected_len as usize, max_size)?;
                // cursor now points at the first value's type byte
                let mut position = cursor.position() as usize;
                let buf = cursor.into_inner();
                let mut frames = Vec::with_capacity(expected_len as usize);
                let mut unfinished = false;
                let mut parsed = 0;
                // cut the outer buffer into successively smaller byte slices as the array is parsed,
                // and at the end check that the expected number of elements were parsed and that none
                // failed while being parsed.
                for _ in 0..expected_len {
                    // operate on a clone of buf in case the array is unfinished
                    // this just increments a few ref counts
                    let mut next_bytes = buf.clone().split_off(position);
                    // Recursive parse; each element advances `position` by the
                    // element's full encoded size.
                    match bytes_to_frames(&mut next_bytes, max_size)? {
                        Some((f, size)) => {
                            frames.push(f);
                            position = position + size;
                            parsed = parsed + 1;
                        },
                        None => {
                            unfinished = true;
                            break;
                        }
                    }
                }
                if unfinished || parsed != expected_len {
                    None
                }else{
                    Some((Frame::Array(frames), full_len))
                }
            }else{
                return Err(RedisError::new(
                    RedisErrorKind::ProtocolError, format!("Invalid payload size: {}.", expected_len)
                ))
            }
        },
        FrameKind::SimpleString => {
            let payload = readers::read_to_crlf(&mut cursor)?;
            let parsed = String::from_utf8(payload)?;
            Some((Frame::SimpleString(parsed), cursor.position() as usize))
        },
        FrameKind::Error => {
            let payload = readers::read_to_crlf(&mut cursor)?;
            let parsed = String::from_utf8(payload)?;
            // MOVED/ASK redirections are surfaced as dedicated frame types.
            let frame = if let Some(frame) = is_cluster_error(&parsed) {
                frame
            }else{
                Frame::Error(parsed)
            };
            Some((frame, cursor.position() as usize))
        },
        FrameKind::Integer => {
            let payload = readers::read_to_crlf(&mut cursor)?;
            let parsed = String::from_utf8(payload)?;
            let int_val: i64 = parsed.parse()?;
            Some((Frame::Integer(int_val), cursor.position() as usize))
        },
        _ => return Err(RedisError::new(
            RedisErrorKind::ProtocolError, "Unknown frame."
        ))
    };
    // Optionally rewrite the benign AUTH error (feature-gated no-op otherwise).
    Ok(frame.map(|(f, s)| (check_auth_error(f), s)))
}
/// Serializes an outbound frame into `bytes` in RESP wire format, draining
/// the frame's payload in the process. Only arrays, bulk strings, and null
/// are accepted (see the note at the bottom).
pub fn frames_to_bytes(frame: &mut Frame, bytes: &mut BytesMut) -> Result<(), RedisError> {
    let _guard = flame_start!("redis:frames_to_bytes");
    let frame_byte = frame.kind().to_byte();
    match *frame {
        Frame::BulkString(ref mut buf) => {
            // "$<len>\r\n<payload>\r\n"
            let len_str = buf.len().to_string();
            bytes.reserve(1 + len_str.bytes().len() + 2 + buf.len() + 2);
            // NOTE(review): this logs the buffer length *before* the frame is
            // appended, so "Send N bytes" understates the payload — confirm
            // whether that is intended.
            trace!("Send {:?} bytes", bytes.len());
            bytes.put_u8(frame_byte);
            bytes.write_str(&len_str)?;
            write_crlf(bytes);
            for byte in buf.drain(..) {
                bytes.put_u8(byte);
            }
            write_crlf(bytes);
        },
        Frame::Array(ref mut inner_frames) => {
            // "*<count>\r\n" followed by each element serialized recursively.
            let inner_len = inner_frames.len().to_string();
            bytes.reserve(1 + inner_len.bytes().len() + 2);
            trace!("Send {:?} bytes", bytes.len());
            bytes.put_u8(frame_byte);
            bytes.write_str(&inner_len)?;
            write_crlf(bytes);
            for mut inner_frame in inner_frames.drain(..) {
                frames_to_bytes(&mut inner_frame, bytes)?;
            }
            // no trailing crlf here, the inner values add that
        },
        Frame::Null => {
            bytes.reserve(1 + NULL.bytes().len());
            trace!("Send {:?} bytes", bytes.len());
            bytes.put_u8(frame_byte);
            bytes.write_str(NULL)?;
        },
        // only an array, bulk strings, and null values are allowed on outbound frames
        // the caller is responsible for coercing other types to bulk strings on the way out
        _ => {
            return Err(RedisError::new(
                RedisErrorKind::ProtocolError, format!("Invalid outgoing data frame type {:?}.", frame.kind())
            ))
        }
    };
    Ok(())
}
// ------------------
#[cfg(test)]
mod tests {
    use super::*;
    use super::super::types::*;
    use super::super::super::types::*;

    // int tests

    // LLEN encodes as a 2-element RESP array and drains the command args.
    #[test]
    fn should_encode_llen_req_example() {
        let mut args: RedisCommand = RedisCommand {
            kind: RedisCommandKind::LLen,
            args: vec![
                "mylist".into()
            ],
            tx: None
        };
        let expected = "*2\r\n$4\r\nLLEN\r\n$6\r\nmylist\r\n";
        let mut frame = args.to_frame().unwrap();
        let mut bytes = BytesMut::new();
        frames_to_bytes(&mut frame, &mut bytes).unwrap();
        assert_eq!(bytes, expected.as_bytes());
        // encoding moves the args out of the command
        assert_eq!(args.args.len(), 0);
    }

    // Integer replies decode with the consumed byte count as the second element.
    #[test]
    fn should_decode_llen_res_example() {
        let expected = Some((Frame::Integer(48293), 8));
        let mut bytes: BytesMut = ":48293\r\n".into();
        let actual = bytes_to_frames(&mut bytes, &None).unwrap();
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_encode_incr_req_example() {
        let mut args: RedisCommand = RedisCommand {
            kind: RedisCommandKind::Incr,
            args: vec![
                "mykey".into()
            ],
            tx: None
        };
        let expected = "*2\r\n$4\r\nINCR\r\n$5\r\nmykey\r\n";
        let mut frame = args.to_frame().unwrap();
        let mut bytes = BytesMut::new();
        frames_to_bytes(&mut frame, &mut bytes).unwrap();
        assert_eq!(bytes, expected.as_bytes());
        assert_eq!(args.args.len(), 0);
    }

    #[test]
    fn should_decode_incr_req_example() {
        let expected = Some((Frame::Integer(666), 6));
        let mut bytes: BytesMut = ":666\r\n".into();
        let actual = bytes_to_frames(&mut bytes, &None).unwrap();
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_encode_bitcount_req_example() {
        let mut args: RedisCommand = RedisCommand {
            kind: RedisCommandKind::BitCount,
            args: vec![
                "mykey".into()
            ],
            tx: None
        };
        let expected = "*2\r\n$8\r\nBITCOUNT\r\n$5\r\nmykey\r\n";
        let mut frame = args.to_frame().unwrap();
        let mut bytes = BytesMut::new();
        frames_to_bytes(&mut frame, &mut bytes).unwrap();
        assert_eq!(bytes, expected.as_bytes());
    }

    // CRC16 / cluster hash-slot tests. Expected values come from the Redis
    // cluster specification's reference CRC16 implementation.
    #[test]
    fn should_correctly_crc16_123456789() {
        let key = "123456789";
        // 31C3
        let expected: u16 = 12739;
        let actual = redis_crc16(key);
        assert_eq!(actual, expected);
    }

    // Only the substring between `{` and `}` is hashed when both are present.
    #[test]
    fn should_correctly_crc16_with_brackets() {
        let key = "foo{123456789}bar";
        // 31C3
        let expected: u16 = 12739;
        let actual = redis_crc16(key);
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_correctly_crc16_with_brackets_no_padding() {
        let key = "{123456789}";
        // 31C3
        let expected: u16 = 12739;
        let actual = redis_crc16(key);
        assert_eq!(actual, expected);
    }

    // An unmatched `{` means the whole key is hashed.
    #[test]
    fn should_correctly_crc16_with_invalid_brackets_lhs() {
        let key = "foo{123456789";
        // 288A
        let expected: u16 = 10378;
        let actual = redis_crc16(key);
        assert_eq!(actual, expected);
    }

    // A `}` with no preceding `{` also hashes the whole key.
    #[test]
    fn should_correctly_crc16_with_invalid_brackets_rhs() {
        let key = "foo}123456789";
        // 5B35 = 23349, 23349 % 16384 = 6965
        let expected: u16 = 6965;
        let actual = redis_crc16(key);
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_correctly_crc16_with_random_string() {
        let key = "8xjx7vWrfPq54mKfFD3Y1CcjjofpnAcQ";
        // 127.0.0.1:30001> cluster keyslot 8xjx7vWrfPq54mKfFD3Y1CcjjofpnAcQ
        // (integer) 5458
        let expected: u16 = 5458;
        let actual = redis_crc16(key);
        assert_eq!(actual, expected);
    }

    // Parses the raw CLUSTER NODES output format: only masters appear as map
    // keys; slaves are attached to their master's SlotRange entry.
    #[test]
    fn should_parse_cluster_node_status() {
        let status = "07c37dfeb235213a872192d90877d0cd55635b91 127.0.0.1:30004 slave e7d1eecce10fd6bb5eb35b9f99a514335d9ba9ca 0 1426238317239 4 connected
67ed2db8d677e59ec4a4cefb06858cf2a1a89fa1 127.0.0.1:30002 master - 0 1426238316232 2 connected 5461-10922
292f8b365bb7edb5e285caf0b7e6ddc7265d2f4f 127.0.0.1:30003 master - 0 1426238318243 3 connected 10923-16383
6ec23923021cf3ffec47632106199cb7f496ce01 127.0.0.1:30005 slave 67ed2db8d677e59ec4a4cefb06858cf2a1a89fa1 0 1426238316232 5 connected
824fe116063bc5fcf9f4ffd895bc17aee7731ac3 127.0.0.1:30006 slave 292f8b365bb7edb5e285caf0b7e6ddc7265d2f4f 0 1426238317741 6 connected
e7d1eecce10fd6bb5eb35b9f99a514335d9ba9ca 127.0.0.1:30001 myself,master - 0 0 1 connected 0-5460";
        let mut expected: HashMap<String, Vec<SlotRange>> = HashMap::new();
        expected.insert("127.0.0.1:30002".to_owned(), vec![SlotRange {
            start: 5461,
            end: 10922,
            server: "127.0.0.1:30002".to_owned(),
            id: "67ed2db8d677e59ec4a4cefb06858cf2a1a89fa1".to_owned(),
            slaves: Some(SlaveNodes::new(vec![
                "127.0.0.1:30005".to_owned()
            ]))
        }]);
        expected.insert("127.0.0.1:30003".to_owned(), vec![SlotRange {
            start: 10923,
            end: 16383,
            server: "127.0.0.1:30003".to_owned(),
            id: "292f8b365bb7edb5e285caf0b7e6ddc7265d2f4f".to_owned(),
            slaves: Some(SlaveNodes::new(vec![
                "127.0.0.1:30006".to_owned()
            ]))
        }]);
        expected.insert("127.0.0.1:30001".to_owned(), vec![SlotRange {
            start: 0,
            end: 5460,
            server: "127.0.0.1:30001".to_owned(),
            id: "e7d1eecce10fd6bb5eb35b9f99a514335d9ba9ca".to_owned(),
            slaves: Some(SlaveNodes::new(vec![
                "127.0.0.1:30004".to_owned()
            ]))
        }]);
        let actual = match parse_cluster_nodes(status.to_owned()) {
            Ok(h) => h,
            Err(e) => panic!("{}", e)
        };
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_decode_simple_string_test() {
        let expected = Some((Frame::SimpleString("string".to_owned()), 9));
        let mut bytes: BytesMut = "+string\r\n".into();
        let actual = bytes_to_frames(&mut bytes, &None).unwrap();
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_decode_bulk_string_test() {
        let string1 = vec!['f' as u8 ,'o' as u8, 'o' as u8];
        let expected = Some((Frame::BulkString(string1), 9));
        let mut bytes: BytesMut = "$3\r\nfoo\r\n".into();
        let actual = bytes_to_frames(&mut bytes, &None).unwrap();
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_decode_array_simple_strings_test() {
        let mut frame_vec = Vec::new();
        frame_vec.push(Frame::SimpleString("Foo".to_owned()));
        frame_vec.push(Frame::SimpleString("Bar".to_owned()));
        let expected = Some((Frame::Array(frame_vec), 16));
        let mut bytes: BytesMut = "*2\r\n+Foo\r\n+Bar\r\n".into();
        let actual = bytes_to_frames(&mut bytes, &None).unwrap();
        assert_eq!(actual, expected);
    }

    #[test]
    fn should_encode_array_bulk_string_test() {
        let mut args: RedisCommand = RedisCommand {
            kind: RedisCommandKind::Watch,
            args: vec![
                "HONOR!".into(),
                "Apple Jacks".into()
            ],
            tx: None
        };
        let expected = "*3\r\n$5\r\nWATCH\r\n$6\r\nHONOR!\r\n$11\r\nApple Jacks\r\n";
        let mut frame = args.to_frame().unwrap();
        let mut bytes = BytesMut::new();
        frames_to_bytes(&mut frame, &mut bytes).unwrap();
        assert_eq!(bytes, expected.as_bytes());
    }

    #[test]
    fn should_decode_array_bulk_string_test() {
        let string1 = vec!['f' as u8, 'o' as u8, 'o' as u8];
        let string2 = vec!['b' as u8, 'a' as u8, 'r' as u8];
        let mut frame_vec = Vec::new();
        frame_vec.push(Frame::BulkString(string1));
        frame_vec.push(Frame::BulkString(string2));
        let expected = Some((Frame::Array(frame_vec), 22));
        let mut bytes: BytesMut = "*2\r\n$3\r\nfoo\r\n$3\r\nbar\r\n".into();
        let actual = bytes_to_frames(&mut bytes, &None).unwrap();
        assert_eq!(actual, expected);
    }

    // test cases from afl
    // Regression inputs found by fuzzing; these must not panic or OOM.
    pub mod fuzz {
        use super::*;

        #[test]
        // panicked at 'assertion failed: self.remaining() >= dst.len()'
        fn should_handle_crash_1() {
            // 24 34 80 ff
            let b = vec![
                36 as u8,
                52 as u8,
                128 as u8,
                255 as u8
            ];
            let mut bytes = BytesMut::from(b);
            let _ = bytes_to_frames(&mut bytes, &None);
        }

        #[test]
        // fatal runtime error: allocator memory exhausted
        fn should_handle_crash_2() {
            // size cap that guards against huge declared payload lengths
            let max = Some(10000);
            // 24 35 35 35 35 35 35 35 35 35 35 35 35 35 35
            let b = vec![
                36 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8,
                53 as u8
            ];
            let mut bytes = BytesMut::from(b);
            let _ = bytes_to_frames(&mut bytes, &max);
        }

        #[test]
        // panicked at 'assertion failed: self.remaining() >= dst.len()
        fn should_handle_crash_3() {
            // 2a 35 00 20
            let b = vec![
                42 as u8,
                53 as u8,
                0 as u8,
                32 as u8
            ];
            let mut bytes = BytesMut::from(b);
            let _ = bytes_to_frames(&mut bytes, &None);
        }

        #[test]
        // fatal runtime error: allocator memory exhausted
        fn should_handle_crash_4() {
            let max = Some(10000);
            // 2a 31 39 39 39 39 39 39 39 39 39 39 39 39 39 39 30 39 34
            let b = vec![
                42 as u8,
                49 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                57 as u8,
                48 as u8,
                57 as u8,
                52 as u8
            ];
            let mut bytes = BytesMut::from(b);
            let _ = bytes_to_frames(&mut bytes, &max);
        }
    }
}
|
// svd2rust-generated accessors for the APB2ENR register; do not hand-edit.
#[doc = "Reader of register APB2ENR"]
pub type R = crate::R<u32, super::APB2ENR>;
#[doc = "Writer for register APB2ENR"]
pub type W = crate::W<u32, super::APB2ENR>;
#[doc = "Register APB2ENR `reset()`'s with value 0"]
impl crate::ResetValue for super::APB2ENR {
    type Type = u32;
    // Reset value 0: all APB2 peripheral clocks are disabled after reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// Generated single-bit field accessors for SAI1EN (bit 21).
#[doc = "Reader of field `SAI1EN`"]
pub type SAI1EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SAI1EN`"]
pub struct SAI1EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SAI1EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 21, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
        self.w
    }
}
// Generated single-bit field accessors for TIM16EN (bit 17).
#[doc = "Reader of field `TIM16EN`"]
pub type TIM16EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TIM16EN`"]
pub struct TIM16EN_W<'a> {
    w: &'a mut W,
}
impl<'a> TIM16EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 17, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
// Generated single-bit field accessors for TIM15EN (bit 16).
#[doc = "Reader of field `TIM15EN`"]
pub type TIM15EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TIM15EN`"]
pub struct TIM15EN_W<'a> {
    w: &'a mut W,
}
impl<'a> TIM15EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 16, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
// Generated single-bit field accessors for USART1EN (bit 14).
#[doc = "Reader of field `USART1EN`"]
pub type USART1EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `USART1EN`"]
pub struct USART1EN_W<'a> {
    w: &'a mut W,
}
impl<'a> USART1EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 14, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
// Generated single-bit field accessors for SPI1EN (bit 12).
#[doc = "Reader of field `SPI1EN`"]
pub type SPI1EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SPI1EN`"]
pub struct SPI1EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SPI1EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 12, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
// Generated single-bit field accessors for TIM1EN (bit 11).
#[doc = "Reader of field `TIM1EN`"]
pub type TIM1EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TIM1EN`"]
pub struct TIM1EN_W<'a> {
    w: &'a mut W,
}
impl<'a> TIM1EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 11, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
// Generated single-bit field accessors for SDMMCEN (bit 10).
#[doc = "Reader of field `SDMMCEN`"]
pub type SDMMCEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SDMMCEN`"]
pub struct SDMMCEN_W<'a> {
    w: &'a mut W,
}
impl<'a> SDMMCEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 10, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
// Generated single-bit field accessors for FIREWALLEN (bit 7).
#[doc = "Reader of field `FIREWALLEN`"]
pub type FIREWALLEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FIREWALLEN`"]
pub struct FIREWALLEN_W<'a> {
    w: &'a mut W,
}
impl<'a> FIREWALLEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 7, then set it from `value`
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
// Generated single-bit field accessors for SYSCFGEN (bit 0).
#[doc = "Reader of field `SYSCFGEN`"]
pub type SYSCFGEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SYSCFGEN`"]
pub struct SYSCFGEN_W<'a> {
    w: &'a mut W,
}
impl<'a> SYSCFGEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // bit 0: no shift needed
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
impl R {
    // Generated readers: each extracts one bit of the register value and
    // wraps it in the field's typed reader.
    #[doc = "Bit 21 - SAI1 clock enable"]
    #[inline(always)]
    pub fn sai1en(&self) -> SAI1EN_R {
        SAI1EN_R::new(((self.bits >> 21) & 0x01) != 0)
    }
    #[doc = "Bit 17 - TIM16 timer clock enable"]
    #[inline(always)]
    pub fn tim16en(&self) -> TIM16EN_R {
        TIM16EN_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 16 - TIM15 timer clock enable"]
    #[inline(always)]
    pub fn tim15en(&self) -> TIM15EN_R {
        TIM15EN_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 14 - USART1clock enable"]
    #[inline(always)]
    pub fn usart1en(&self) -> USART1EN_R {
        USART1EN_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 12 - SPI1 clock enable"]
    #[inline(always)]
    pub fn spi1en(&self) -> SPI1EN_R {
        SPI1EN_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 11 - TIM1 timer clock enable"]
    #[inline(always)]
    pub fn tim1en(&self) -> TIM1EN_R {
        TIM1EN_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 10 - SDMMC clock enable"]
    #[inline(always)]
    pub fn sdmmcen(&self) -> SDMMCEN_R {
        SDMMCEN_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 7 - Firewall clock enable"]
    #[inline(always)]
    pub fn firewallen(&self) -> FIREWALLEN_R {
        FIREWALLEN_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 0 - SYSCFG clock enable"]
    #[inline(always)]
    pub fn syscfgen(&self) -> SYSCFGEN_R {
        SYSCFGEN_R::new((self.bits & 0x01) != 0)
    }
}
impl W {
    // Generated writers: each returns a write proxy borrowing this writer so
    // field writes can be chained.
    #[doc = "Bit 21 - SAI1 clock enable"]
    #[inline(always)]
    pub fn sai1en(&mut self) -> SAI1EN_W {
        SAI1EN_W { w: self }
    }
    #[doc = "Bit 17 - TIM16 timer clock enable"]
    #[inline(always)]
    pub fn tim16en(&mut self) -> TIM16EN_W {
        TIM16EN_W { w: self }
    }
    #[doc = "Bit 16 - TIM15 timer clock enable"]
    #[inline(always)]
    pub fn tim15en(&mut self) -> TIM15EN_W {
        TIM15EN_W { w: self }
    }
    #[doc = "Bit 14 - USART1clock enable"]
    #[inline(always)]
    pub fn usart1en(&mut self) -> USART1EN_W {
        USART1EN_W { w: self }
    }
    #[doc = "Bit 12 - SPI1 clock enable"]
    #[inline(always)]
    pub fn spi1en(&mut self) -> SPI1EN_W {
        SPI1EN_W { w: self }
    }
    #[doc = "Bit 11 - TIM1 timer clock enable"]
    #[inline(always)]
    pub fn tim1en(&mut self) -> TIM1EN_W {
        TIM1EN_W { w: self }
    }
    #[doc = "Bit 10 - SDMMC clock enable"]
    #[inline(always)]
    pub fn sdmmcen(&mut self) -> SDMMCEN_W {
        SDMMCEN_W { w: self }
    }
    #[doc = "Bit 7 - Firewall clock enable"]
    #[inline(always)]
    pub fn firewallen(&mut self) -> FIREWALLEN_W {
        FIREWALLEN_W { w: self }
    }
    #[doc = "Bit 0 - SYSCFG clock enable"]
    #[inline(always)]
    pub fn syscfgen(&mut self) -> SYSCFGEN_W {
        SYSCFGEN_W { w: self }
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt::Display;
use common_exception::Result;
use common_exception::Span;
use common_meta_app::principal::PrincipalIdentity;
use common_meta_app::principal::UserIdentity;
use crate::ast::*;
use crate::visitors::Visitor;
/// Format a parsed SQL statement as a pretty-printed AST tree string.
pub fn format_statement(stmt: Statement) -> Result<String> {
    let mut visitor = AstFormatVisitor::new();
    visitor.visit_statement(&stmt);
    // every visit_* pushes exactly one node, so the root is on top of the stack
    let format_ctx = visitor.children.pop().unwrap();
    format_ctx.format_pretty()
}
// Display payload attached to each node of the formatted AST tree.
#[derive(Clone)]
pub struct AstFormatContext {
    // display name of the AST node, e.g. "Function Cast"
    name: String,
    // number of child nodes; only used for the display string
    children_num: usize,
    // optional alias shown alongside the name
    alias: Option<String>,
}
impl AstFormatContext {
    /// Leaf node: no children and no alias.
    pub fn new(name: String) -> Self {
        Self::with_children_alias(name, 0, None)
    }

    /// Node with a known number of children and no alias.
    pub fn with_children(name: String, children_num: usize) -> Self {
        Self::with_children_alias(name, children_num, None)
    }

    /// Fully specified node; the other constructors delegate here.
    pub fn with_children_alias(name: String, children_num: usize, alias: Option<String>) -> Self {
        Self {
            name,
            children_num,
            alias,
        }
    }
}
impl Display for AstFormatContext {
    /// Render as `name`, optionally followed by `(alias ...)` and, when the
    /// node has children, `(children N)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match (&self.alias, self.children_num) {
            (Some(alias), 0) => write!(f, "{} (alias {})", self.name, alias),
            (Some(alias), n) => write!(
                f,
                "{} (alias {}) (children {})",
                self.name, alias, n
            ),
            (None, 0) => write!(f, "{}", self.name),
            (None, n) => write!(f, "{} (children {})", self.name, n),
        }
    }
}
// AST visitor that builds a format tree bottom-up.
pub struct AstFormatVisitor {
    // stack of formatted subtrees; each visit_* method pushes exactly one node
    children: Vec<FormatTreeNode<AstFormatContext>>,
}
impl AstFormatVisitor {
pub fn new() -> Self {
Self { children: vec![] }
}
}
impl<'ast> Visitor<'ast> for AstFormatVisitor {
// Leaf: a bare identifier.
fn visit_identifier(&mut self, ident: &'ast Identifier) {
    let format_ctx = AstFormatContext::new(format!("Identifier {ident}"));
    let node = FormatTreeNode::new(format_ctx);
    self.children.push(node);
}

// Leaf: `[catalog.]database`, rendered as a single dotted name.
fn visit_database_ref(
    &mut self,
    catalog: &'ast Option<Identifier>,
    database: &'ast Identifier,
) {
    let mut name = String::new();
    name.push_str("DatabaseIdentifier ");
    if let Some(catalog) = catalog {
        name.push_str(&catalog.to_string());
        name.push('.');
    }
    name.push_str(&database.to_string());
    let format_ctx = AstFormatContext::new(name);
    let node = FormatTreeNode::new(format_ctx);
    self.children.push(node);
}

// Leaf: `[catalog.][database.]table`.
fn visit_table_ref(
    &mut self,
    catalog: &'ast Option<Identifier>,
    database: &'ast Option<Identifier>,
    table: &'ast Identifier,
) {
    let mut name = String::new();
    name.push_str("TableIdentifier ");
    if let Some(catalog) = catalog {
        name.push_str(&catalog.to_string());
        name.push('.');
    }
    if let Some(database) = database {
        name.push_str(&database.to_string());
        name.push('.');
    }
    name.push_str(&table.to_string());
    let format_ctx = AstFormatContext::new(name);
    let node = FormatTreeNode::new(format_ctx);
    self.children.push(node);
}

// Leaf: `[database.][table.]column`.
fn visit_column_ref(
    &mut self,
    _span: Span,
    database: &'ast Option<Identifier>,
    table: &'ast Option<Identifier>,
    column: &'ast Identifier,
) {
    let mut name = String::new();
    name.push_str("ColumnIdentifier ");
    if let Some(database) = database {
        name.push_str(&database.to_string());
        name.push('.');
    }
    if let Some(table) = table {
        name.push_str(&table.to_string());
        name.push('.');
    }
    name.push_str(&column.to_string());
    let format_ctx = AstFormatContext::new(name);
    let node = FormatTreeNode::new(format_ctx);
    self.children.push(node);
}
// `expr IS [NOT] NULL` as a one-child function node.
fn visit_is_null(&mut self, _span: Span, expr: &'ast Expr, not: bool) {
    let name = if not {
        "Function IsNotNull".to_string()
    } else {
        "Function IsNull".to_string()
    };
    self.visit_expr(expr);
    // visit_expr pushed exactly one node for `expr`
    let child = self.children.pop().unwrap();
    let format_ctx = AstFormatContext::with_children(name, 1);
    let node = FormatTreeNode::with_children(format_ctx, vec![child]);
    self.children.push(node);
}

// `left IS [NOT] DISTINCT FROM right` as a two-child function node.
fn visit_is_distinct_from(
    &mut self,
    _span: Span,
    left: &'ast Expr,
    right: &'ast Expr,
    not: bool,
) {
    let name = if not {
        "Function IsNotDistinctFrom".to_string()
    } else {
        "Function IsDistinctFrom".to_string()
    };
    self.visit_expr(left);
    let child1 = self.children.pop().unwrap();
    self.visit_expr(right);
    let child2 = self.children.pop().unwrap();
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![child1, child2]);
    self.children.push(node);
}

// `expr [NOT] IN (list...)`: the list is grouped under a "List" node.
fn visit_in_list(&mut self, _span: Span, expr: &'ast Expr, list: &'ast [Expr], not: bool) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    let list_format_ctx = AstFormatContext::with_children("List".to_string(), list.len());
    let mut list_children = Vec::with_capacity(list.len());
    for expr in list.iter() {
        self.visit_expr(expr);
        list_children.push(self.children.pop().unwrap());
    }
    let list_child = FormatTreeNode::with_children(list_format_ctx, list_children);

    let name = if not {
        "Function NotIn".to_string()
    } else {
        "Function In".to_string()
    };
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child, list_child]);
    self.children.push(node);
}

// `expr [NOT] IN (subquery)` as a two-child function node.
fn visit_in_subquery(
    &mut self,
    _span: Span,
    expr: &'ast Expr,
    subquery: &'ast Query,
    not: bool,
) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    self.visit_query(subquery);
    let subquery_child = self.children.pop().unwrap();
    let name = if not {
        "Function NotInSubquery".to_string()
    } else {
        "Function InSubquery".to_string()
    };
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child, subquery_child]);
    self.children.push(node);
}
// `expr [NOT] BETWEEN low AND high`: low/high are grouped under a "Between" node.
fn visit_between(
    &mut self,
    _span: Span,
    expr: &'ast Expr,
    low: &'ast Expr,
    high: &'ast Expr,
    not: bool,
) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    self.visit_expr(low);
    let low_child = self.children.pop().unwrap();
    self.visit_expr(high);
    let high_child = self.children.pop().unwrap();
    let between_format_ctx = AstFormatContext::with_children("Between".to_string(), 2);
    let between_child =
        FormatTreeNode::with_children(between_format_ctx, vec![low_child, high_child]);

    let name = if not {
        "Function NotBetween".to_string()
    } else {
        "Function Between".to_string()
    };
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child, between_child]);
    self.children.push(node);
}

// Binary operator, named after the operator itself (e.g. "Function +").
fn visit_binary_op(
    &mut self,
    _span: Span,
    op: &'ast BinaryOperator,
    left: &'ast Expr,
    right: &'ast Expr,
) {
    self.visit_expr(left);
    let left_child = self.children.pop().unwrap();
    self.visit_expr(right);
    let right_child = self.children.pop().unwrap();
    let name = format!("Function {op}");
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![left_child, right_child]);
    self.children.push(node);
}

// Unary operator with a single operand child.
fn visit_unary_op(&mut self, _span: Span, op: &'ast UnaryOperator, expr: &'ast Expr) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    let name = format!("Function {op}");
    let format_ctx = AstFormatContext::with_children(name, 1);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child]);
    self.children.push(node);
}

// `CAST(expr AS type)` (pg-style `expr::type` formats identically).
fn visit_cast(
    &mut self,
    _span: Span,
    expr: &'ast Expr,
    target_type: &'ast TypeName,
    _pg_style: bool,
) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    // the target type is rendered as a leaf child, not visited
    let target_format_ctx = AstFormatContext::new(format!("TargetType {target_type}"));
    let target_child = FormatTreeNode::new(target_format_ctx);
    let name = "Function Cast".to_string();
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child, target_child]);
    self.children.push(node);
}

// `TRY_CAST(expr AS type)`, same shape as Cast with a different name.
fn visit_try_cast(&mut self, _span: Span, expr: &'ast Expr, target_type: &'ast TypeName) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    let target_format_ctx = AstFormatContext::new(format!("TargetType {target_type}"));
    let target_child = FormatTreeNode::new(target_format_ctx);
    let name = "Function TryCast".to_string();
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child, target_child]);
    self.children.push(node);
}

// `EXTRACT(kind FROM expr)` with the interval kind as a leaf child.
fn visit_extract(&mut self, _span: Span, kind: &'ast IntervalKind, expr: &'ast Expr) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    let kind_format_ctx = AstFormatContext::new(format!("IntervalKind {kind}"));
    let kind_child = FormatTreeNode::new(kind_format_ctx);
    let name = "Function Extract".to_string();
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child, kind_child]);
    self.children.push(node);
}
// `POSITION(substr IN str)` as a two-child function node.
fn visit_position(&mut self, _span: Span, substr_expr: &'ast Expr, str_expr: &'ast Expr) {
    self.visit_expr(substr_expr);
    let substr_expr_child = self.children.pop().unwrap();
    self.visit_expr(str_expr);
    let str_expr_child = self.children.pop().unwrap();
    let name = "Function Position".to_string();
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node =
        FormatTreeNode::with_children(format_ctx, vec![substr_expr_child, str_expr_child]);
    self.children.push(node);
}
/// `SUBSTRING(expr FROM from [FOR len])` as a `Function Substring` node with
/// two or three children.
fn visit_substring(
    &mut self,
    _span: Span,
    expr: &'ast Expr,
    substring_from: &'ast Expr,
    substring_for: &'ast Option<Box<Expr>>,
) {
    // 2 mandatory children plus the optional FOR expression; the original
    // reserved capacity 1, guaranteeing a reallocation on the second push
    let mut children = Vec::with_capacity(2 + usize::from(substring_for.is_some()));
    self.visit_expr(expr);
    children.push(self.children.pop().unwrap());
    self.visit_expr(substring_from);
    children.push(self.children.pop().unwrap());
    if let Some(substring_for) = substring_for {
        self.visit_expr(substring_for);
        children.push(self.children.pop().unwrap());
    }
    let name = "Function Substring".to_string();
    let format_ctx = AstFormatContext::with_children(name, children.len());
    let node = FormatTreeNode::with_children(format_ctx, children);
    self.children.push(node);
}
/// `TRIM([where trim_expr FROM] expr)` as a `Function Trim` node with one or
/// two children (the TrimWhere position itself is not rendered).
fn visit_trim(
    &mut self,
    _span: Span,
    expr: &'ast Expr,
    trim_where: &'ast Option<(TrimWhere, Box<Expr>)>,
) {
    // 1 mandatory child plus the optional trim expression; the original
    // reserved capacity 1 and reallocated whenever trim_where was present
    let mut children = Vec::with_capacity(1 + usize::from(trim_where.is_some()));
    self.visit_expr(expr);
    children.push(self.children.pop().unwrap());
    if let Some((_, trim_expr)) = trim_where {
        self.visit_expr(trim_expr);
        children.push(self.children.pop().unwrap());
    }
    let name = "Function Trim".to_string();
    let format_ctx = AstFormatContext::with_children(name, children.len());
    let node = FormatTreeNode::with_children(format_ctx, children);
    self.children.push(node);
}
// Leaf: a literal value, rendered with its Debug representation.
fn visit_literal(&mut self, _span: Span, lit: &'ast Literal) {
    let name = format!("Literal {:?}", lit);
    let format_ctx = AstFormatContext::new(name);
    let node = FormatTreeNode::new(format_ctx);
    self.children.push(node);
}

// Leaf: `COUNT(*)`.
fn visit_count_all(&mut self, _span: Span) {
    let name = "Function CountAll".to_string();
    let format_ctx = AstFormatContext::new(name);
    let node = FormatTreeNode::new(format_ctx);
    self.children.push(node);
}

// Tuple literal with one child per element.
fn visit_tuple(&mut self, _span: Span, elements: &'ast [Expr]) {
    let mut children = Vec::with_capacity(elements.len());
    for element in elements.iter() {
        self.visit_expr(element);
        children.push(self.children.pop().unwrap());
    }
    let name = "Literal Tuple".to_string();
    let format_ctx = AstFormatContext::with_children(name, children.len());
    let node = FormatTreeNode::with_children(format_ctx, children);
    self.children.push(node);
}
// Generic function call; DISTINCT is encoded into the node name. Window
// specs and params are not rendered in the format tree.
fn visit_function_call(
    &mut self,
    _span: Span,
    distinct: bool,
    name: &'ast Identifier,
    args: &'ast [Expr],
    _params: &'ast [Literal],
    _over: &'ast Option<WindowSpec>,
) {
    let mut children = Vec::with_capacity(args.len());
    for arg in args.iter() {
        self.visit_expr(arg);
        children.push(self.children.pop().unwrap());
    }
    let node_name = if distinct {
        format!("Function {name}Distinct")
    } else {
        format!("Function {name}")
    };
    let format_ctx = AstFormatContext::with_children(node_name, children.len());
    let node = FormatTreeNode::with_children(format_ctx, children);
    self.children.push(node);
}

// `CASE [operand] WHEN ... THEN ... [ELSE ...] END`. Conditions and results
// are grouped under their own nodes; the operand and ELSE are optional.
fn visit_case_when(
    &mut self,
    _span: Span,
    operand: &'ast Option<Box<Expr>>,
    conditions: &'ast [Expr],
    results: &'ast [Expr],
    else_result: &'ast Option<Box<Expr>>,
) {
    let mut children = Vec::new();
    if let Some(operand) = operand {
        self.visit_expr(operand);
        children.push(self.children.pop().unwrap());
    }
    if !conditions.is_empty() {
        let mut conditions_children = Vec::with_capacity(conditions.len());
        for condition in conditions.iter() {
            self.visit_expr(condition);
            conditions_children.push(self.children.pop().unwrap());
        }
        let conditions_name = "Conditions".to_string();
        let conditions_format_ctx =
            AstFormatContext::with_children(conditions_name, conditions_children.len());
        let conditions_node =
            FormatTreeNode::with_children(conditions_format_ctx, conditions_children);
        children.push(conditions_node)
    }
    if !results.is_empty() {
        let mut results_children = Vec::with_capacity(results.len());
        for result in results.iter() {
            self.visit_expr(result);
            results_children.push(self.children.pop().unwrap());
        }
        let results_name = "Results".to_string();
        let results_format_ctx =
            AstFormatContext::with_children(results_name, results_children.len());
        let results_node = FormatTreeNode::with_children(results_format_ctx, results_children);
        children.push(results_node)
    }
    if let Some(else_result) = else_result {
        self.visit_expr(else_result);
        let else_child = self.children.pop().unwrap();
        let else_name = "ElseResult".to_string();
        let else_format_ctx = AstFormatContext::with_children(else_name, 1);
        let else_node = FormatTreeNode::with_children(else_format_ctx, vec![else_child]);
        children.push(else_node)
    }
    let name = "Function Case".to_string();
    let format_ctx = AstFormatContext::with_children(name, children.len());
    let node = FormatTreeNode::with_children(format_ctx, children);
    self.children.push(node);
}

// `[NOT] EXISTS (subquery)` as a one-child function node.
fn visit_exists(&mut self, _span: Span, not: bool, subquery: &'ast Query) {
    self.visit_query(subquery);
    let child = self.children.pop().unwrap();
    let name = if not {
        "Function NotExists".to_string()
    } else {
        "Function Exists".to_string()
    };
    let format_ctx = AstFormatContext::with_children(name, 1);
    let node = FormatTreeNode::with_children(format_ctx, vec![child]);
    self.children.push(node);
}
// Scalar subquery, optionally qualified by ANY/ALL/SOME in the node name.
fn visit_subquery(
    &mut self,
    _span: Span,
    modifier: &'ast Option<SubqueryModifier>,
    subquery: &'ast Query,
) {
    self.visit_query(subquery);
    let child = self.children.pop().unwrap();
    let name = if let Some(modifier) = modifier {
        format!("Function Subquery {modifier}")
    } else {
        "Function Subquery".to_string()
    };
    let format_ctx = AstFormatContext::with_children(name, 1);
    let node = FormatTreeNode::with_children(format_ctx, vec![child]);
    self.children.push(node);
}

// Map/struct access; the accessor key is rendered as a leaf child.
fn visit_map_access(&mut self, _span: Span, expr: &'ast Expr, accessor: &'ast MapAccessor) {
    self.visit_expr(expr);
    let expr_child = self.children.pop().unwrap();
    let key_name = match accessor {
        MapAccessor::Bracket { key } => format!("accessor [{key}]"),
        MapAccessor::Period { key } => format!("accessor .{key}"),
        MapAccessor::PeriodNumber { key } => format!("accessor .{key}"),
        MapAccessor::Colon { key } => format!("accessor :{key}"),
    };
    let key_format_ctx = AstFormatContext::new(key_name);
    let key_child = FormatTreeNode::new(key_format_ctx);
    let name = "Function MapAccess".to_string();
    let format_ctx = AstFormatContext::with_children(name, 2);
    let node = FormatTreeNode::with_children(format_ctx, vec![expr_child, key_child]);
    self.children.push(node);
}

// Array literal with one child per element.
fn visit_array(&mut self, _span: Span, exprs: &'ast [Expr]) {
    let mut children = Vec::with_capacity(exprs.len());
    for expr in exprs.iter() {
        self.visit_expr(expr);
        children.push(self.children.pop().unwrap());
    }
    let name = "Literal Array".to_string();
    let format_ctx = AstFormatContext::with_children(name, children.len());
    let node = FormatTreeNode::with_children(format_ctx, children);
    self.children.push(node);
}

// Map literal; keys and values alternate as children (two per entry).
fn visit_map(&mut self, _span: Span, kvs: &'ast [(Expr, Expr)]) {
    let mut children = Vec::with_capacity(kvs.len());
    for (key_expr, val_expr) in kvs.iter() {
        self.visit_expr(key_expr);
        children.push(self.children.pop().unwrap());
        self.visit_expr(val_expr);
        children.push(self.children.pop().unwrap());
    }
    let name = "Literal Map".to_string();
    let format_ctx = AstFormatContext::with_children(name, children.len());
    let node = FormatTreeNode::with_children(format_ctx, children);
    self.children.push(node);
}

// `INTERVAL expr unit`, with the unit baked into the node name.
fn visit_interval(&mut self, _span: Span, expr: &'ast Expr, unit: &'ast IntervalKind) {
    self.visit_expr(expr);
    let child = self.children.pop().unwrap();
    let name = format!("Function Interval{}", unit);
    let format_ctx = AstFormatContext::with_children(name, 1);
    let node = FormatTreeNode::with_children(format_ctx, vec![child]);
    self.children.push(node);
}
fn visit_date_add(
&mut self,
_span: Span,
unit: &'ast IntervalKind,
interval: &'ast Expr,
date: &'ast Expr,
) {
self.visit_expr(date);
let date_child = self.children.pop().unwrap();
self.visit_expr(interval);
let interval_child = self.children.pop().unwrap();
let name = format!("Function DateAdd{}", unit);
let format_ctx = AstFormatContext::with_children(name, 2);
let node = FormatTreeNode::with_children(format_ctx, vec![date_child, interval_child]);
self.children.push(node);
}
fn visit_date_sub(
&mut self,
_span: Span,
unit: &'ast IntervalKind,
interval: &'ast Expr,
date: &'ast Expr,
) {
self.visit_expr(date);
let date_child = self.children.pop().unwrap();
self.visit_expr(interval);
let interval_child = self.children.pop().unwrap();
let name = format!("Function DateSub{}", unit);
let format_ctx = AstFormatContext::with_children(name, 2);
let node = FormatTreeNode::with_children(format_ctx, vec![date_child, interval_child]);
self.children.push(node);
}
fn visit_date_trunc(&mut self, _span: Span, unit: &'ast IntervalKind, date: &'ast Expr) {
self.visit_expr(date);
let child = self.children.pop().unwrap();
let name = format!("Function DateTrunc{}", unit);
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
    /// Format a full query node. Child order is fixed: optional WITH clause,
    /// then the mandatory body (set expression), then optional OrderByList,
    /// LimitList and OffsetElement wrappers.
    fn visit_query(&mut self, query: &'ast Query) {
        let mut children = Vec::new();
        if let Some(with) = &query.with {
            self.visit_with(with);
            children.push(self.children.pop().unwrap());
        }
        // The query body is always present.
        self.visit_set_expr(&query.body);
        children.push(self.children.pop().unwrap());
        if !query.order_by.is_empty() {
            let order_by_format_ctx =
                AstFormatContext::with_children("OrderByList".to_string(), query.order_by.len());
            let mut order_by_children = Vec::with_capacity(query.order_by.len());
            for order_by in query.order_by.iter() {
                self.visit_order_by(order_by);
                order_by_children.push(self.children.pop().unwrap());
            }
            let order_by_node =
                FormatTreeNode::with_children(order_by_format_ctx, order_by_children);
            children.push(order_by_node);
        }
        if !query.limit.is_empty() {
            // `limit` can carry multiple expressions (e.g. `LIMIT a, b`).
            let limit_format_ctx =
                AstFormatContext::with_children("LimitList".to_string(), query.limit.len());
            let mut limit_children = Vec::with_capacity(query.limit.len());
            for limit in query.limit.iter() {
                self.visit_expr(limit);
                limit_children.push(self.children.pop().unwrap());
            }
            let limit_node = FormatTreeNode::with_children(limit_format_ctx, limit_children);
            children.push(limit_node);
        }
        if let Some(offset) = &query.offset {
            self.visit_expr(offset);
            let offset_child = self.children.pop().unwrap();
            let offset_format_ctx = AstFormatContext::with_children("OffsetElement".to_string(), 1);
            let offset_node = FormatTreeNode::with_children(offset_format_ctx, vec![offset_child]);
            children.push(offset_node);
        }
        let name = "Query".to_string();
        let format_ctx = AstFormatContext::with_children(name, children.len());
        let node = FormatTreeNode::with_children(format_ctx, children);
        self.children.push(node);
    }
fn visit_explain(&mut self, kind: &'ast ExplainKind, query: &'ast Statement) {
self.visit_statement(query);
let child = self.children.pop().unwrap();
let name = format!("Explain{}", match kind {
ExplainKind::Ast(_) => "Ast",
ExplainKind::Syntax(_) => "Syntax",
ExplainKind::Graph => "Graph",
ExplainKind::Pipeline => "Pipeline",
ExplainKind::Fragments => "Fragments",
ExplainKind::Raw => "Raw",
ExplainKind::Plan => "Plan",
ExplainKind::Memo(_) => "Memo",
ExplainKind::JOIN => "JOIN",
ExplainKind::AnalyzePlan => "Analyze",
});
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
    /// Format a COPY statement. Child order: source unit, destination unit,
    /// then the optional Files / Pattern / FileFormats / ValidationMode
    /// clauses, and finally the unconditional SizeLimit and Purge leaves.
    fn visit_copy(&mut self, copy: &'ast CopyStmt) {
        let mut children = Vec::new();
        self.visit_copy_unit(&copy.src);
        children.push(self.children.pop().unwrap());
        self.visit_copy_unit(&copy.dst);
        children.push(self.children.pop().unwrap());
        if let Some(files) = &copy.files {
            // Each explicitly listed file becomes a leaf under "Files".
            let mut files_children = Vec::with_capacity(files.len());
            for file in files.iter() {
                let file_name = format!("File {}", file);
                let file_format_ctx = AstFormatContext::new(file_name);
                let file_node = FormatTreeNode::new(file_format_ctx);
                files_children.push(file_node);
            }
            let files_name = "Files".to_string();
            let files_format_ctx =
                AstFormatContext::with_children(files_name, files_children.len());
            let files_node = FormatTreeNode::with_children(files_format_ctx, files_children);
            children.push(files_node);
        }
        if let Some(pattern) = &copy.pattern {
            let pattern_name = format!("Pattern {}", pattern);
            let pattern_format_ctx = AstFormatContext::new(pattern_name);
            let pattern_node = FormatTreeNode::new(pattern_format_ctx);
            children.push(pattern_node);
        }
        if !copy.file_format.is_empty() {
            // FILE_FORMAT options render as "FileFormat k = v" leaves.
            let mut file_formats_children = Vec::with_capacity(copy.file_format.len());
            for (k, v) in copy.file_format.iter() {
                let file_format_name = format!("FileFormat {} = {:?}", k, v);
                let file_format_format_ctx = AstFormatContext::new(file_format_name);
                let file_format_node = FormatTreeNode::new(file_format_format_ctx);
                file_formats_children.push(file_format_node);
            }
            let file_formats_format_name = "FileFormats".to_string();
            let files_formats_format_ctx = AstFormatContext::with_children(
                file_formats_format_name,
                file_formats_children.len(),
            );
            let files_formats_node =
                FormatTreeNode::with_children(files_formats_format_ctx, file_formats_children);
            children.push(files_formats_node);
        }
        if !copy.validation_mode.is_empty() {
            let validation_mode_name = format!("ValidationMode {}", copy.validation_mode);
            let validation_mode_format_ctx = AstFormatContext::new(validation_mode_name);
            let validation_mode_node = FormatTreeNode::new(validation_mode_format_ctx);
            children.push(validation_mode_node);
        }
        // SizeLimit and Purge are always emitted, even at their defaults.
        let size_limit_name = format!("SizeLimit {}", copy.size_limit);
        let size_limit_format_ctx = AstFormatContext::new(size_limit_name);
        let size_limit_node = FormatTreeNode::new(size_limit_format_ctx);
        children.push(size_limit_node);
        let purge_name = format!("Purge {}", copy.purge);
        let purge_name_ctx = AstFormatContext::new(purge_name);
        let purge_name_node = FormatTreeNode::new(purge_name_ctx);
        children.push(purge_name_node);
        let name = "Copy".to_string();
        let format_ctx = AstFormatContext::with_children(name, children.len());
        let node = FormatTreeNode::with_children(format_ctx, children);
        self.children.push(node);
    }
fn visit_copy_unit(&mut self, copy_unit: &'ast CopyUnit) {
match copy_unit {
CopyUnit::Table {
catalog,
database,
table,
} => self.visit_table_ref(catalog, database, table),
CopyUnit::StageLocation(v) => {
let location_format_ctx =
AstFormatContext::new(format!("Location @{}{}", v.name, v.path));
let location_node = FormatTreeNode::new(location_format_ctx);
self.children.push(location_node);
}
CopyUnit::UriLocation(v) => {
let location_format_ctx = AstFormatContext::new(format!("UriLocation {}", v));
let location_node = FormatTreeNode::new(location_format_ctx);
self.children.push(location_node);
}
CopyUnit::Query(query) => self.visit_query(query),
}
let child = self.children.pop().unwrap();
let name = "CopyUnit".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_call(&mut self, call: &'ast CallStmt) {
let mut children = Vec::new();
for arg in call.args.iter() {
let arg_name = format!("Arg {}", arg);
let arg_format_ctx = AstFormatContext::new(arg_name);
let arg_node = FormatTreeNode::new(arg_format_ctx);
children.push(arg_node);
}
let node_name = format!("Call {}", call.name);
let format_ctx = AstFormatContext::with_children(node_name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_show_settings(&mut self, like: &'ast Option<String>) {
let mut children = Vec::new();
if let Some(like) = like {
let like_name = format!("Like {}", like);
let like_format_ctx = AstFormatContext::new(like_name);
let like_node = FormatTreeNode::new(like_format_ctx);
children.push(like_node);
}
let name = "ShowSetting".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_show_process_list(&mut self) {
let name = "ShowProcessList".to_string();
let format_ctx = AstFormatContext::new(name);
let node = FormatTreeNode::new(format_ctx);
self.children.push(node);
}
fn visit_show_metrics(&mut self) {
let name = "ShowMetrics".to_string();
let format_ctx = AstFormatContext::new(name);
let node = FormatTreeNode::new(format_ctx);
self.children.push(node);
}
fn visit_show_engines(&mut self) {
let name = "ShowEngines".to_string();
let format_ctx = AstFormatContext::new(name);
let node = FormatTreeNode::new(format_ctx);
self.children.push(node);
}
fn visit_show_functions(&mut self, limit: &'ast Option<ShowLimit>) {
let mut children = Vec::new();
if let Some(limit) = limit {
self.visit_show_limit(limit);
children.push(self.children.pop().unwrap());
}
let name = "ShowFunctions".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_show_limit(&mut self, limit: &'ast ShowLimit) {
match limit {
ShowLimit::Like { pattern } => {
let name = format!("LimitLike {}", pattern);
let format_ctx = AstFormatContext::new(name);
let node = FormatTreeNode::new(format_ctx);
self.children.push(node);
}
ShowLimit::Where { selection } => {
self.visit_expr(selection);
let child = self.children.pop().unwrap();
let name = "LimitWhere".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
}
}
fn visit_kill(&mut self, kill_target: &'ast KillTarget, object_id: &'ast str) {
let name = format!("Kill {} {}", kill_target, object_id);
let format_ctx = AstFormatContext::new(name);
let node = FormatTreeNode::new(format_ctx);
self.children.push(node);
}
fn visit_set_variable(
&mut self,
is_global: bool,
variable: &'ast Identifier,
value: &'ast Expr,
) {
let mut children = Vec::with_capacity(1);
self.visit_expr(value);
children.push(self.children.pop().unwrap());
let name = if is_global {
format!("SetGlobal {}", variable)
} else {
format!("Set {}", variable)
};
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_unset_variable(&mut self, stmt: &'ast UnSetStmt) {
let name = format!("UnSet {}", stmt);
let format_ctx = AstFormatContext::new(name);
let node = FormatTreeNode::new(format_ctx);
self.children.push(node);
}
fn visit_insert(&mut self, insert: &'ast InsertStmt) {
let mut children = Vec::new();
self.visit_table_ref(&insert.catalog, &insert.database, &insert.table);
children.push(self.children.pop().unwrap());
if !insert.columns.is_empty() {
let mut columns_children = Vec::with_capacity(insert.columns.len());
for column in insert.columns.iter() {
self.visit_identifier(column);
columns_children.push(self.children.pop().unwrap());
}
let columns_name = "Columns".to_string();
let columns_format_ctx =
AstFormatContext::with_children(columns_name, columns_children.len());
let columns_node = FormatTreeNode::with_children(columns_format_ctx, columns_children);
children.push(columns_node);
}
self.visit_insert_source(&insert.source);
children.push(self.children.pop().unwrap());
let name = if insert.overwrite {
"InsertOverwrite".to_string()
} else {
"Insert".to_string()
};
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
    /// Format the data source of an INSERT. Each match arm pushes exactly one
    /// node, which is then re-wrapped in a "Source" parent.
    fn visit_insert_source(&mut self, insert_source: &'ast InsertSource) {
        match insert_source {
            InsertSource::Streaming { format, .. } => {
                let streaming_name = format!("StreamSource {}", format);
                let streaming_format_ctx = AstFormatContext::new(streaming_name);
                let streaming_node = FormatTreeNode::new(streaming_format_ctx);
                self.children.push(streaming_node);
            }
            InsertSource::StreamingV2 { settings, .. } => {
                // V2 streaming carries file-format settings; each renders as a leaf.
                let mut file_formats_children = Vec::with_capacity(settings.len());
                for (k, v) in settings.iter() {
                    let file_format_name = format!("FileFormat {} = {:?}", k, v);
                    let file_format_format_ctx = AstFormatContext::new(file_format_name);
                    let file_format_node = FormatTreeNode::new(file_format_format_ctx);
                    file_formats_children.push(file_format_node);
                }
                let file_formats_format_name = "StreamSourceFileFormats".to_string();
                let files_formats_format_ctx = AstFormatContext::with_children(
                    file_formats_format_name,
                    file_formats_children.len(),
                );
                let files_formats_node =
                    FormatTreeNode::with_children(files_formats_format_ctx, file_formats_children);
                self.children.push(files_formats_node);
            }
            InsertSource::Values { .. } => {
                // The literal rows themselves are not expanded, only flagged.
                let values_name = "ValueSource".to_string();
                let values_format_ctx = AstFormatContext::new(values_name);
                let values_node = FormatTreeNode::new(values_format_ctx);
                self.children.push(values_node);
            }
            InsertSource::Select { query } => self.visit_query(query),
        }
        let child = self.children.pop().unwrap();
        let name = "Source".to_string();
        let format_ctx = AstFormatContext::with_children(name, 1);
        let node = FormatTreeNode::with_children(format_ctx, vec![child]);
        self.children.push(node);
    }
fn visit_delete(
&mut self,
table_reference: &'ast TableReference,
selection: &'ast Option<Expr>,
) {
let mut children = Vec::new();
self.visit_table_reference(table_reference);
children.push(self.children.pop().unwrap());
if let Some(selection) = selection {
self.visit_expr(selection);
children.push(self.children.pop().unwrap());
}
let name = "Delete".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_update(&mut self, update: &'ast UpdateStmt) {
let mut children = Vec::new();
self.visit_table_reference(&update.table);
children.push(self.children.pop().unwrap());
for update_expr in update.update_list.iter() {
self.visit_identifier(&update_expr.name);
children.push(self.children.pop().unwrap());
self.visit_expr(&update_expr.expr);
children.push(self.children.pop().unwrap());
}
if let Some(selection) = &update.selection {
self.visit_expr(selection);
children.push(self.children.pop().unwrap());
}
let name = "Update".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_show_databases(&mut self, stmt: &'ast ShowDatabasesStmt) {
let mut children = Vec::new();
if let Some(limit) = &stmt.limit {
self.visit_show_limit(limit);
children.push(self.children.pop().unwrap());
}
let name = "ShowDatabases".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_show_create_databases(&mut self, stmt: &'ast ShowCreateDatabaseStmt) {
self.visit_database_ref(&stmt.catalog, &stmt.database);
let child = self.children.pop().unwrap();
let name = "ShowCreateDatabase".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
    /// Format CREATE DATABASE: the database ref, an optional engine leaf,
    /// and an optional "DatabaseOptions" subtree.
    fn visit_create_database(&mut self, stmt: &'ast CreateDatabaseStmt) {
        let mut children = Vec::new();
        self.visit_database_ref(&stmt.catalog, &stmt.database);
        children.push(self.children.pop().unwrap());
        if let Some(engine) = &stmt.engine {
            let engine_name = format!("DatabaseEngine {}", engine);
            let engine_format_ctx = AstFormatContext::new(engine_name);
            let engine_node = FormatTreeNode::new(engine_format_ctx);
            children.push(engine_node);
        }
        if !stmt.options.is_empty() {
            // Each option renders as a "DatabaseOption name = value" leaf.
            let mut options_children = Vec::with_capacity(stmt.options.len());
            for option in stmt.options.iter() {
                let option_name = format!("DatabaseOption {} = {:?}", option.name, option.value);
                let option_format_ctx = AstFormatContext::new(option_name);
                let option_format_node = FormatTreeNode::new(option_format_ctx);
                options_children.push(option_format_node);
            }
            let options_format_name = "DatabaseOptions".to_string();
            let options_format_ctx =
                AstFormatContext::with_children(options_format_name, options_children.len());
            let options_node = FormatTreeNode::with_children(options_format_ctx, options_children);
            children.push(options_node);
        }
        let name = "CreateDatabase".to_string();
        let format_ctx = AstFormatContext::with_children(name, children.len());
        let node = FormatTreeNode::with_children(format_ctx, children);
        self.children.push(node);
    }
fn visit_drop_database(&mut self, stmt: &'ast DropDatabaseStmt) {
self.visit_database_ref(&stmt.catalog, &stmt.database);
let child = self.children.pop().unwrap();
let name = "DropDatabase".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_undrop_database(&mut self, stmt: &'ast UndropDatabaseStmt) {
self.visit_database_ref(&stmt.catalog, &stmt.database);
let child = self.children.pop().unwrap();
let name = "UndropDatabase".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_alter_database(&mut self, stmt: &'ast AlterDatabaseStmt) {
self.visit_database_ref(&stmt.catalog, &stmt.database);
let database_child = self.children.pop().unwrap();
let action_child = match &stmt.action {
AlterDatabaseAction::RenameDatabase { new_db } => {
let action_name = format!("Action RenameTo {}", new_db);
let action_format_ctx = AstFormatContext::new(action_name);
FormatTreeNode::new(action_format_ctx)
}
};
let name = "AlterDatabase".to_string();
let format_ctx = AstFormatContext::with_children(name, 2);
let node = FormatTreeNode::with_children(format_ctx, vec![database_child, action_child]);
self.children.push(node);
}
fn visit_use_database(&mut self, database: &'ast Identifier) {
self.visit_identifier(database);
let child = self.children.pop().unwrap();
let name = "UseDatabase".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_show_tables(&mut self, stmt: &'ast ShowTablesStmt) {
let mut children = Vec::new();
if let Some(database) = &stmt.database {
let database_name = format!("Database {}", database);
let database_format_ctx = AstFormatContext::new(database_name);
let database_node = FormatTreeNode::new(database_format_ctx);
children.push(database_node);
}
if let Some(limit) = &stmt.limit {
self.visit_show_limit(limit);
children.push(self.children.pop().unwrap());
}
let name = "ShowTables".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_show_columns(&mut self, stmt: &'ast ShowColumnsStmt) {
let mut children = Vec::new();
if let Some(database) = &stmt.database {
let database_name = format!("Database {}", database);
let database_format_ctx = AstFormatContext::new(database_name);
let database_node = FormatTreeNode::new(database_format_ctx);
children.push(database_node);
}
let table_name = format!("Table {}", &stmt.table);
let table_format_ctx = AstFormatContext::new(table_name);
let table_node = FormatTreeNode::new(table_format_ctx);
children.push(table_node);
if let Some(limit) = &stmt.limit {
self.visit_show_limit(limit);
children.push(self.children.pop().unwrap());
}
let name = "ShowColumns".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_show_create_table(&mut self, stmt: &'ast ShowCreateTableStmt) {
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
let child = self.children.pop().unwrap();
let name = "ShowCreateTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_describe_table(&mut self, stmt: &'ast DescribeTableStmt) {
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
let child = self.children.pop().unwrap();
let name = "DescribeTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_show_tables_status(&mut self, stmt: &'ast ShowTablesStatusStmt) {
let mut children = Vec::new();
if let Some(database) = &stmt.database {
let database_name = format!("Database {}", database);
let database_format_ctx = AstFormatContext::new(database_name);
let database_node = FormatTreeNode::new(database_format_ctx);
children.push(database_node);
}
if let Some(limit) = &stmt.limit {
self.visit_show_limit(limit);
children.push(self.children.pop().unwrap());
}
let name = "ShowTablesStatus".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
    /// Format CREATE TABLE. Child order: table ref, optional source (columns
    /// or LIKE), optional engine leaf, optional ClusterByList, optional
    /// TableOptions, and an optional AS-query subtree.
    fn visit_create_table(&mut self, stmt: &'ast CreateTableStmt) {
        let mut children = Vec::new();
        self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
        children.push(self.children.pop().unwrap());
        if let Some(source) = &stmt.source {
            self.visit_create_table_source(source);
            children.push(self.children.pop().unwrap());
        }
        if let Some(engine) = &stmt.engine {
            let engine_name = format!("TableEngine {}", engine);
            let engine_format_ctx = AstFormatContext::new(engine_name);
            let engine_node = FormatTreeNode::new(engine_format_ctx);
            children.push(engine_node);
        }
        if !stmt.cluster_by.is_empty() {
            // Cluster keys are expressions; each becomes a child of the list node.
            let mut cluster_by_children = Vec::with_capacity(stmt.cluster_by.len());
            for cluster_by in stmt.cluster_by.iter() {
                self.visit_expr(cluster_by);
                cluster_by_children.push(self.children.pop().unwrap());
            }
            let cluster_by_name = "ClusterByList".to_string();
            let cluster_by_format_ctx =
                AstFormatContext::with_children(cluster_by_name, cluster_by_children.len());
            let cluster_by_node =
                FormatTreeNode::with_children(cluster_by_format_ctx, cluster_by_children);
            children.push(cluster_by_node);
        }
        if !stmt.table_options.is_empty() {
            // Table options render as "TableOption k = v" leaves.
            let mut table_options_children = Vec::with_capacity(stmt.table_options.len());
            for (k, v) in stmt.table_options.iter() {
                let table_option_name = format!("TableOption {} = {:?}", k, v);
                let table_option_format_ctx = AstFormatContext::new(table_option_name);
                let table_option_node = FormatTreeNode::new(table_option_format_ctx);
                table_options_children.push(table_option_node);
            }
            let table_options_format_name = "TableOptions".to_string();
            let table_options_format_ctx = AstFormatContext::with_children(
                table_options_format_name,
                table_options_children.len(),
            );
            let table_options_node =
                FormatTreeNode::with_children(table_options_format_ctx, table_options_children);
            children.push(table_options_node);
        }
        if let Some(as_query) = &stmt.as_query {
            self.visit_query(as_query);
            children.push(self.children.pop().unwrap());
        }
        let name = "CreateTable".to_string();
        let format_ctx = AstFormatContext::with_children(name, children.len());
        let node = FormatTreeNode::with_children(format_ctx, children);
        self.children.push(node);
    }
fn visit_create_table_source(&mut self, source: &'ast CreateTableSource) {
match source {
CreateTableSource::Columns(columns) => {
let mut children = Vec::with_capacity(columns.len());
for column in columns.iter() {
self.visit_column_definition(column);
children.push(self.children.pop().unwrap());
}
let name = "ColumnsDefinition".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
CreateTableSource::Like {
catalog,
database,
table,
} => {
self.visit_table_ref(catalog, database, table);
let child = self.children.pop().unwrap();
let name = "LikeTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
}
}
fn visit_column_definition(&mut self, column_definition: &'ast ColumnDefinition) {
let type_name = format!("DataType {}", column_definition.data_type);
let type_format_ctx = AstFormatContext::new(type_name);
let type_node = FormatTreeNode::new(type_format_ctx);
let name = format!("ColumnDefinition {}", column_definition.name);
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![type_node]);
self.children.push(node);
}
fn visit_drop_table(&mut self, stmt: &'ast DropTableStmt) {
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
let child = self.children.pop().unwrap();
let name = "DropTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_undrop_table(&mut self, stmt: &'ast UndropTableStmt) {
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
let child = self.children.pop().unwrap();
let name = "UndropTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
    /// Format ALTER TABLE: always two children — the table reference and a
    /// node describing the action. Most actions are leaves; cluster-by,
    /// recluster and revert-to actions carry their own subtrees.
    fn visit_alter_table(&mut self, stmt: &'ast AlterTableStmt) {
        self.visit_table_reference(&stmt.table_reference);
        let table_child = self.children.pop().unwrap();
        let action_child = match &stmt.action {
            AlterTableAction::RenameTable { new_table } => {
                let action_name = format!("Action RenameTo {}", new_table);
                let action_format_ctx = AstFormatContext::new(action_name);
                FormatTreeNode::new(action_format_ctx)
            }
            AlterTableAction::AddColumn { column } => {
                let action_name = format!("Action Add column {}", column);
                let action_format_ctx = AstFormatContext::new(action_name);
                FormatTreeNode::new(action_format_ctx)
            }
            AlterTableAction::DropColumn { column } => {
                let action_name = format!("Action Drop column {}", column);
                let action_format_ctx = AstFormatContext::new(action_name);
                FormatTreeNode::new(action_format_ctx)
            }
            AlterTableAction::AlterTableClusterKey { cluster_by } => {
                // Each cluster-key expression becomes a child of the action node.
                let mut cluster_by_children = Vec::with_capacity(cluster_by.len());
                for cluster_by_expr in cluster_by.iter() {
                    self.visit_expr(cluster_by_expr);
                    cluster_by_children.push(self.children.pop().unwrap());
                }
                let cluster_by_name = "Action ClusterByList".to_string();
                let cluster_by_format_ctx =
                    AstFormatContext::with_children(cluster_by_name, cluster_by_children.len());
                FormatTreeNode::with_children(cluster_by_format_ctx, cluster_by_children)
            }
            AlterTableAction::DropTableClusterKey => {
                let action_name = "Action DropClusterKey".to_string();
                let action_format_ctx = AstFormatContext::new(action_name);
                FormatTreeNode::new(action_format_ctx)
            }
            AlterTableAction::ReclusterTable { selection, .. } => {
                // The optional WHERE predicate is the action node's only child.
                let mut children = Vec::new();
                if let Some(selection) = selection {
                    self.visit_expr(selection);
                    children.push(self.children.pop().unwrap());
                }
                let action_name = "Action Recluster".to_string();
                let action_format_ctx =
                    AstFormatContext::with_children(action_name, children.len());
                FormatTreeNode::with_children(action_format_ctx, children)
            }
            AlterTableAction::RevertTo { point } => {
                self.visit_time_travel_point(point);
                let point_node = self.children.pop().unwrap();
                let action_name = "Action RevertTo".to_string();
                let action_format_ctx = AstFormatContext::with_children(action_name, 1);
                FormatTreeNode::with_children(action_format_ctx, vec![point_node])
            }
        };
        let name = "AlterTable".to_string();
        let format_ctx = AstFormatContext::with_children(name, 2);
        let node = FormatTreeNode::with_children(format_ctx, vec![table_child, action_child]);
        self.children.push(node);
    }
fn visit_rename_table(&mut self, stmt: &'ast RenameTableStmt) {
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
let old_child = self.children.pop().unwrap();
self.visit_table_ref(&stmt.new_catalog, &stmt.new_database, &stmt.new_table);
let new_child = self.children.pop().unwrap();
let name = "RenameTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 2);
let node = FormatTreeNode::with_children(format_ctx, vec![old_child, new_child]);
self.children.push(node);
}
fn visit_truncate_table(&mut self, stmt: &'ast TruncateTableStmt) {
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
let child = self.children.pop().unwrap();
let name = "TruncateTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
fn visit_optimize_table(&mut self, stmt: &'ast OptimizeTableStmt) {
let mut children = Vec::new();
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
children.push(self.children.pop().unwrap());
let action_name = format!("Action {}", stmt.action);
let action_format_ctx = AstFormatContext::new(action_name);
children.push(FormatTreeNode::new(action_format_ctx));
let name = "OptimizeTable".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_analyze_table(&mut self, stmt: &'ast AnalyzeTableStmt) {
let mut children = Vec::new();
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
children.push(self.children.pop().unwrap());
let name = "AnalyzeTable".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
fn visit_exists_table(&mut self, stmt: &'ast ExistsTableStmt) {
self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.table);
let child = self.children.pop().unwrap();
let name = "ExistsTable".to_string();
let format_ctx = AstFormatContext::with_children(name, 1);
let node = FormatTreeNode::with_children(format_ctx, vec![child]);
self.children.push(node);
}
/// Formats `CREATE VIEW`: two children — the view identifier and the
/// defining query.
fn visit_create_view(&mut self, stmt: &'ast CreateViewStmt) {
    self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.view);
    let view_node = self.children.pop().unwrap();
    self.visit_query(&stmt.query);
    let query_node = self.children.pop().unwrap();
    let ctx = AstFormatContext::with_children("CreateView".to_string(), 2);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![view_node, query_node]));
}
/// Formats `ALTER VIEW`: two children — the view identifier and the new
/// defining query.
fn visit_alter_view(&mut self, stmt: &'ast AlterViewStmt) {
    self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.view);
    let view_node = self.children.pop().unwrap();
    self.visit_query(&stmt.query);
    let query_node = self.children.pop().unwrap();
    let ctx = AstFormatContext::with_children("AlterView".to_string(), 2);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![view_node, query_node]));
}
/// Formats `DROP VIEW`: a single child holding the view identifier.
fn visit_drop_view(&mut self, stmt: &'ast DropViewStmt) {
    self.visit_table_ref(&stmt.catalog, &stmt.database, &stmt.view);
    let view_child = self.children.pop().unwrap();
    let ctx = AstFormatContext::with_children("DropView".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![view_child]));
}
/// Formats `SHOW USERS`: a leaf node with no children.
fn visit_show_users(&mut self) {
    let ctx = AstFormatContext::new("ShowUsers".to_string());
    self.children.push(FormatTreeNode::new(ctx));
}
/// Formats `CREATE USER`.
///
/// Children, in order: the user identity, then — only when present —
/// the auth type, the password, and a `UserOptions` node wrapping one
/// child per option.
fn visit_create_user(&mut self, stmt: &'ast CreateUserStmt) {
    let mut children = vec![FormatTreeNode::new(AstFormatContext::new(format!(
        "User {}",
        stmt.user
    )))];
    if let Some(auth_type) = &stmt.auth_option.auth_type {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "AuthType {}",
            auth_type.to_str()
        ))));
    }
    if let Some(password) = &stmt.auth_option.password {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "Password {:?}",
            password
        ))));
    }
    if !stmt.user_options.is_empty() {
        let option_nodes: Vec<_> = stmt
            .user_options
            .iter()
            .map(|opt| {
                FormatTreeNode::new(AstFormatContext::new(format!("UserOption {}", opt)))
            })
            .collect();
        let options_ctx =
            AstFormatContext::with_children("UserOptions".to_string(), option_nodes.len());
        children.push(FormatTreeNode::with_children(options_ctx, option_nodes));
    }
    let ctx = AstFormatContext::with_children("CreateUser".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `ALTER USER`.
///
/// Children, in order, all optional: the user identity (absent when the
/// statement targets the current user), the auth type, the password,
/// and a `UserOptions` node wrapping one child per option.
fn visit_alter_user(&mut self, stmt: &'ast AlterUserStmt) {
    let mut children = Vec::new();
    if let Some(user) = &stmt.user {
        let user_name = format!("User {}", user);
        let user_format_ctx = AstFormatContext::new(user_name);
        children.push(FormatTreeNode::new(user_format_ctx));
    }
    if let Some(auth_option) = &stmt.auth_option {
        if let Some(auth_type) = &auth_option.auth_type {
            let auth_type_name = format!("AuthType {}", auth_type.to_str());
            let auth_type_format_ctx = AstFormatContext::new(auth_type_name);
            children.push(FormatTreeNode::new(auth_type_format_ctx));
        }
        if let Some(password) = &auth_option.password {
            // Use `{:?}` so the password renders identically to
            // `visit_create_user` (previously this arm used `{}`,
            // producing inconsistent trees for the same field).
            let auth_password_name = format!("Password {:?}", password);
            let auth_password_format_ctx = AstFormatContext::new(auth_password_name);
            children.push(FormatTreeNode::new(auth_password_format_ctx));
        }
    }
    if !stmt.user_options.is_empty() {
        let mut user_options_children = Vec::with_capacity(stmt.user_options.len());
        for user_option in stmt.user_options.iter() {
            let user_option_name = format!("UserOption {}", user_option);
            let user_option_format_ctx = AstFormatContext::new(user_option_name);
            user_options_children.push(FormatTreeNode::new(user_option_format_ctx));
        }
        let user_options_format_ctx = AstFormatContext::with_children(
            "UserOptions".to_string(),
            user_options_children.len(),
        );
        children.push(FormatTreeNode::with_children(
            user_options_format_ctx,
            user_options_children,
        ));
    }
    let format_ctx = AstFormatContext::with_children("AlterUser".to_string(), children.len());
    self.children
        .push(FormatTreeNode::with_children(format_ctx, children));
}
/// Formats `DROP USER`: a single child holding the user identity.
/// The `IF EXISTS` flag is not rendered.
fn visit_drop_user(&mut self, _if_exists: bool, user: &'ast UserIdentity) {
    let child = FormatTreeNode::new(AstFormatContext::new(format!("User {}", user)));
    let ctx = AstFormatContext::with_children("DropUser".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `SHOW ROLES`: a leaf node with no children.
fn visit_show_roles(&mut self) {
    let ctx = AstFormatContext::new("ShowRoles".to_string());
    self.children.push(FormatTreeNode::new(ctx));
}
/// Formats `CREATE ROLE`: a single child holding the role name.
/// The `IF NOT EXISTS` flag is not rendered.
fn visit_create_role(&mut self, _if_not_exists: bool, role_name: &'ast str) {
    let child = FormatTreeNode::new(AstFormatContext::new(format!("Role {}", role_name)));
    let ctx = AstFormatContext::with_children("CreateRole".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `DROP ROLE`: a single child holding the role name.
/// The `IF EXISTS` flag is not rendered.
fn visit_drop_role(&mut self, _if_exists: bool, role_name: &'ast str) {
    let child = FormatTreeNode::new(AstFormatContext::new(format!("Role {}", role_name)));
    let ctx = AstFormatContext::with_children("DropRole".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `GRANT`: two children — what is granted (a role, a privilege
/// list, or ALL) followed by the grantee principal.
fn visit_grant(&mut self, grant: &'ast GrantStmt) {
    let source_child = match &grant.source {
        AccountMgrSource::Role { role } => {
            FormatTreeNode::new(AstFormatContext::new(format!("Role {}", role)))
        }
        AccountMgrSource::Privs { privileges, .. } => {
            let privilege_nodes: Vec<_> = privileges
                .iter()
                .map(|p| {
                    FormatTreeNode::new(AstFormatContext::new(format!("Privilege {}", p)))
                })
                .collect();
            let ctx = AstFormatContext::with_children(
                "Privileges".to_string(),
                privilege_nodes.len(),
            );
            FormatTreeNode::with_children(ctx, privilege_nodes)
        }
        AccountMgrSource::ALL { .. } => {
            FormatTreeNode::new(AstFormatContext::new("All".to_string()))
        }
    };
    let principal_label = match &grant.principal {
        PrincipalIdentity::User(user) => format!("User {}", user),
        PrincipalIdentity::Role(role) => format!("Role {}", role),
    };
    let principal_child = FormatTreeNode::new(AstFormatContext::new(principal_label));
    let ctx = AstFormatContext::with_children("Grant".to_string(), 2);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![
            source_child,
            principal_child,
        ]));
}
/// Formats `SHOW GRANTS`: zero or one child — the principal, when one
/// was named in the statement.
fn visit_show_grant(&mut self, principal: &'ast Option<PrincipalIdentity>) {
    let mut children = Vec::new();
    if let Some(principal) = principal {
        let label = match principal {
            PrincipalIdentity::User(user) => format!("User {}", user),
            PrincipalIdentity::Role(role) => format!("Role {}", role),
        };
        children.push(FormatTreeNode::new(AstFormatContext::new(label)));
    }
    let ctx = AstFormatContext::with_children("ShowGrant".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `REVOKE`: two children — what is revoked (a role, a privilege
/// list, or ALL) followed by the principal it is revoked from.
fn visit_revoke(&mut self, revoke: &'ast RevokeStmt) {
    let source_child = match &revoke.source {
        AccountMgrSource::Role { role } => {
            FormatTreeNode::new(AstFormatContext::new(format!("Role {}", role)))
        }
        AccountMgrSource::Privs { privileges, .. } => {
            let privilege_nodes: Vec<_> = privileges
                .iter()
                .map(|p| {
                    FormatTreeNode::new(AstFormatContext::new(format!("Privilege {}", p)))
                })
                .collect();
            let ctx = AstFormatContext::with_children(
                "Privileges".to_string(),
                privilege_nodes.len(),
            );
            FormatTreeNode::with_children(ctx, privilege_nodes)
        }
        AccountMgrSource::ALL { .. } => {
            FormatTreeNode::new(AstFormatContext::new("All".to_string()))
        }
    };
    let principal_label = match &revoke.principal {
        PrincipalIdentity::User(user) => format!("User {}", user),
        PrincipalIdentity::Role(role) => format!("Role {}", role),
    };
    let principal_child = FormatTreeNode::new(AstFormatContext::new(principal_label));
    let ctx = AstFormatContext::with_children("Revoke".to_string(), 2);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![
            source_child,
            principal_child,
        ]));
}
/// Formats `CREATE FUNCTION`.
///
/// Children, in order: the UDF name, a `UdfParameters` node (only when
/// parameters exist), the wrapped definition expression, and the
/// description (only when present). `IF NOT EXISTS` is not rendered.
fn visit_create_udf(
    &mut self,
    _if_not_exists: bool,
    udf_name: &'ast Identifier,
    parameters: &'ast [Identifier],
    definition: &'ast Expr,
    description: &'ast Option<String>,
) {
    let mut children = vec![FormatTreeNode::new(AstFormatContext::new(format!(
        "UdfNameIdentifier {}",
        udf_name
    )))];
    if !parameters.is_empty() {
        let mut param_nodes = Vec::with_capacity(parameters.len());
        for param in parameters {
            self.visit_identifier(param);
            param_nodes.push(self.children.pop().unwrap());
        }
        let params_ctx =
            AstFormatContext::with_children("UdfParameters".to_string(), param_nodes.len());
        children.push(FormatTreeNode::with_children(params_ctx, param_nodes));
    }
    self.visit_expr(definition);
    let definition_node = self.children.pop().unwrap();
    let definition_ctx = AstFormatContext::with_children("UdfDefinition".to_string(), 1);
    children.push(FormatTreeNode::with_children(definition_ctx, vec![
        definition_node,
    ]));
    if let Some(description) = description {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "UdfDescription {}",
            description
        ))));
    }
    let ctx = AstFormatContext::with_children("CreateUdf".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `DROP FUNCTION`: a single child holding the UDF name.
/// The `IF EXISTS` flag is not rendered.
fn visit_drop_udf(&mut self, _if_exists: bool, udf_name: &'ast Identifier) {
    let child =
        FormatTreeNode::new(AstFormatContext::new(format!("UdfIdentifier {}", udf_name)));
    let ctx = AstFormatContext::with_children("DropUdf".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `ALTER FUNCTION`.
///
/// Children, in order: the UDF name, a `UdfParameters` node (only when
/// parameters exist), the wrapped definition expression, and the
/// description (only when present).
fn visit_alter_udf(
    &mut self,
    udf_name: &'ast Identifier,
    parameters: &'ast [Identifier],
    definition: &'ast Expr,
    description: &'ast Option<String>,
) {
    let mut children = vec![FormatTreeNode::new(AstFormatContext::new(format!(
        "UdfNameIdentifier {}",
        udf_name
    )))];
    if !parameters.is_empty() {
        let mut param_nodes = Vec::with_capacity(parameters.len());
        for param in parameters {
            self.visit_identifier(param);
            param_nodes.push(self.children.pop().unwrap());
        }
        let params_ctx =
            AstFormatContext::with_children("UdfParameters".to_string(), param_nodes.len());
        children.push(FormatTreeNode::with_children(params_ctx, param_nodes));
    }
    self.visit_expr(definition);
    let definition_node = self.children.pop().unwrap();
    let definition_ctx = AstFormatContext::with_children("UdfDefinition".to_string(), 1);
    children.push(FormatTreeNode::with_children(definition_ctx, vec![
        definition_node,
    ]));
    if let Some(description) = description {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "UdfDescription {}",
            description
        ))));
    }
    let ctx = AstFormatContext::with_children("AlterUdf".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `CREATE STAGE`.
///
/// Children, in order: stage name, then the clauses present in the
/// statement — location, `FileFormats`, on-error, size limit (always
/// rendered), validation mode, and comments.
fn visit_create_stage(&mut self, stmt: &'ast CreateStageStmt) {
    let mut children = vec![FormatTreeNode::new(AstFormatContext::new(format!(
        "StageName {}",
        stmt.stage_name
    )))];
    if let Some(location) = &stmt.location {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "Location {}",
            location
        ))));
    }
    if !stmt.file_format_options.is_empty() {
        let format_nodes: Vec<_> = stmt
            .file_format_options
            .iter()
            .map(|(k, v)| {
                FormatTreeNode::new(AstFormatContext::new(format!(
                    "FileFormat {} = {:?}",
                    k, v
                )))
            })
            .collect();
        let formats_ctx =
            AstFormatContext::with_children("FileFormats".to_string(), format_nodes.len());
        children.push(FormatTreeNode::with_children(formats_ctx, format_nodes));
    }
    if !stmt.on_error.is_empty() {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "OnError {}",
            stmt.on_error
        ))));
    }
    children.push(FormatTreeNode::new(AstFormatContext::new(format!(
        "SizeLimit {}",
        stmt.size_limit
    ))));
    if !stmt.validation_mode.is_empty() {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "ValidationMode {}",
            stmt.validation_mode
        ))));
    }
    if !stmt.comments.is_empty() {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "Comments {}",
            stmt.comments
        ))));
    }
    let ctx = AstFormatContext::with_children("CreateStage".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `SHOW STAGES`: a leaf node with no children.
fn visit_show_stages(&mut self) {
    let ctx = AstFormatContext::new("ShowStages".to_string());
    self.children.push(FormatTreeNode::new(ctx));
}
/// Formats `DROP STAGE`: a single child holding the stage name.
/// The `IF EXISTS` flag is not rendered.
fn visit_drop_stage(&mut self, _if_exists: bool, stage_name: &'ast str) {
    let child =
        FormatTreeNode::new(AstFormatContext::new(format!("StageName {}", stage_name)));
    let ctx = AstFormatContext::with_children("DropStage".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `DESCRIBE STAGE`: a single child holding the stage name.
fn visit_describe_stage(&mut self, stage_name: &'ast str) {
    let child =
        FormatTreeNode::new(AstFormatContext::new(format!("StageName {}", stage_name)));
    let ctx = AstFormatContext::with_children("DescribeStage".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `REMOVE @stage`: two children — the location and the pattern.
fn visit_remove_stage(&mut self, location: &'ast str, pattern: &'ast str) {
    let loc_node =
        FormatTreeNode::new(AstFormatContext::new(format!("Location {}", location)));
    let pat_node = FormatTreeNode::new(AstFormatContext::new(format!("Pattern {}", pattern)));
    let ctx = AstFormatContext::with_children("RemoveStage".to_string(), 2);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![loc_node, pat_node]));
}
/// Formats `LIST @stage`: two children — the location and the pattern.
fn visit_list_stage(&mut self, location: &'ast str, pattern: &'ast str) {
    let loc_node =
        FormatTreeNode::new(AstFormatContext::new(format!("Location {}", location)));
    let pat_node = FormatTreeNode::new(AstFormatContext::new(format!("Pattern {}", pattern)));
    let ctx = AstFormatContext::with_children("ListStage".to_string(), 2);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![loc_node, pat_node]));
}
/// Formats `PRESIGN`: three fixed children — action, location, expiry.
fn visit_presign(&mut self, presign: &'ast PresignStmt) {
    let children = vec![
        FormatTreeNode::new(AstFormatContext::new(format!("Action {}", presign.action))),
        FormatTreeNode::new(AstFormatContext::new(format!(
            "Location {}",
            presign.location
        ))),
        FormatTreeNode::new(AstFormatContext::new(format!("Expire {:?}", presign.expire))),
    ];
    let ctx = AstFormatContext::with_children("Presign".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `CREATE SHARE`: the share identifier plus an optional comment.
fn visit_create_share(&mut self, stmt: &'ast CreateShareStmt) {
    let mut children = vec![FormatTreeNode::new(AstFormatContext::new(format!(
        "ShareIdentifier {}",
        stmt.share
    )))];
    if let Some(comment) = &stmt.comment {
        children.push(FormatTreeNode::new(AstFormatContext::new(format!(
            "Comment {}",
            comment
        ))));
    }
    let ctx = AstFormatContext::with_children("CreateShare".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `DROP SHARE`: a single child holding the share identifier.
fn visit_drop_share(&mut self, stmt: &'ast DropShareStmt) {
    let child = FormatTreeNode::new(AstFormatContext::new(format!(
        "ShareIdentifier {}",
        stmt.share
    )));
    let ctx = AstFormatContext::with_children("DropShare".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `GRANT ... TO SHARE`: three fixed children — share identifier,
/// object, and privilege.
fn visit_grant_share_object(&mut self, stmt: &'ast GrantShareObjectStmt) {
    let children = vec![
        FormatTreeNode::new(AstFormatContext::new(format!(
            "ShareIdentifier {}",
            stmt.share
        ))),
        FormatTreeNode::new(AstFormatContext::new(format!("Object {}", stmt.object))),
        FormatTreeNode::new(AstFormatContext::new(format!(
            "Privilege {}",
            stmt.privilege
        ))),
    ];
    let ctx = AstFormatContext::with_children("GrantShareObject".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `REVOKE ... FROM SHARE`: three fixed children — share
/// identifier, object, and privilege.
fn visit_revoke_share_object(&mut self, stmt: &'ast RevokeShareObjectStmt) {
    let children = vec![
        FormatTreeNode::new(AstFormatContext::new(format!(
            "ShareIdentifier {}",
            stmt.share
        ))),
        FormatTreeNode::new(AstFormatContext::new(format!("Object {}", stmt.object))),
        FormatTreeNode::new(AstFormatContext::new(format!(
            "Privilege {}",
            stmt.privilege
        ))),
    ];
    let ctx = AstFormatContext::with_children("RevokeShareObject".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `ALTER SHARE ... TENANTS`: the share identifier plus a
/// `Tenants` node when any tenants are listed.
fn visit_alter_share_tenants(&mut self, stmt: &'ast AlterShareTenantsStmt) {
    let mut children = vec![FormatTreeNode::new(AstFormatContext::new(format!(
        "ShareIdentifier {}",
        stmt.share
    )))];
    if !stmt.tenants.is_empty() {
        let mut tenant_nodes = Vec::with_capacity(stmt.tenants.len());
        for tenant in stmt.tenants.iter() {
            self.visit_identifier(tenant);
            tenant_nodes.push(self.children.pop().unwrap());
        }
        let tenants_ctx =
            AstFormatContext::with_children("Tenants".to_string(), tenant_nodes.len());
        children.push(FormatTreeNode::with_children(tenants_ctx, tenant_nodes));
    }
    let ctx = AstFormatContext::with_children("AlterShareTenants".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
/// Formats `DESC SHARE`: a single child holding the share identifier.
fn visit_desc_share(&mut self, stmt: &'ast DescShareStmt) {
    let child = FormatTreeNode::new(AstFormatContext::new(format!(
        "ShareIdentifier {}",
        stmt.share
    )));
    let ctx = AstFormatContext::with_children("DescShare".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `SHOW SHARES`: a leaf node; the statement carries no data
/// that is rendered.
fn visit_show_shares(&mut self, _stmt: &'ast ShowSharesStmt) {
    let ctx = AstFormatContext::new("ShowShares".to_string());
    self.children.push(FormatTreeNode::new(ctx));
}
/// Formats `SHOW GRANTS ON <object>`: a single child holding the object.
fn visit_show_object_grant_privileges(&mut self, stmt: &'ast ShowObjectGrantPrivilegesStmt) {
    let child = FormatTreeNode::new(AstFormatContext::new(format!("Object {}", stmt.object)));
    let ctx = AstFormatContext::with_children("ShowObjectGrantPrivileges".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats `SHOW GRANTS OF SHARE`: a single child holding the share name.
fn visit_show_grants_of_share(&mut self, stmt: &'ast ShowGrantsOfShareStmt) {
    let child = FormatTreeNode::new(AstFormatContext::new(format!(
        "ShareName {}",
        stmt.share_name
    )));
    let ctx = AstFormatContext::with_children("ShowGrantsOfShare".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![child]));
}
/// Formats a `WITH` clause: one `CTE` child per common table expression,
/// each wrapping its formatted query and labelled with the CTE's alias.
fn visit_with(&mut self, with: &'ast With) {
    let mut cte_nodes = Vec::with_capacity(with.ctes.len());
    for cte in with.ctes.iter() {
        self.visit_query(&cte.query);
        let query_node = self.children.pop().unwrap();
        let cte_ctx = AstFormatContext::with_children_alias(
            "CTE".to_string(),
            1,
            Some(format!("{}", cte.alias)),
        );
        cte_nodes.push(FormatTreeNode::with_children(cte_ctx, vec![query_node]));
    }
    let ctx = AstFormatContext::with_children("With".to_string(), cte_nodes.len());
    self.children.push(FormatTreeNode::with_children(ctx, cte_nodes));
}
/// Formats a query body expression: dispatches on the variant and wraps
/// the result under a `QueryBody` node.
fn visit_set_expr(&mut self, expr: &'ast SetExpr) {
    match expr {
        SetExpr::Select(select_stmt) => self.visit_select_stmt(select_stmt),
        SetExpr::Query(query) => self.visit_query(query),
        SetExpr::SetOperation(set_operation) => self.visit_set_operation(set_operation),
    }
    let body = self.children.pop().unwrap();
    let ctx = AstFormatContext::with_children("QueryBody".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![body]));
}
/// Formats UNION/EXCEPT/INTERSECT: both operands as children, with the
/// operator kind in the parent label.
fn visit_set_operation(&mut self, set_operation: &'ast SetOperation) {
    self.visit_set_expr(&set_operation.left);
    let lhs = self.children.pop().unwrap();
    self.visit_set_expr(&set_operation.right);
    let rhs = self.children.pop().unwrap();
    let op = match set_operation.op {
        SetOperator::Union => "Union",
        SetOperator::Except => "Except",
        SetOperator::Intersect => "Intersect",
    };
    let ctx = AstFormatContext::with_children(format!("SetOperation {}", op), 2);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![lhs, rhs]));
}
/// Formats one ORDER BY element; only the sort expression is visited.
fn visit_order_by(&mut self, order_by: &'ast OrderByExpr) {
    self.visit_expr(&order_by.expr);
    let expr_node = self.children.pop().unwrap();
    let ctx = AstFormatContext::with_children("OrderByElement".to_string(), 1);
    self.children
        .push(FormatTreeNode::with_children(ctx, vec![expr_node]));
}
/// Formats a `SELECT` statement as a `SelectQuery` node.
///
/// Children are appended in clause order, and only for clauses present
/// in the statement: `SelectList`, `TableList` (FROM), `Where`, one of
/// the GROUP BY list shapes, and `Having`.
fn visit_select_stmt(&mut self, stmt: &'ast SelectStmt) {
let mut children = Vec::new();
// SELECT list: one child per select target.
if !stmt.select_list.is_empty() {
let mut select_list_children = Vec::with_capacity(stmt.select_list.len());
for select_target in stmt.select_list.iter() {
self.visit_select_target(select_target);
select_list_children.push(self.children.pop().unwrap());
}
let select_list_name = "SelectList".to_string();
let select_list_format_ctx =
AstFormatContext::with_children(select_list_name, select_list_children.len());
let select_list_node =
FormatTreeNode::with_children(select_list_format_ctx, select_list_children);
children.push(select_list_node);
}
// FROM clause: one child per table reference.
if !stmt.from.is_empty() {
let mut table_list_children = Vec::with_capacity(stmt.from.len());
for table in stmt.from.iter() {
self.visit_table_reference(table);
table_list_children.push(self.children.pop().unwrap());
}
let table_list_name = "TableList".to_string();
let table_list_format_ctx =
AstFormatContext::with_children(table_list_name, table_list_children.len());
let table_list_node =
FormatTreeNode::with_children(table_list_format_ctx, table_list_children);
children.push(table_list_node);
}
// WHERE clause: a `Where` node wrapping the predicate expression.
if let Some(selection) = &stmt.selection {
self.visit_expr(selection);
let selection_child = self.children.pop().unwrap();
let selection_name = "Where".to_string();
let selection_format_ctx = AstFormatContext::with_children(selection_name, 1);
let selection_node =
FormatTreeNode::with_children(selection_format_ctx, vec![selection_child]);
children.push(selection_node);
}
// GROUP BY: each variant produces its own list node — plain and
// grouping-sets both use "GroupByList" (grouping sets nest one
// "GroupingSet" node per set), ROLLUP uses "GroupByRollUpList",
// CUBE uses "GroupByCubeList".
if let Some(group_by) = &stmt.group_by {
match group_by {
GroupBy::Normal(exprs) => {
let mut group_by_list_children = Vec::with_capacity(exprs.len());
for group_by in exprs.iter() {
self.visit_expr(group_by);
group_by_list_children.push(self.children.pop().unwrap());
}
let group_by_list_name = "GroupByList".to_string();
let group_by_list_format_ctx = AstFormatContext::with_children(
group_by_list_name,
group_by_list_children.len(),
);
let group_by_list_node = FormatTreeNode::with_children(
group_by_list_format_ctx,
group_by_list_children,
);
children.push(group_by_list_node);
}
GroupBy::GroupingSets(sets) => {
let mut grouping_sets = Vec::with_capacity(sets.len());
for set in sets.iter() {
let mut grouping_set = Vec::with_capacity(set.len());
for expr in set.iter() {
self.visit_expr(expr);
grouping_set.push(self.children.pop().unwrap());
}
let name = "GroupingSet".to_string();
let grouping_set_format_ctx =
AstFormatContext::with_children(name, grouping_set.len());
let grouping_set_node =
FormatTreeNode::with_children(grouping_set_format_ctx, grouping_set);
grouping_sets.push(grouping_set_node);
}
let group_by_list_name = "GroupByList".to_string();
let group_by_list_format_ctx =
AstFormatContext::with_children(group_by_list_name, grouping_sets.len());
let group_by_list_node =
FormatTreeNode::with_children(group_by_list_format_ctx, grouping_sets);
children.push(group_by_list_node);
}
GroupBy::Rollup(exprs) => {
let mut rollup_list_children = Vec::with_capacity(exprs.len());
for group_by in exprs.iter() {
self.visit_expr(group_by);
rollup_list_children.push(self.children.pop().unwrap());
}
let rollup_list_name = "GroupByRollUpList".to_string();
let rollup_list_format_ctx = AstFormatContext::with_children(
rollup_list_name,
rollup_list_children.len(),
);
let rollup_list_node =
FormatTreeNode::with_children(rollup_list_format_ctx, rollup_list_children);
children.push(rollup_list_node);
}
GroupBy::Cube(exprs) => {
let mut cube_list_children = Vec::with_capacity(exprs.len());
for group_by in exprs.iter() {
self.visit_expr(group_by);
cube_list_children.push(self.children.pop().unwrap());
}
let cube_list_name = "GroupByCubeList".to_string();
let cube_list_format_ctx =
AstFormatContext::with_children(cube_list_name, cube_list_children.len());
let cube_list_node =
FormatTreeNode::with_children(cube_list_format_ctx, cube_list_children);
children.push(cube_list_node);
}
}
}
// HAVING clause: a `Having` node wrapping the predicate expression.
if let Some(having) = &stmt.having {
self.visit_expr(having);
let having_child = self.children.pop().unwrap();
let having_name = "Having".to_string();
let having_format_ctx = AstFormatContext::with_children(having_name, 1);
let having_node = FormatTreeNode::with_children(having_format_ctx, vec![having_child]);
children.push(having_node);
}
let name = "SelectQuery".to_string();
let format_ctx = AstFormatContext::with_children(name, children.len());
let node = FormatTreeNode::with_children(format_ctx, children);
self.children.push(node);
}
/// Formats one select-list item.
fn visit_select_target(&mut self, target: &'ast SelectTarget) {
    match target {
        // An expression target wraps the formatted expression under a
        // `Target` node, recording the alias on the node when present.
        SelectTarget::AliasedExpr { expr, alias } => {
            self.visit_expr(expr);
            let expr_node = self.children.pop().unwrap();
            let ctx = match alias {
                Some(alias) => AstFormatContext::with_children_alias(
                    "Target".to_string(),
                    1,
                    Some(format!("{}", alias)),
                ),
                None => AstFormatContext::with_children("Target".to_string(), 1),
            };
            self.children
                .push(FormatTreeNode::with_children(ctx, vec![expr_node]));
        }
        // A qualified-name target is a leaf rendered via the target's
        // `Display` formatting.
        SelectTarget::QualifiedName { .. } => {
            let ctx = AstFormatContext::new(format!("Target {}", target));
            self.children.push(FormatTreeNode::new(ctx));
        }
    }
}
/// Builds a format-tree node for a table reference in a FROM clause and
/// pushes it onto `self.children`. Each variant produces one node whose
/// label encodes the reference kind and whose children hold nested parts.
fn visit_table_reference(&mut self, table: &'ast TableReference) {
    match table {
        TableReference::Table {
            span: _,
            catalog,
            database,
            table,
            alias,
            travel_point,
            pivot,
            unpivot,
        } => {
            // Label: "TableIdentifier [catalog.][database.]table", with any
            // PIVOT/UNPIVOT clauses appended inline rather than as children.
            let mut name = String::new();
            name.push_str("TableIdentifier ");
            if let Some(catalog) = catalog {
                name.push_str(&catalog.to_string());
                name.push('.');
            }
            if let Some(database) = database {
                name.push_str(&database.to_string());
                name.push('.');
            }
            name.push_str(&table.to_string());
            if let Some(pivot) = pivot {
                name.push(' ');
                name.push_str(&pivot.to_string());
            }
            if let Some(unpivot) = unpivot {
                name.push(' ');
                name.push_str(&unpivot.to_string());
            }
            // Only a time-travel point (AT ...) contributes a child node.
            let mut children = Vec::new();
            if let Some(travel_point) = travel_point {
                self.visit_time_travel_point(travel_point);
                children.push(self.children.pop().unwrap());
            }
            let format_ctx = if let Some(alias) = alias {
                AstFormatContext::with_children_alias(
                    name,
                    children.len(),
                    Some(format!("{}", alias)),
                )
            } else {
                AstFormatContext::with_children(name, children.len())
            };
            let node = FormatTreeNode::with_children(format_ctx, children);
            self.children.push(node);
        }
        TableReference::Subquery {
            span: _,
            subquery,
            alias,
        } => {
            // Derived table: the formatted query is the single child.
            self.visit_query(subquery);
            let child = self.children.pop().unwrap();
            let name = "Subquery".to_string();
            let format_ctx = if let Some(alias) = alias {
                AstFormatContext::with_children_alias(name, 1, Some(format!("{}", alias)))
            } else {
                AstFormatContext::with_children(name, 1)
            };
            let node = FormatTreeNode::with_children(format_ctx, vec![child]);
            self.children.push(node);
        }
        TableReference::TableFunction {
            span: _,
            name,
            params,
            named_params,
            alias,
        } => {
            // Positional arguments become children as-is; named arguments
            // are rewrapped so their label reads "name=>value".
            let mut children = Vec::with_capacity(params.len());
            for param in params.iter() {
                self.visit_expr(param);
                children.push(self.children.pop().unwrap());
            }
            for (name, param) in named_params.iter() {
                self.visit_expr(param);
                let child = self.children.pop().unwrap();
                let node = FormatTreeNode::with_children(
                    AstFormatContext::new(format!("{}=>{}", name, child.payload)),
                    child.children,
                );
                children.push(node);
            }
            let func_name = format!("TableFunction {}", name);
            let format_ctx = if let Some(alias) = alias {
                AstFormatContext::with_children_alias(
                    func_name,
                    children.len(),
                    Some(format!("{}", alias)),
                )
            } else {
                AstFormatContext::with_children(func_name, children.len())
            };
            let node = FormatTreeNode::with_children(format_ctx, children);
            self.children.push(node);
        }
        TableReference::Join { span: _, join } => {
            // Delegate to visit_join; its node becomes our single child.
            self.visit_join(join);
            let child = self.children.pop().unwrap();
            let name = "TableJoin".to_string();
            let format_ctx = AstFormatContext::with_children(name, 1);
            let node = FormatTreeNode::with_children(format_ctx, vec![child]);
            self.children.push(node);
        }
        TableReference::Stage {
            span: _,
            location,
            options,
            alias,
        } => {
            // Stage options (files list, pattern) are rendered as leaf
            // children only when present.
            let mut children = Vec::new();
            if let Some(files) = &options.files {
                let files = files.join(",");
                let files = format!("files = {}", files);
                children.push(FormatTreeNode::new(AstFormatContext::new(files)))
            }
            if let Some(pattern) = &options.pattern {
                let pattern = format!("pattern = {}", pattern);
                children.push(FormatTreeNode::new(AstFormatContext::new(pattern)))
            }
            let stage_name = format!("Stage {:?}", location);
            let format_ctx = if let Some(alias) = alias {
                AstFormatContext::with_children_alias(
                    stage_name,
                    children.len(),
                    Some(format!("{}", alias)),
                )
            } else {
                AstFormatContext::with_children(stage_name, children.len())
            };
            let node = FormatTreeNode::with_children(format_ctx, children);
            self.children.push(node)
        }
    }
}
/// Builds a format-tree node for a time-travel point (AT clause) and pushes
/// it onto `self.children`.
fn visit_time_travel_point(&mut self, time: &'ast TimeTravelPoint) {
    match time {
        TimeTravelPoint::Snapshot(sid) => {
            // Snapshot ids need no children: render them as a leaf.
            let ctx = AstFormatContext::new(format!("Snapshot {}", sid));
            self.children.push(FormatTreeNode::new(ctx));
        }
        TimeTravelPoint::Timestamp(expr) => {
            // Timestamp expressions are formatted recursively and nested
            // under a "Timestamp" parent node.
            self.visit_expr(expr);
            let expr_node = self.children.pop().unwrap();
            let ctx = AstFormatContext::with_children("Timestamp".to_string(), 1);
            self.children
                .push(FormatTreeNode::with_children(ctx, vec![expr_node]));
        }
    }
}
/// Builds a "Join" format-tree node with three children — left table,
/// right table, and the join condition — and pushes it onto `self.children`.
fn visit_join(&mut self, join: &'ast Join) {
    // Both sides are visited first, in left-to-right order.
    self.visit_table_reference(&join.left);
    let left_node = self.children.pop().unwrap();
    self.visit_table_reference(&join.right);
    let right_node = self.children.pop().unwrap();

    // The condition is rendered as one node whose shape depends on the
    // condition kind.
    let condition_node = match &join.condition {
        JoinCondition::On(expr) => {
            self.visit_expr(expr);
            let cond_child = self.children.pop().unwrap();
            FormatTreeNode::with_children(
                AstFormatContext::with_children("ConditionOn".to_string(), 1),
                vec![cond_child],
            )
        }
        JoinCondition::Using(idents) => {
            let mut ident_nodes = Vec::with_capacity(idents.len());
            for ident in idents.iter() {
                self.visit_identifier(ident);
                ident_nodes.push(self.children.pop().unwrap());
            }
            FormatTreeNode::with_children(
                AstFormatContext::with_children("ConditionUsing".to_string(), ident_nodes.len()),
                ident_nodes,
            )
        }
        JoinCondition::Natural => {
            FormatTreeNode::new(AstFormatContext::new("ConditionNatural".to_string()))
        }
        JoinCondition::None => {
            FormatTreeNode::new(AstFormatContext::new("Condition".to_string()))
        }
    };

    let children = vec![left_node, right_node, condition_node];
    let ctx = AstFormatContext::with_children("Join".to_string(), children.len());
    self.children.push(FormatTreeNode::with_children(ctx, children));
}
}
|
use input_i_scanner::InputIScanner;
// Sorts a permutation into the identity using adjacent swaps, where each
// adjacent pair (k, k+1) may be swapped at most once and — per the final
// check — every one of the n-1 pairs must be used exactly once. Prints the
// 1-based swap indices in application order, or -1 if no such sequence
// exists. (Appears to be a competitive-programming task; exact constraints
// come from the problem statement — not visible here.)
fn main() {
    let stdin = std::io::stdin();
    let mut _i_i = InputIScanner::from(stdin.lock());
    // Input helper: scan!(T) reads one token, scan!(T; n) reads n tokens,
    // and the tuple forms read one value per listed type.
    macro_rules! scan {
        (($($t: ty),+)) => {
            ($(scan!($t)),+)
        };
        ($t: ty) => {
            _i_i.scan::<$t>() as $t
        };
        (($($t: ty),+); $n: expr) => {
            std::iter::repeat_with(|| scan!(($($t),+))).take($n).collect::<Vec<_>>()
        };
        ($t: ty; $n: expr) => {
            std::iter::repeat_with(|| scan!($t)).take($n).collect::<Vec<_>>()
        };
    }
    let n = scan!(usize);
    let p = scan!(usize; n);
    // Convert to 0-based values.
    let mut p: Vec<usize> = p.into_iter().map(|p| p - 1).collect();
    // position[v] = current index of value v within p; kept in sync with p.
    let mut position = vec![0; n];
    for (i, &p) in p.iter().enumerate() {
        position[p] = i;
    }
    // seen[k] marks that the swap of positions (k, k+1) has been used.
    let mut seen = vec![false; n];
    let mut ans = Vec::new();
    for i in 0..n {
        // Value i already in place — nothing to do.
        if p[i] == i {
            continue;
        }
        // Bring value i leftwards from its current index j to index i.
        let j = position[i];
        // All values < i are fixed at this point, so i must lie to the right.
        assert!(i < j);
        for k in (i..j).rev() {
            position.swap(p[k], p[k + 1]);
            p.swap(k, k + 1);
            // Reusing a swap index violates the "at most once" rule.
            if seen[k] {
                println!("-1");
                return;
            }
            seen[k] = true;
            ans.push(k);
        }
        assert_eq!(p[i], i);
    }
    // Exactly n-1 swaps must have been performed (each pair used once).
    if ans.len() != n - 1 {
        println!("-1");
        return;
    }
    // Report swap indices 1-based.
    for ans in ans {
        println!("{}", ans + 1);
    }
}
|
use std::io;
use std::io::Read;
use regex::Regex;
/// Advent-of-Code style password validation: each input line has the form
/// "a-b c: password", and a line is valid when exactly one of the 1-based
/// positions `a` and `b` in the password holds the letter `c`. Prints the
/// number of valid lines read from stdin.
fn main() {
    let mut buf = String::new();
    io::stdin().read_to_string(&mut buf).unwrap();
    let rule = Regex::new(r"(?m)^(\d+)-(\d+) (\w): (\w+)$").unwrap();
    let valid = rule
        .captures_iter(&buf)
        .filter(|caps| {
            let lo: usize = caps[1].parse().unwrap();
            let hi: usize = caps[2].parse().unwrap();
            let wanted = caps[3].chars().next();
            // nth() consumes the iterator, so the second lookup is offset
            // relative to the first position, not the string start.
            let mut chars = caps[4].chars();
            let lo_matches = chars.nth(lo - 1) == wanted;
            let hi_matches = chars.nth(hi - lo - 1) == wanted;
            // Exactly one position must match.
            lo_matches ^ hi_matches
        })
        .count();
    println!("{}", valid);
}
|
pub mod clint;
pub mod plic;
pub mod uart;
pub mod virtio_block_disk;
|
use amethyst::{
core::transform::Transform,
prelude::*,
renderer::{Camera},
ui::{Anchor, UiTransform},
};
use crate::sprite::storage::SpriteSheetStorage;
use crate::component::player::Player;
use crate::component::def::Side;
use crate::component::score::ScoreText;
use crate::component::rule::Rules;
use super::def::CurrentState;
use super::def::Game;
use super::def::UserAction;
use super::winner::WinnerState;
use super::def::WinnerMessage;
pub struct GameplayState;
impl SimpleState for GameplayState {
    /// Registers components and creates every entity/resource the match
    /// needs (sprites, game state, UI, players, camera).
    fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
        let world = data.world;
        world.register::<Player>();
        // Load sprite sheets once and share them as a world resource.
        let sheets = SpriteSheetStorage::new(world);
        world.insert(sheets);
        world.insert(Game::default());
        initialise_result(world);
        initialise_scoreboard(world);
        initialise_players(world);
        initialise_camera(world);
    }

    /// Pushes the winner screen when the game asks for it; otherwise stays
    /// in the gameplay state.
    fn update(&mut self, data: &mut StateData<'_, GameData<'_, '_>>) -> SimpleTrans {
        let game = data.world.write_resource::<Game>();
        if let UserAction::ShowWinner = game.user_action {
            return Trans::Push(Box::new(WinnerState));
        }
        Trans::None
    }

    /// Restores the gameplay flag when a pushed state (e.g. winner screen)
    /// returns control.
    fn on_resume(&mut self, mut data: StateData<'_, GameData<'_, '_>>) {
        data.world.write_resource::<Game>().current_state = CurrentState::GamePlay;
    }
}
/// Spawns a standard 2D camera centred on the arena.
fn initialise_camera(world: &mut World) {
    let mut camera_transform = Transform::default();
    // Centre of the arena, in front of the play plane (z = 1).
    camera_transform.set_translation_xyz(super::ARENA_WIDTH * 0.5, super::ARENA_HEIGHT * 0.5, 1.0);
    world
        .create_entity()
        .with(Camera::standard_2d(super::ARENA_WIDTH, super::ARENA_HEIGHT))
        .with(camera_transform)
        .build();
}
/// Spawns both paddles, vertically centred: the user on the right edge of
/// the arena and the opponent on the left edge.
fn initialise_players(world: &mut World) {
    let mid_y = super::ARENA_HEIGHT / 2.0;
    let edge_offset = crate::component::PLAYER_WIDTH * 1.0;

    let mut left_transform = Transform::default();
    left_transform.set_translation_xyz(edge_offset, mid_y, 0.0);
    let mut right_transform = Transform::default();
    right_transform.set_translation_xyz(super::ARENA_WIDTH - edge_offset, mid_y, 0.0);

    // Build both players (and grab their sprites) before creating entities.
    let user = Player::new(Side::User);
    let user_sprite = user.get_sprite_render(world).clone();
    let opponent = Player::new(Side::Opponent);
    let opponent_sprite = opponent.get_sprite_render(world).clone();

    // User paddle sits on the right side of the arena.
    world
        .create_entity()
        .with(user_sprite)
        .with(user)
        .with(right_transform)
        .build();
    // Opponent paddle sits on the left side.
    world
        .create_entity()
        .with(opponent_sprite)
        .with(opponent)
        .with(left_transform)
        .build();
}
/// Creates the winner banner UI resource, initially with an empty message.
fn initialise_result(world: &mut World) {
    // Centred banner with a high z (99) so it renders above other UI.
    let banner_transform = UiTransform::new(
        "winner_message".to_string(), Anchor::Middle, Anchor::Middle,
        0., 0., 99., 600., 150.,
    );
    let banner = WinnerMessage::new(world, banner_transform, String::from(""));
    world.insert(banner);
}
fn initialise_scoreboard(world: &mut World) {
let opponent_transform = UiTransform::new(
"opponent".to_string(), Anchor::TopMiddle, Anchor::TopMiddle,
-150., -150., 1., 200., 150.,
);
let user_transform = UiTransform::new(
"user".to_string(), Anchor::TopMiddle, Anchor::TopMiddle,
150., -150., 1., 200., 150.,
);
let rules_transform = UiTransform::new(
"rule".to_string(), Anchor::BottomMiddle, Anchor::BottomMiddle,
0., 150., 1., 1000., 50.,
);
let score_text = ScoreText::new(world, user_transform, opponent_transform);
world.insert(score_text);
let rules = Rules::new(world, rules_transform);
world.insert(rules);
} |
#[macro_use]
extern crate rental;
// Marker trait used as the trait-object type inside the rental struct below.
pub trait MyTrait { }
// Concrete type backing the trait object in the test.
pub struct MyStruct { }
impl MyTrait for MyStruct { }
// Self-referential struct generated by the `rental` crate: `my_suffix`
// borrows from the boxed trait object stored alongside it in `my_trait`
// (the `'my_trait` lifetime is synthesized by the macro from the field name).
// NOTE(review): uses pre-2018 bare trait-object syntax (`Box<MyTrait + ...>`),
// which this rental version's macro expects — don't add `dyn` without
// checking the crate version.
rental! {
    pub mod rentals {
        use ::MyTrait;
        #[rental]
        pub struct RentTrait {
            my_trait: Box<MyTrait + 'static>,
            my_suffix: &'my_trait (MyTrait + 'static),
        }
    }
}
// Smoke test: constructing the rental succeeds — the closure reborrows the
// boxed trait object to produce the stored suffix reference.
#[test]
fn new() {
    let _tr = rentals::RentTrait::new(
        Box::new(MyStruct{}),
        |t| &*t,
    );
}
|
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use itertools::Itertools;
use whiteread::parse_line;
/// Reads an H x W grid and, for each cell (i, j), prints
/// row_sum[i] + col_sum[j] - a[i][j] (the cell counted once).
/// Values in a row are space-separated, one row per output line.
fn main() {
    let (h, w): (usize, usize) = parse_line().unwrap();
    let mut grid: Vec<Vec<usize>> = Vec::with_capacity(h);
    for _ in 0..h {
        grid.push(parse_line().unwrap());
    }
    // Per-row totals.
    let row_sums: Vec<usize> = grid.iter().map(|row| row.iter().sum()).collect();
    // Per-column totals.
    let col_sums: Vec<usize> = (0..w)
        .map(|j| (0..h).map(|i| grid[i][j]).sum())
        .collect();
    for i in 0..h {
        let line = (0..w)
            .map(|j| (row_sums[i] + col_sums[j] - grid[i][j]).to_string())
            .collect::<Vec<_>>()
            .join(" ");
        println!("{}", line);
    }
}
|
use super::ppu::Ppu;
use super::spu::Spu;
use super::cpu::Cpu;
use super::GameboyType;
use super::interconnect::Interconnect;
pub use super::ppu::VideoSink;
pub use super::gamepad::{InputEvent,Gamepad,Button,ButtonState};
pub use super::cart::Cart;
/// Top-level emulator facade: owns the CPU, which in turn owns the
/// interconnect (PPU, SPU, gamepad, cartridge).
pub struct Console {
    cpu: Cpu,
}
impl Console {
    /// Builds a console around `cart`. Only the DMG (original Game Boy)
    /// model is selected here.
    pub fn new(cart: Cart) -> Console {
        let model = GameboyType::Dmg;
        let bus = Interconnect::new(model, cart, Ppu::new(), Spu::new(), Gamepad::new());
        Console {
            cpu: Cpu::new(model, bus),
        }
    }

    /// Steps the CPU until exactly one video frame has been delivered to
    /// `video_sink`.
    pub fn run_for_one_frame(&mut self, video_sink: &mut dyn VideoSink) {
        let mut handler = FrameHandler::new(video_sink);
        while !handler.frame_available {
            self.cpu.step(&mut handler);
        }
    }

    /// Forwards a gamepad input event to the emulated gamepad.
    pub fn handle_event(&mut self, input_event: InputEvent) {
        self.cpu.interconnect.gamepad.handle_event(input_event)
    }

    /// Returns a copy of the cartridge RAM, if the cartridge has any.
    pub fn copy_cart_ram(&self) -> Option<Box<[u8]>> {
        self.cpu.interconnect.cart.copy_ram()
    }
}
// Adapter that forwards frames to an outer sink while recording whether a
// frame arrived, so `run_for_one_frame` knows when to stop stepping the CPU.
struct FrameHandler<'a> {
    // Set to true once the wrapped sink has received a frame.
    frame_available: bool,
    video_sink: &'a mut dyn VideoSink,
}
impl<'a> FrameHandler<'a> {
    /// Wraps `video_sink`; no frame has been observed yet.
    fn new(video_sink: &'a mut dyn VideoSink) -> FrameHandler<'a> {
        FrameHandler { video_sink, frame_available: false }
    }
}
impl<'a> VideoSink for FrameHandler<'a> {
fn frame_available(&mut self, frame: &Box<[u32]>) {
self.video_sink.frame_available(frame);
self.frame_available = true
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.