text stringlengths 8 4.13M |
|---|
use itertools::Itertools;
use raster::Color;
use std::collections::HashMap;
use std::convert::Infallible;
use std::str::FromStr;
/// Look up `key` in the attribute map and return an owned copy of its value.
///
/// Returns `None` when the key is absent.
fn get_attribute(attributes: &HashMap<&str, &str>, key: &str) -> Option<String> {
    // `map` is the idiomatic form of `and_then(|x| Some(..))`
    // (clippy: bind_instead_of_map); `&key` double-reference was also needless.
    attributes.get(key).map(|x| x.to_string())
}
/// One Advent-of-Code day-4 passport record; every field is optional
/// because records may be incomplete ("cid" is allowed to be missing
/// even for a valid passport — see `is_valid`).
#[derive(Default, Debug)]
struct Passport {
    // Birth Year
    byr: Option<String>,
    // Issue Year
    iyr: Option<String>,
    // Expiration Year
    eyr: Option<String>,
    // Height
    hgt: Option<String>,
    // Hair Color
    hcl: Option<String>,
    // Eye Color
    ecl: Option<String>,
    // Passport ID
    pid: Option<String>,
    // Country ID
    cid: Option<String>,
}
impl Passport {
fn is_valid(&self) -> bool {
return self.byr.is_some()
&& self.iyr.is_some()
&& self.eyr.is_some()
&& self.hgt.is_some()
&& self.hcl.is_some()
&& self.ecl.is_some()
&& self.pid.is_some();
}
fn is_extra_valid(&self) -> bool {
let byr: usize = self.byr.as_ref().unwrap().parse::<usize>().unwrap();
if byr.to_string().len() != 4 {
return false;
}
if byr < 1920 || byr > 2002 {
return false;
}
let iyr: usize = self.iyr.as_ref().unwrap().parse::<usize>().unwrap();
if iyr.to_string().len() != 4 {
return false;
}
if iyr < 2010 || iyr > 2020 {
return false;
}
let eyr: usize = self.eyr.as_ref().unwrap().parse::<usize>().unwrap();
if eyr.to_string().len() != 4 {
return false;
}
if eyr < 2020 || eyr > 2030 {
return false;
}
let hgt: &String = self.hgt.as_ref().unwrap();
let height_type: String = hgt
.chars()
.rev()
.take(2)
.collect::<Vec<_>>()
.into_iter()
.rev()
.collect::<String>();
if height_type != "cm" && height_type != "in" {
return false;
}
let height = hgt[..hgt.len() - 2].parse::<usize>().expect("Parse height");
if height_type == "cm" && (height < 150 || height > 193) {
return false;
} else if height_type == "in" && (height < 59 || height > 76) {
return false;
}
let hcl: &String = self.hcl.as_ref().unwrap();
if hcl.len() != 7 {
return false;
}
let hair_color = Color::hex(hcl);
if hair_color.is_err() {
return false;
}
let ecl: &String = self.ecl.as_ref().unwrap();
match ecl.as_str() {
"amb" | "blu" | "brn" | "gry" | "grn" | "hzl" | "oth" => (),
_ => return false,
};
let pid: &String = self.pid.as_ref().unwrap();
if pid.len() != 9 {
return false;
}
let is_numeric_pin = pid.chars().all(char::is_numeric);
if !is_numeric_pin {
return false;
}
return true;
}
}
impl FromStr for Passport {
    type Err = Infallible;
    /// Parse a single passport record: whitespace-separated `key:value` pairs.
    ///
    /// Fixes two fragilities of the previous version: a token without a ':'
    /// no longer panics (it is skipped), and the pair is split on the FIRST
    /// colon only, so values containing ':' survive intact.
    fn from_str(data: &str) -> Result<Self, Self::Err> {
        let attributes: HashMap<&str, &str> = data
            .split_whitespace()
            .filter_map(|pair| {
                let mut parts = pair.splitn(2, ':');
                Some((parts.next()?, parts.next()?))
            })
            .collect();
        Ok(Passport {
            byr: get_attribute(&attributes, "byr"),
            iyr: get_attribute(&attributes, "iyr"),
            eyr: get_attribute(&attributes, "eyr"),
            hgt: get_attribute(&attributes, "hgt"),
            hcl: get_attribute(&attributes, "hcl"),
            ecl: get_attribute(&attributes, "ecl"),
            pid: get_attribute(&attributes, "pid"),
            cid: get_attribute(&attributes, "cid"),
        })
    }
}
/// Count the passports in `input` (blank-line-separated records) that
/// contain all required fields.
pub fn part1(input: &str) -> Result<usize, String> {
    let mut valid = 0;
    for record in input.split("\n\n") {
        let flattened = record.replace("\n", " ");
        let passport = flattened.parse::<Passport>().expect("Parse password");
        if passport.is_valid() {
            valid += 1;
        }
    }
    Ok(valid)
}
/// Count the passports that have all required fields AND pass the stricter
/// per-field validation of `is_extra_valid`.
pub fn part2(input: &str) -> Result<usize, String> {
    let mut valid = 0;
    for record in input.split("\n\n") {
        let flattened = record.replace("\n", " ");
        let passport = flattened.parse::<Passport>().expect("Parse password");
        if passport.is_valid() && passport.is_extra_valid() {
            valid += 1;
        }
    }
    Ok(valid)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Example records from the AoC day-4 puzzle statement: records 1 and 3
    // have all required fields, record 2 is missing hgt, record 4 lacks cid
    // (which is allowed).
    #[test]
    fn test_part1() {
        const INPUT: &str = "ecl:gry pid:860033327 eyr:2020 hcl:#fffffd\nbyr:1937 iyr:2017 cid:147 hgt:183cm\n\niyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884\nhcl:#cfa07d byr:1929\n\nhcl:#ae17e1 iyr:2013\neyr:2024\necl:brn pid:760753108 byr:1931\nhgt:179cm\n\nhcl:#cfa07d eyr:2025 pid:166559648\niyr:2011 ecl:brn hgt:59in\n";
        assert_eq!(part1(INPUT).unwrap(), 2);
    }
    // Part 2 fixtures: four records that each break at least one field rule,
    // and four that satisfy every rule.
    #[test]
    fn test_part2() {
        const INVALID: &str = "eyr:1972 cid:100\nhcl:#18171d ecl:amb hgt:170 pid:186cm iyr:2018 byr:1926\n\niyr:2019\nhcl:#602927 eyr:1967 hgt:170cm\necl:grn pid:012533040 byr:1946\n\nhcl:dab227 iyr:2012\necl:brn hgt:182cm pid:021572410 eyr:2020 byr:1992 cid:277\n\nhgt:59cm ecl:zzz\neyr:2038 hcl:74454a iyr:2023\npid:3556412378 byr:2007\n";
        const VALID: &str = "pid:087499704 hgt:74in ecl:grn iyr:2012 eyr:2030 byr:1980\nhcl:#623a2f\n\neyr:2029 ecl:blu cid:129 byr:1989\niyr:2014 pid:896056539 hcl:#a97842 hgt:165cm\n\nhcl:#888785\nhgt:164cm byr:2001 iyr:2015 cid:88\npid:545766238 ecl:hzl\neyr:2022\n\niyr:2010 hgt:158cm hcl:#b6652a ecl:blu byr:1944 eyr:2021 pid:093154719\n";
        assert_eq!(part2(INVALID).unwrap(), 0);
        assert_eq!(part2(VALID).unwrap(), 4);
    }
}
|
/// An enum to represent all characters in the Grantha block.
///
/// Note: the block's assigned code points are NOT contiguous — there are
/// gaps (e.g. U+11304, U+1130D..U+1130E, U+11311..U+11312), visible in the
/// per-variant code points below. Code iterating by code point must skip
/// the unassigned values.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Grantha {
    /// \u{11300}: '𑌀'
    SignCombiningAnusvaraAbove,
    /// \u{11301}: '𑌁'
    SignCandrabindu,
    /// \u{11302}: '𑌂'
    SignAnusvara,
    /// \u{11303}: '𑌃'
    SignVisarga,
    /// \u{11305}: '𑌅'
    LetterA,
    /// \u{11306}: '𑌆'
    LetterAa,
    /// \u{11307}: '𑌇'
    LetterI,
    /// \u{11308}: '𑌈'
    LetterIi,
    /// \u{11309}: '𑌉'
    LetterU,
    /// \u{1130a}: '𑌊'
    LetterUu,
    /// \u{1130b}: '𑌋'
    LetterVocalicR,
    /// \u{1130c}: '𑌌'
    LetterVocalicL,
    /// \u{1130f}: '𑌏'
    LetterEe,
    /// \u{11310}: '𑌐'
    LetterAi,
    /// \u{11313}: '𑌓'
    LetterOo,
    /// \u{11314}: '𑌔'
    LetterAu,
    /// \u{11315}: '𑌕'
    LetterKa,
    /// \u{11316}: '𑌖'
    LetterKha,
    /// \u{11317}: '𑌗'
    LetterGa,
    /// \u{11318}: '𑌘'
    LetterGha,
    /// \u{11319}: '𑌙'
    LetterNga,
    /// \u{1131a}: '𑌚'
    LetterCa,
    /// \u{1131b}: '𑌛'
    LetterCha,
    /// \u{1131c}: '𑌜'
    LetterJa,
    /// \u{1131d}: '𑌝'
    LetterJha,
    /// \u{1131e}: '𑌞'
    LetterNya,
    /// \u{1131f}: '𑌟'
    LetterTta,
    /// \u{11320}: '𑌠'
    LetterTtha,
    /// \u{11321}: '𑌡'
    LetterDda,
    /// \u{11322}: '𑌢'
    LetterDdha,
    /// \u{11323}: '𑌣'
    LetterNna,
    /// \u{11324}: '𑌤'
    LetterTa,
    /// \u{11325}: '𑌥'
    LetterTha,
    /// \u{11326}: '𑌦'
    LetterDa,
    /// \u{11327}: '𑌧'
    LetterDha,
    /// \u{11328}: '𑌨'
    LetterNa,
    /// \u{1132a}: '𑌪'
    LetterPa,
    /// \u{1132b}: '𑌫'
    LetterPha,
    /// \u{1132c}: '𑌬'
    LetterBa,
    /// \u{1132d}: '𑌭'
    LetterBha,
    /// \u{1132e}: '𑌮'
    LetterMa,
    /// \u{1132f}: '𑌯'
    LetterYa,
    /// \u{11330}: '𑌰'
    LetterRa,
    /// \u{11332}: '𑌲'
    LetterLa,
    /// \u{11333}: '𑌳'
    LetterLla,
    /// \u{11335}: '𑌵'
    LetterVa,
    /// \u{11336}: '𑌶'
    LetterSha,
    /// \u{11337}: '𑌷'
    LetterSsa,
    /// \u{11338}: '𑌸'
    LetterSa,
    /// \u{11339}: '𑌹'
    LetterHa,
    /// \u{1133b}: '𑌻'
    CombiningBinduBelow,
    /// \u{1133c}: '𑌼'
    SignNukta,
    /// \u{1133d}: '𑌽'
    SignAvagraha,
    /// \u{1133e}: '𑌾'
    VowelSignAa,
    /// \u{1133f}: '𑌿'
    VowelSignI,
    /// \u{11340}: '𑍀'
    VowelSignIi,
    /// \u{11341}: '𑍁'
    VowelSignU,
    /// \u{11342}: '𑍂'
    VowelSignUu,
    /// \u{11343}: '𑍃'
    VowelSignVocalicR,
    /// \u{11344}: '𑍄'
    VowelSignVocalicRr,
    /// \u{11347}: '𑍇'
    VowelSignEe,
    /// \u{11348}: '𑍈'
    VowelSignAi,
    /// \u{1134b}: '𑍋'
    VowelSignOo,
    /// \u{1134c}: '𑍌'
    VowelSignAu,
    /// \u{1134d}: '𑍍'
    SignVirama,
    /// \u{11350}: '𑍐'
    Om,
    /// \u{11357}: '𑍗'
    AuLengthMark,
    /// \u{1135d}: '𑍝'
    SignPluta,
    /// \u{1135e}: '𑍞'
    LetterVedicAnusvara,
    /// \u{1135f}: '𑍟'
    LetterVedicDoubleAnusvara,
    /// \u{11360}: '𑍠'
    LetterVocalicRr,
    /// \u{11361}: '𑍡'
    LetterVocalicLl,
    /// \u{11362}: '𑍢'
    VowelSignVocalicL,
    /// \u{11363}: '𑍣'
    VowelSignVocalicLl,
    /// \u{11366}: '𑍦'
    CombiningDigitZero,
    /// \u{11367}: '𑍧'
    CombiningDigitOne,
    /// \u{11368}: '𑍨'
    CombiningDigitTwo,
    /// \u{11369}: '𑍩'
    CombiningDigitThree,
    /// \u{1136a}: '𑍪'
    CombiningDigitFour,
    /// \u{1136b}: '𑍫'
    CombiningDigitFive,
    /// \u{1136c}: '𑍬'
    CombiningDigitSix,
    /// \u{11370}: '𑍰'
    CombiningLetterA,
    /// \u{11371}: '𑍱'
    CombiningLetterKa,
    /// \u{11372}: '𑍲'
    CombiningLetterNa,
    /// \u{11373}: '𑍳'
    CombiningLetterVi,
    /// \u{11374}: '𑍴'
    CombiningLetterPa,
}
/// Conversion to the underlying Unicode character.
///
/// Implemented as `From` (the idiomatic direction to implement); the
/// standard blanket impl still provides `Into<char>` for existing callers.
impl From<Grantha> for char {
    fn from(g: Grantha) -> char {
        match g {
            Grantha::SignCombiningAnusvaraAbove => '𑌀',
            Grantha::SignCandrabindu => '𑌁',
            Grantha::SignAnusvara => '𑌂',
            Grantha::SignVisarga => '𑌃',
            Grantha::LetterA => '𑌅',
            Grantha::LetterAa => '𑌆',
            Grantha::LetterI => '𑌇',
            Grantha::LetterIi => '𑌈',
            Grantha::LetterU => '𑌉',
            Grantha::LetterUu => '𑌊',
            Grantha::LetterVocalicR => '𑌋',
            Grantha::LetterVocalicL => '𑌌',
            Grantha::LetterEe => '𑌏',
            Grantha::LetterAi => '𑌐',
            Grantha::LetterOo => '𑌓',
            Grantha::LetterAu => '𑌔',
            Grantha::LetterKa => '𑌕',
            Grantha::LetterKha => '𑌖',
            Grantha::LetterGa => '𑌗',
            Grantha::LetterGha => '𑌘',
            Grantha::LetterNga => '𑌙',
            Grantha::LetterCa => '𑌚',
            Grantha::LetterCha => '𑌛',
            Grantha::LetterJa => '𑌜',
            Grantha::LetterJha => '𑌝',
            Grantha::LetterNya => '𑌞',
            Grantha::LetterTta => '𑌟',
            Grantha::LetterTtha => '𑌠',
            Grantha::LetterDda => '𑌡',
            Grantha::LetterDdha => '𑌢',
            Grantha::LetterNna => '𑌣',
            Grantha::LetterTa => '𑌤',
            Grantha::LetterTha => '𑌥',
            Grantha::LetterDa => '𑌦',
            Grantha::LetterDha => '𑌧',
            Grantha::LetterNa => '𑌨',
            Grantha::LetterPa => '𑌪',
            Grantha::LetterPha => '𑌫',
            Grantha::LetterBa => '𑌬',
            Grantha::LetterBha => '𑌭',
            Grantha::LetterMa => '𑌮',
            Grantha::LetterYa => '𑌯',
            Grantha::LetterRa => '𑌰',
            Grantha::LetterLa => '𑌲',
            Grantha::LetterLla => '𑌳',
            Grantha::LetterVa => '𑌵',
            Grantha::LetterSha => '𑌶',
            Grantha::LetterSsa => '𑌷',
            Grantha::LetterSa => '𑌸',
            Grantha::LetterHa => '𑌹',
            Grantha::CombiningBinduBelow => '𑌻',
            Grantha::SignNukta => '𑌼',
            Grantha::SignAvagraha => '𑌽',
            Grantha::VowelSignAa => '𑌾',
            Grantha::VowelSignI => '𑌿',
            Grantha::VowelSignIi => '𑍀',
            Grantha::VowelSignU => '𑍁',
            Grantha::VowelSignUu => '𑍂',
            Grantha::VowelSignVocalicR => '𑍃',
            Grantha::VowelSignVocalicRr => '𑍄',
            Grantha::VowelSignEe => '𑍇',
            Grantha::VowelSignAi => '𑍈',
            Grantha::VowelSignOo => '𑍋',
            Grantha::VowelSignAu => '𑍌',
            Grantha::SignVirama => '𑍍',
            Grantha::Om => '𑍐',
            Grantha::AuLengthMark => '𑍗',
            Grantha::SignPluta => '𑍝',
            Grantha::LetterVedicAnusvara => '𑍞',
            Grantha::LetterVedicDoubleAnusvara => '𑍟',
            Grantha::LetterVocalicRr => '𑍠',
            Grantha::LetterVocalicLl => '𑍡',
            Grantha::VowelSignVocalicL => '𑍢',
            Grantha::VowelSignVocalicLl => '𑍣',
            Grantha::CombiningDigitZero => '𑍦',
            Grantha::CombiningDigitOne => '𑍧',
            Grantha::CombiningDigitTwo => '𑍨',
            Grantha::CombiningDigitThree => '𑍩',
            Grantha::CombiningDigitFour => '𑍪',
            Grantha::CombiningDigitFive => '𑍫',
            Grantha::CombiningDigitSix => '𑍬',
            Grantha::CombiningLetterA => '𑍰',
            Grantha::CombiningLetterKa => '𑍱',
            Grantha::CombiningLetterNa => '𑍲',
            Grantha::CombiningLetterVi => '𑍳',
            Grantha::CombiningLetterPa => '𑍴',
        }
    }
}
/// Fallible conversion from an arbitrary `char`; any character outside the
/// Grantha block (or in one of its unassigned gaps) yields `Err(())`.
impl std::convert::TryFrom<char> for Grantha {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '𑌀' => Ok(Grantha::SignCombiningAnusvaraAbove),
            '𑌁' => Ok(Grantha::SignCandrabindu),
            '𑌂' => Ok(Grantha::SignAnusvara),
            '𑌃' => Ok(Grantha::SignVisarga),
            '𑌅' => Ok(Grantha::LetterA),
            '𑌆' => Ok(Grantha::LetterAa),
            '𑌇' => Ok(Grantha::LetterI),
            '𑌈' => Ok(Grantha::LetterIi),
            '𑌉' => Ok(Grantha::LetterU),
            '𑌊' => Ok(Grantha::LetterUu),
            '𑌋' => Ok(Grantha::LetterVocalicR),
            '𑌌' => Ok(Grantha::LetterVocalicL),
            '𑌏' => Ok(Grantha::LetterEe),
            '𑌐' => Ok(Grantha::LetterAi),
            '𑌓' => Ok(Grantha::LetterOo),
            '𑌔' => Ok(Grantha::LetterAu),
            '𑌕' => Ok(Grantha::LetterKa),
            '𑌖' => Ok(Grantha::LetterKha),
            '𑌗' => Ok(Grantha::LetterGa),
            '𑌘' => Ok(Grantha::LetterGha),
            '𑌙' => Ok(Grantha::LetterNga),
            '𑌚' => Ok(Grantha::LetterCa),
            '𑌛' => Ok(Grantha::LetterCha),
            '𑌜' => Ok(Grantha::LetterJa),
            '𑌝' => Ok(Grantha::LetterJha),
            '𑌞' => Ok(Grantha::LetterNya),
            '𑌟' => Ok(Grantha::LetterTta),
            '𑌠' => Ok(Grantha::LetterTtha),
            '𑌡' => Ok(Grantha::LetterDda),
            '𑌢' => Ok(Grantha::LetterDdha),
            '𑌣' => Ok(Grantha::LetterNna),
            '𑌤' => Ok(Grantha::LetterTa),
            '𑌥' => Ok(Grantha::LetterTha),
            '𑌦' => Ok(Grantha::LetterDa),
            '𑌧' => Ok(Grantha::LetterDha),
            '𑌨' => Ok(Grantha::LetterNa),
            '𑌪' => Ok(Grantha::LetterPa),
            '𑌫' => Ok(Grantha::LetterPha),
            '𑌬' => Ok(Grantha::LetterBa),
            '𑌭' => Ok(Grantha::LetterBha),
            '𑌮' => Ok(Grantha::LetterMa),
            '𑌯' => Ok(Grantha::LetterYa),
            '𑌰' => Ok(Grantha::LetterRa),
            '𑌲' => Ok(Grantha::LetterLa),
            '𑌳' => Ok(Grantha::LetterLla),
            '𑌵' => Ok(Grantha::LetterVa),
            '𑌶' => Ok(Grantha::LetterSha),
            '𑌷' => Ok(Grantha::LetterSsa),
            '𑌸' => Ok(Grantha::LetterSa),
            '𑌹' => Ok(Grantha::LetterHa),
            '𑌻' => Ok(Grantha::CombiningBinduBelow),
            '𑌼' => Ok(Grantha::SignNukta),
            '𑌽' => Ok(Grantha::SignAvagraha),
            '𑌾' => Ok(Grantha::VowelSignAa),
            '𑌿' => Ok(Grantha::VowelSignI),
            '𑍀' => Ok(Grantha::VowelSignIi),
            '𑍁' => Ok(Grantha::VowelSignU),
            '𑍂' => Ok(Grantha::VowelSignUu),
            '𑍃' => Ok(Grantha::VowelSignVocalicR),
            '𑍄' => Ok(Grantha::VowelSignVocalicRr),
            '𑍇' => Ok(Grantha::VowelSignEe),
            '𑍈' => Ok(Grantha::VowelSignAi),
            '𑍋' => Ok(Grantha::VowelSignOo),
            '𑍌' => Ok(Grantha::VowelSignAu),
            '𑍍' => Ok(Grantha::SignVirama),
            '𑍐' => Ok(Grantha::Om),
            '𑍗' => Ok(Grantha::AuLengthMark),
            '𑍝' => Ok(Grantha::SignPluta),
            '𑍞' => Ok(Grantha::LetterVedicAnusvara),
            '𑍟' => Ok(Grantha::LetterVedicDoubleAnusvara),
            '𑍠' => Ok(Grantha::LetterVocalicRr),
            '𑍡' => Ok(Grantha::LetterVocalicLl),
            '𑍢' => Ok(Grantha::VowelSignVocalicL),
            '𑍣' => Ok(Grantha::VowelSignVocalicLl),
            '𑍦' => Ok(Grantha::CombiningDigitZero),
            '𑍧' => Ok(Grantha::CombiningDigitOne),
            '𑍨' => Ok(Grantha::CombiningDigitTwo),
            '𑍩' => Ok(Grantha::CombiningDigitThree),
            '𑍪' => Ok(Grantha::CombiningDigitFour),
            '𑍫' => Ok(Grantha::CombiningDigitFive),
            '𑍬' => Ok(Grantha::CombiningDigitSix),
            '𑍰' => Ok(Grantha::CombiningLetterA),
            '𑍱' => Ok(Grantha::CombiningLetterKa),
            '𑍲' => Ok(Grantha::CombiningLetterNa),
            '𑍳' => Ok(Grantha::CombiningLetterVi),
            '𑍴' => Ok(Grantha::CombiningLetterPa),
            _ => Err(()),
        }
    }
}
impl Into<u32> for Grantha {
fn into(self) -> u32 {
let c: char = self.into();
let hex = c
.escape_unicode()
.to_string()
.replace("\\u{", "")
.replace("}", "");
u32::from_str_radix(&hex, 16).unwrap()
}
}
/// Fallible conversion from a raw code point: it must be a valid `char`
/// AND an assigned Grantha character.
impl std::convert::TryFrom<u32> for Grantha {
    type Error = ();
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
/// Iterate over the Grantha block in code-point order.
///
/// Fixes two bugs in the previous implementation: (1) it returned `None`
/// at the first unassigned code point (e.g. U+11304 right after
/// `SignVisarga`), silently truncating iteration, and (2) it never
/// advanced `*self`, so a `for` loop would yield the same successor
/// forever.
impl Iterator for Grantha {
    type Item = Self;
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        // U+11374 (CombiningLetterPa) is the last assigned character.
        const BLOCK_END: u32 = 0x11374;
        let mut index: u32 = (*self).into();
        while index < BLOCK_END {
            index += 1;
            // Skip the unassigned gaps inside the block.
            if let Ok(next) = Self::try_from(index) {
                *self = next;
                return Some(next);
            }
        }
        None
    }
}
impl Grantha {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Grantha::SignCombiningAnusvaraAbove
    }
    /// The character's name, in sentence case
    ///
    /// NOTE(review): relies on `string_morph::to_sentence_case` to split the
    /// "Grantha"-prefixed Debug name into words — presumably producing e.g.
    /// "Grantha letter ka"; confirm the exact casing against string_morph's
    /// documentation before depending on the format.
    pub fn name(&self) -> String {
        let s = std::format!("Grantha{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
|
use crate::treeer::core::StateAccessor;
/// Marker trait for tags that are self-contained (carry no children and
/// need only the shared `StateAccessor` capability).
pub trait SelfContainedTag: StateAccessor {}
|
//!
//! Measurement to be Stored
//!
use crate::Value;
use crate::Precision;
use crate::Utc;
use crate::DateTime;
use std::collections::BTreeMap;
/// The smallest unit of recording. Multiple of these Measurements are fit in a [Record](struct.Record.html), which in
/// turn is submitted to InfluxDB.
#[derive(Debug, Deserialize, Serialize)]
pub struct Measurement
{
    // Measurement (series) name; becomes the first token of the line protocol.
    pub(crate) name: String,
    // Indexed key/value metadata; BTreeMap keeps tags sorted, as the line
    // protocol recommends.
    pub(crate) tags: BTreeMap<String, String>,
    // The actual recorded values.
    pub(crate) fields: BTreeMap<String, Value>,
    // Time of recording; rendered at the precision chosen in `to_line`.
    pub(crate) timestamp: DateTime,
}
impl Measurement
{
    /// Create a named, empty Measurement stamped with the current time.
    pub(crate) fn new(name: &str) -> Self
    {
        Self {
            name: name.to_owned(),
            tags: BTreeMap::new(),
            fields: BTreeMap::new(),
            // TODO stamp in Drop of a MeasurementBuilder taking reference to collection in Record
            timestamp: Utc::now(),
        }
    }
    /// Set datetime of this Measurement
    pub fn timestamp(&mut self, timestamp: DateTime) -> &mut Self
    {
        self.timestamp = timestamp;
        self
    }
    /// Add a tag to this Measurement
    pub fn tag(&mut self, key: &str, value: &str) -> &mut Self
    {
        self.tags.insert(key.to_owned(), value.to_owned());
        self
    }
    /// Add a value field to this Measurement
    pub fn field<V: Into<Value>>(&mut self, key: &str, value: V) -> &mut Self
    {
        self.fields.insert(key.to_owned(), value.into());
        self
    }
    /// Render this Measurement in InfluxDB line protocol:
    /// `name[,tag=value...] [field=value...] timestamp`.
    ///
    /// NOTE(review): tag/field keys and values are not escaped here; commas,
    /// spaces or '=' inside them would corrupt the line — confirm inputs are
    /// sanitized upstream.
    pub(crate) fn to_line(&self, precision: &Precision) -> String
    {
        let mut line = self.name.to_owned();
        if ! self.tags.is_empty()
        {
            let tagline = self.tags.iter()
                .map(|(k, v)| format!("{}={}", k, v))
                .collect::<Vec<String>>()
                .join(",");
            line += ",";
            line += &tagline;
        }
        if ! self.fields.is_empty()
        {
            let fieldline = self.fields.iter()
                .map(|(k, v)| format!("{}={}", k, v))
                .collect::<Vec<String>>()
                .join(",");
            line += " ";
            line += &fieldline;
        }
        line += " ";
        // Render the timestamp at the requested precision. Microseconds are
        // nanoseconds DIVIDED by 1000 — the previous code multiplied by
        // 1000, emitting values a million times too large.
        match precision
        {
            Precision::Nanoseconds => { line += &self.timestamp.timestamp_nanos().to_string(); }
            Precision::Microseconds => { line += &(self.timestamp.timestamp_nanos() / 1000).to_string(); }
            Precision::Milliseconds => { line += &self.timestamp.timestamp_millis().to_string(); }
            Precision::Seconds => { line += &self.timestamp.timestamp().to_string(); }
        }
        line
    }
}
|
extern crate iron;
extern crate mount;
extern crate router;
extern crate staticfile;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
use iron::status;
use iron::{Iron, Request, Response, IronResult};
use iron::mime::Mime;
use mount::Mount;
use router::Router;
use staticfile::Static;
use std::path::Path;
/// Minimal demo payload for the /api/json endpoint; serialized to JSON
/// by serde in the `json_test` handler.
#[derive(Serialize, Deserialize, Debug)]
struct Point {
    x: i32,
    y: i32,
}
/// Handler for GET /api/json: logs the request path and responds with a
/// fixed `Point` serialized as JSON.
fn json_test(req: &mut Request) -> IronResult<Response> {
    let path = req.url.path().join("/");
    println!("Running json_test handler, URL path: {}", path);
    let point = Point { x: 1, y: 2 };
    let body = serde_json::to_string(&point).unwrap();
    let mime: Mime = "application/json".parse().unwrap();
    Ok(Response::with((mime, status::Ok, body)))
}
fn main() {
let addres: &'static str = "127.0.0.1:3000";
let mut router = Router::new();
router
.get("/json", json_test, "json");
let mut mount = Mount::new();
mount
.mount("/", Static::new(Path::new("static")))
.mount("/api", router);
println!("Server started at http://{}/", &addres);
Iron::new(mount).http(addres).unwrap();
} |
use clap::{App, ArgMatches, SubCommand};
use std::process;
use database::DB;
/// Build the `search` subcommand definition: one or more positional
/// KEYWORDs plus a `-t`/`--tag` flag that switches matching from
/// title/URL to tags (see `execute`).
pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
    SubCommand::with_name("search")
        .about("Search bookmark")
        .arg_from_usage("<KEYWORD>... 'Search bookmarks with keywords in title or URL'")
        .arg_from_usage("-t --tag 'Search bookmark with tag'")
}
/// Run the `search` subcommand: look up bookmarks by keyword (or by tag
/// when `-t`/`--tag` is given) and print each match.
///
/// Exits the process with status 1 when nothing matches.
pub fn execute(args: &ArgMatches) {
    let db = DB::open();
    // <KEYWORD>... is a required positional, so `values_of` is always Some.
    let keywords: Vec<&str> = args.values_of("KEYWORD").unwrap().collect();
    let bookmarks = if args.is_present("tag") {
        db.search_by_tag(keywords)
    } else {
        db.search(keywords)
    };
    if bookmarks.is_empty() {
        // Error text goes to stderr (was stdout) so piped output stays clean;
        // the previous message ("No matching any bookmark") was also garbled.
        eprintln!("Error: no bookmarks matched");
        process::exit(1);
    }
    for bookmark in bookmarks {
        bookmark.print();
    }
}
|
use aes::cipher::generic_array::GenericArray;
use aes::cipher::{BlockCipher, NewBlockCipher};
use aes::Aes128;
use std::io::BufWriter;
use byteorder::{BigEndian, WriteBytesExt};
use crate::error::Error;
// Key-derivation labels from RFC 3711 section 4.3 — each label selects which
// session key/salt/auth-tag the AES-CM KDF derives from the master secret.
pub const LABEL_SRTP_ENCRYPTION: u8 = 0x00;
pub const LABEL_SRTP_AUTHENTICATION_TAG: u8 = 0x01;
pub const LABEL_SRTP_SALT: u8 = 0x02;
pub const LABEL_SRTCP_ENCRYPTION: u8 = 0x03;
pub const LABEL_SRTCP_AUTHENTICATION_TAG: u8 = 0x04;
pub const LABEL_SRTCP_SALT: u8 = 0x05;
// Size in bytes of the SRTCP packet index field.
pub(crate) const SRTCP_INDEX_SIZE: usize = 4;
/// AES-CM key derivation per RFC 3711 appendix B.3: derive `out_len` bytes
/// of session keying material from the master key/salt for the given label.
///
/// NOTE(review): assumes `master_key` is exactly 16 bytes (Aes128) and
/// `master_salt` is at most `master_key.len()` bytes with length >= 8
/// (`prf_in[7]` is XORed below) — confirm callers enforce this, as a
/// violating salt would panic in `copy_from_slice`/indexing.
pub(crate) fn aes_cm_key_derivation(
    label: u8,
    master_key: &[u8],
    master_salt: &[u8],
    index_over_kdr: usize,
    out_len: usize,
) -> Result<Vec<u8>, Error> {
    if index_over_kdr != 0 {
        // 24-bit "index DIV kdr" must be xored to prf input.
        return Err(Error::UnsupportedIndexOverKdr);
    }
    // https://tools.ietf.org/html/rfc3711#appendix-B.3
    // The input block for AES-CM is generated by exclusive-oring the master salt with the
    // concatenation of the encryption key label 0x00 with (index DIV kdr),
    // - index is 'rollover count' and DIV is 'divided by'
    let n_master_key = master_key.len();
    let n_master_salt = master_salt.len();
    // PRF input block: salt (zero-padded to key length) with the label XORed
    // into byte 7, per the RFC's x = key_id XOR master_salt construction.
    let mut prf_in = vec![0u8; n_master_key];
    prf_in[..n_master_salt].copy_from_slice(master_salt);
    prf_in[7] ^= label;
    //The resulting value is then AES encrypted using the master key to get the cipher key.
    let key = GenericArray::from_slice(master_key);
    let block = Aes128::new(&key);
    // Round the output buffer up to a whole number of AES blocks; the excess
    // is trimmed before returning.
    let mut out = vec![0u8; ((out_len + n_master_key) / n_master_key) * n_master_key];
    for (i, n) in (0..out_len).step_by(n_master_key).enumerate() {
        //BigEndian.PutUint16(prfIn[nMasterKey-2:], i)
        // Counter mode: the block counter `i` occupies the last two bytes of
        // the PRF input, big-endian.
        prf_in[n_master_key - 2] = ((i >> 8) & 0xFF) as u8;
        prf_in[n_master_key - 1] = (i & 0xFF) as u8;
        out[n..n + n_master_key].copy_from_slice(&prf_in);
        let out_key = GenericArray::from_mut_slice(&mut out[n..n + n_master_key]);
        block.encrypt_block(out_key);
    }
    Ok(out[..out_len].to_vec())
}
/// Generate IV https://tools.ietf.org/html/rfc3711#section-4.1.1
/// where the 128-bit integer value IV SHALL be defined by the SSRC, the
/// SRTP packet index i, and the SRTP session salting key k_s, as below.
/// ROC = a 32-bit unsigned rollover counter (roc), which records how many
/// times the 16-bit RTP sequence number has been reset to zero after
/// passing through 65,535
/// ```nobuild
/// i = 2^16 * roc + SEQ
/// IV = (salt*2 ^ 16) | (ssrc*2 ^ 64) | (i*2 ^ 16)
/// ```
pub(crate) fn generate_counter(
sequence_number: u16,
rollover_counter: u32,
ssrc: u32,
session_salt: &[u8],
) -> Result<Vec<u8>, Error> {
assert!(session_salt.len() <= 16);
let mut counter: Vec<u8> = vec![0; 16];
{
let mut writer = BufWriter::<&mut [u8]>::new(counter[4..].as_mut());
writer.write_u32::<BigEndian>(ssrc)?;
writer.write_u32::<BigEndian>(rollover_counter)?;
writer.write_u32::<BigEndian>((sequence_number as u32) << 16)?;
}
for i in 0..session_salt.len() {
counter[i] ^= session_salt[i];
}
Ok(counter)
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::protection_profile::*;
    // Key Derivation Test Vectors from https://tools.ietf.org/html/rfc3711#appendix-B.3
    // (encryption key, salt, and auth key derived from one master key/salt).
    #[test]
    fn test_valid_session_keys() -> Result<(), Error> {
        let master_key = vec![
            0xE1, 0xF9, 0x7A, 0x0D, 0x3E, 0x01, 0x8B, 0xE0, 0xD6, 0x4F, 0xA3, 0x2C, 0x06, 0xDE,
            0x41, 0x39,
        ];
        let master_salt = vec![
            0x0E, 0xC6, 0x75, 0xAD, 0x49, 0x8A, 0xFE, 0xEB, 0xB6, 0x96, 0x0B, 0x3A, 0xAB, 0xE6,
        ];
        let expected_session_key = vec![
            0xC6, 0x1E, 0x7A, 0x93, 0x74, 0x4F, 0x39, 0xEE, 0x10, 0x73, 0x4A, 0xFE, 0x3F, 0xF7,
            0xA0, 0x87,
        ];
        let expected_session_salt = vec![
            0x30, 0xCB, 0xBC, 0x08, 0x86, 0x3D, 0x8C, 0x85, 0xD4, 0x9D, 0xB3, 0x4A, 0x9A, 0xE1,
        ];
        let expected_session_auth_tag = vec![
            0xCE, 0xBE, 0x32, 0x1F, 0x6F, 0xF7, 0x71, 0x6B, 0x6F, 0xD4, 0xAB, 0x49, 0xAF, 0x25,
            0x6A, 0x15, 0x6D, 0x38, 0xBA, 0xA4,
        ];
        let session_key = aes_cm_key_derivation(
            LABEL_SRTP_ENCRYPTION,
            &master_key,
            &master_salt,
            0,
            master_key.len(),
        )?;
        assert_eq!(
            session_key, expected_session_key,
            "Session Key:\n{:?} \ndoes not match expected:\n{:?}\nMaster Key:\n{:?}\nMaster Salt:\n{:?}\n",
            session_key, expected_session_key, master_key, master_salt,
        );
        let session_salt = aes_cm_key_derivation(
            LABEL_SRTP_SALT,
            &master_key,
            &master_salt,
            0,
            master_salt.len(),
        )?;
        assert_eq!(
            session_salt, expected_session_salt,
            "Session Salt {:?} does not match expected {:?}",
            session_salt, expected_session_salt
        );
        let auth_key_len = ProtectionProfile::Aes128CmHmacSha1_80.auth_key_len();
        let session_auth_tag = aes_cm_key_derivation(
            LABEL_SRTP_AUTHENTICATION_TAG,
            &master_key,
            &master_salt,
            0,
            auth_key_len,
        )?;
        assert_eq!(
            session_auth_tag, expected_session_auth_tag,
            "Session Auth Tag {:?} does not match expected {:?}",
            session_auth_tag, expected_session_auth_tag,
        );
        Ok(())
    }
    // This test asserts that calling aesCmKeyDerivation with a non-zero indexOverKdr fails
    // Currently this isn't supported, but the API makes sure we can add this in the future
    #[test]
    fn test_index_over_kdr() -> Result<(), Error> {
        let result = aes_cm_key_derivation(LABEL_SRTP_AUTHENTICATION_TAG, &[], &[], 1, 0);
        assert!(result.is_err());
        Ok(())
    }
}
|
use std::fmt;
#[derive(Debug, PartialEq, Clone, Copy)]
/// The color type used to represent this image
pub enum ColorType {
    /// Grayscale, with one color channel
    Grayscale,
    /// RGB, with three color channels
    RGB,
    /// Indexed, with one byte per pixel representing one of up to 256 colors in the image
    Indexed,
    /// Grayscale + Alpha, with two color channels
    GrayscaleAlpha,
    /// RGBA, with four color channels
    RGBA,
}
impl fmt::Display for ColorType {
    /// Write the human-readable name of this color type.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            ColorType::Grayscale => "Grayscale",
            ColorType::RGB => "RGB",
            ColorType::Indexed => "Indexed",
            ColorType::GrayscaleAlpha => "Grayscale + Alpha",
            ColorType::RGBA => "RGB + Alpha",
        };
        f.write_str(name)
    }
}
impl ColorType {
/// Get the code used by the PNG specification to denote this color type
#[inline]
pub fn png_header_code(self) -> u8 {
match self {
ColorType::Grayscale => 0,
ColorType::RGB => 2,
ColorType::Indexed => 3,
ColorType::GrayscaleAlpha => 4,
ColorType::RGBA => 6,
}
}
#[inline]
pub fn channels_per_pixel(self) -> u8 {
match self {
ColorType::Grayscale | ColorType::Indexed => 1,
ColorType::GrayscaleAlpha => 2,
ColorType::RGB => 3,
ColorType::RGBA => 4,
}
}
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
/// The number of bits to be used per channel per pixel
pub enum BitDepth {
    /// One bit per channel per pixel
    One,
    /// Two bits per channel per pixel
    Two,
    /// Four bits per channel per pixel
    Four,
    /// Eight bits per channel per pixel
    Eight,
    /// Sixteen bits per channel per pixel
    Sixteen,
}
impl fmt::Display for BitDepth {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}",
match *self {
BitDepth::One => "1",
BitDepth::Two => "2",
BitDepth::Four => "4",
BitDepth::Eight => "8",
BitDepth::Sixteen => "16",
}
)
}
}
impl BitDepth {
    /// Retrieve the number of bits per channel per pixel as a `u8`
    #[inline]
    pub fn as_u8(self) -> u8 {
        match self {
            BitDepth::One => 1,
            BitDepth::Two => 2,
            BitDepth::Four => 4,
            BitDepth::Eight => 8,
            BitDepth::Sixteen => 16,
        }
    }
    /// Parse a number of bits per channel per pixel into a `BitDepth`
    ///
    /// # Panics
    /// Panics when `depth` is not one of 1, 2, 4, 8 or 16.
    #[inline]
    pub fn from_u8(depth: u8) -> BitDepth {
        match depth {
            1 => BitDepth::One,
            2 => BitDepth::Two,
            4 => BitDepth::Four,
            8 => BitDepth::Eight,
            16 => BitDepth::Sixteen,
            // Include the offending value so failures are diagnosable.
            _ => panic!("Unsupported bit depth: {}", depth),
        }
    }
}
#[derive(Debug, PartialEq, Clone, Copy, Eq, Hash)]
/// Potential optimization methods for alpha channel
// NOTE(review): variant semantics are not defined in this file; the names
// suggest a no-op, constant black/white fills, and four directional
// strategies — confirm against the alpha-optimization pass before relying
// on any particular behavior.
pub enum AlphaOptim {
    NoOp,
    Black,
    White,
    Up,
    Right,
    Down,
    Left,
}
impl fmt::Display for AlphaOptim {
    /// Single-character code for each optimization method.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let code = match self {
            AlphaOptim::NoOp => "_",
            AlphaOptim::Black => "B",
            AlphaOptim::White => "W",
            AlphaOptim::Up => "U",
            AlphaOptim::Right => "R",
            AlphaOptim::Down => "D",
            AlphaOptim::Left => "L",
        };
        f.write_str(code)
    }
}
|
use tokio::io::AsyncReadExt;
use tokio::io::AsyncWriteExt;
use std::os::unix::io::AsRawFd;
use std::io;
use std::os::unix::io::RawFd;
use std::os::unix::prelude::IntoRawFd;
use tokio::process::Command;
/// Duplicate `fd` via libc::dup, mapping -1 to the last OS error.
fn dup(fd: RawFd) -> io::Result<RawFd> {
    // SAFETY: dup only operates on the descriptor number; no memory involved.
    match unsafe { libc::dup(fd) } {
        -1 => Err(io::Error::last_os_error()),
        newfd => Ok(newfd),
    }
}
/// Duplicate `fd` onto `newfd` via libc::dup2, mapping -1 to an io::Error.
fn dup2(fd: RawFd, newfd: RawFd) -> io::Result<()> {
    // SAFETY: dup2 only operates on descriptor numbers.
    match unsafe { libc::dup2(fd, newfd) } {
        -1 => Err(io::Error::last_os_error()),
        _ => Ok(()),
    }
}
/// Close `fd` via libc::close, mapping -1 to an io::Error.
fn close(fd: RawFd) -> io::Result<()> {
    // SAFETY: close only operates on the descriptor number.
    match unsafe { libc::close(fd) } {
        -1 => Err(io::Error::last_os_error()),
        _ => Ok(()),
    }
}
/// Clear O_NONBLOCK on `fd` if it is set, leaving all other flags intact.
fn set_blocking(fd: RawFd) -> io::Result<()> {
    // SAFETY: F_GETFL only queries the descriptor's status flags.
    let flags = unsafe { libc::fcntl(fd, libc::F_GETFL) };
    if flags == -1 {
        return Err(io::Error::last_os_error());
    }
    if flags & libc::O_NONBLOCK == 0 {
        // Already blocking; nothing to do.
        return Ok(());
    }
    // SAFETY: F_SETFL with the previous flags minus O_NONBLOCK.
    let rc = unsafe { libc::fcntl(fd, libc::F_SETFL, flags & !libc::O_NONBLOCK) };
    if rc == -1 {
        return Err(io::Error::last_os_error());
    }
    Ok(())
}
// End-to-end pipe test: spawn a Python child that copies fd 3 -> fd 4, feed
// it bytes through a tokio pipe, and read them back. The fd juggling below
// exists because tokio_pipe fds are registered with mio and marked
// non-blocking/CLOEXEC; the child needs plain blocking, inheritable fds.
#[tokio::test]
async fn test_asynchronous() -> io::Result<()> {
    let script = r#"#!/usr/bin/env python3
import os, shutil
print(os.stat(3))
with os.fdopen(3, 'rb') as r:
with os.fdopen(4, 'wb') as w:
shutil.copyfileobj(r, w)
"#;
    let (input, mut their_input) = tokio_pipe::pipe()?;
    let (mut their_output, output) = tokio_pipe::pipe()?;
    let mut command = Command::new("python3");
    command.args(["-c", script]);
    // The child does plain blocking reads/writes on these ends.
    set_blocking(input.as_raw_fd())?;
    set_blocking(output.as_raw_fd())?;
    let mut child = unsafe {
        let input = input.into_raw_fd();
        let output = output.into_raw_fd();
        // unregister from mio registration
        // dup produces fds without CLOEXEC so the child can inherit them.
        let no_cloexec_input = dup(input)?;
        let no_cloexec_output = dup(output)?;
        close(input)?;
        close(output)?;
        // In the child (between fork and exec): pin the pipes to fds 3 and 4,
        // which the Python script expects, then drop the duplicates.
        let child = command.pre_exec(move || {
            dup2(no_cloexec_input, 3)?;
            dup2(no_cloexec_output, 4)?;
            close(no_cloexec_input)?;
            close(no_cloexec_output)?;
            Ok(())
        }).spawn();
        // Parent-side copies are no longer needed once the child has its own.
        close(no_cloexec_input)?;
        close(no_cloexec_output)?;
        child?
    };
    their_input.write_all(&b"Hello, World!"[..]).await?;
    // Dropping our write end delivers EOF so the child's copy loop finishes.
    drop(their_input);
    let mut buf = vec![];
    their_output.read_to_end(&mut buf).await?;
    assert_eq!(&b"Hello, World!"[..], &buf);
    assert!(child.wait().await?.success());
    Ok(())
}
|
use nom::types::CompleteStr;
use nom::IResult;
use std::error::Error;
// Locate the signature-decipher function name in the player script: skip
// past the literal `"signature",` marker, capture everything up to the next
// '(' (the function name), and consume its parenthesized argument list.
named!(parse_sig<CompleteStr, CompleteStr>,
    do_parse!(
        take_until_and_consume!(r#""signature","#) >>
        f: take_until!("(") >>
        delimited!( tag!("("), take_until!(")"), tag!(")"))>>
        (f)
    )
);
/// Find the `var NAME = { ... };` helper-object definition in the script.
fn definition<'a>(
    input: CompleteStr<'a>,
    name: &'a str,
) -> IResult<CompleteStr<'a>, CompleteStr<'a>> {
    // Same regex as before, assembled in one concat instead of a push_str chain.
    let exp = [
        "(?m)",
        r"var\s?",
        name,
        r"\s?=\s?\{(\w+:function(.*)\{[^}]+},?\s*)+};",
    ]
    .concat();
    re_find!(input, exp.as_str())
}
/// Find the `NAME=function(a){...};` signature-scrambling function body.
fn fdef<'a>(input: CompleteStr<'a>, name: &'a str) -> IResult<CompleteStr<'a>, CompleteStr<'a>> {
    let exp = [name, r"=function\(\w+\)\{.+\};"].concat();
    re_find!(input, exp.as_str())
}
// Capture the helper-object name from a call like `XX.fn(a,12)`; capture
// group 1 is the object identifier.
named!(
    var<&str, Vec<&str>>,
    re_capture_static!(r"(\w+)\.\w+\(\w+,\d+\)")
);
/// Extract the signature-decipher JavaScript from a player `script`.
///
/// Returns (the JS source of the decipher function plus its helper object,
/// the decipher function's name).
///
/// NOTE(review): the three `unwrap`s panic if the script's structure ever
/// changes; `Box<Error>` is the pre-2018 spelling of `Box<dyn Error>`.
pub fn decipher(script: &str) -> Result<(String, String), Box<Error>> {
    let mut temp_js = String::new();
    // Name of the decipher function, found next to the "signature" marker.
    let (_, f) = parse_sig(CompleteStr(script)).unwrap();
    // Body of that function.
    let (_, def) = fdef(CompleteStr(script), &f).unwrap();
    // Name of the helper object the body calls (capture group 1).
    let (_, v) = var(def.0).unwrap();
    temp_js.push_str(def.0);
    match definition(CompleteStr(script), v[1]) {
        Ok((_, body)) => {
            temp_js.push_str(body.0);
            // temp_js.push_str(&format!(r#"console.log({}("{}"))"#, f.0, sig));
            // fs::write("temp.js", &temp_js).unwrap();
        }
        Err(_) => {
            println!("bad");
        }
    }
    debug!("=====signature function start=====");
    debug!("{}", temp_js);
    debug!("=====signature function end=====");
    Ok((temp_js, f.0.to_string()))
}
// #[test]
// fn def_test() {
// let script = fs::read_to_string("./script.js").unwrap();
// match fdef(CompleteStr(&script), "pL") {
// Ok((_, list)) => {
// println!("Ok {:?}", list);
// }
// Err(e) => {
// println!("Err {:?}", e);
// println!("Err");
// }
// }
// }
// #[test]
// fn definition_test() {
// let script = fs::read_to_string("./script.js").unwrap();
// match definition(CompleteStr(&script), "oL") {
// Ok((_, list)) => {
// println!("Ok {:?}", list);
// }
// Err(e) => {
// println!("Err {:?}", e);
// println!("Err");
// }
// }
// }
// #[test]
// fn var_test() {
// let script = r#"pL=function(a){a=a.split("");oL.yq(a,27);oL.Z0(a,50);oL.ZB(a,2);oL.yq(a,80);oL.ZB(a,3);return a.join("")};"#;
// match var(&script) {
// Ok((_, list)) => {
// println!("Ok {:?}", list[1]);
// }
// Err(e) => {
// println!("Err {:?}", e);
// println!("Err");
// }
// }
// }
// Smoke test: requires a local ./script.js fixture; only failures print.
#[test]
fn decipher_test() {
    let script = ::std::fs::read_to_string("./script.js").unwrap();
    if let Err(e) = decipher(script.as_str()) {
        println!("Err {:?}", e);
        println!("Err");
    }
}
|
#![allow(non_snake_case, non_camel_case_types)]
use libc::{c_char, c_int, c_uchar, c_uint, c_void};
use crate::channel::ssh_channel;
use crate::fd_set;
use crate::pki::ssh_key;
use crate::{socket_t, timeval};
// Legacy (pre-0.8 libssh) host-key verification results returned by
// ssh_is_server_known; superseded by ssh_known_hosts_e below.
pub type ssh_server_known_e = c_int;
pub const SSH_SERVER_KNOWN_SSH_SERVER_ERROR: ssh_server_known_e = -1;
pub const SSH_SERVER_KNOWN_SSH_SERVER_NOT_KNOWN: ssh_server_known_e = 0;
pub const SSH_SERVER_KNOWN_SSH_SERVER_KNOWN_OK: ssh_server_known_e = 1;
pub const SSH_SERVER_KNOWN_SSH_SERVER_KNOWN_CHANGED: ssh_server_known_e = 2;
pub const SSH_SERVER_KNOWN_SSH_SERVER_FOUND_OTHER: ssh_server_known_e = 3;
pub const SSH_SERVER_KNOWN_SSH_SERVER_FILE_NOT_FOUND: ssh_server_known_e = 4;
// Host-key verification results (current libssh known-hosts API).
pub type ssh_known_hosts_e = c_int;
/// There had been an error checking the host.
pub const SSH_KNOWN_HOSTS_ERROR: ssh_known_hosts_e = -2;
/// The known host file does not exist. The host is thus unknown. File will
/// be created if host key is accepted.
pub const SSH_KNOWN_HOSTS_NOT_FOUND: ssh_known_hosts_e = -1;
/// The server is unknown. User should confirm the public key hash is
/// correct.
pub const SSH_KNOWN_HOSTS_UNKNOWN: ssh_known_hosts_e = 0;
/// The server is known and has not changed.
pub const SSH_KNOWN_HOSTS_OK: ssh_known_hosts_e = 1;
/// The server key has changed. Either you are under attack or the
/// administrator changed the key. You HAVE to warn the user about a
/// possible attack.
pub const SSH_KNOWN_HOSTS_CHANGED: ssh_known_hosts_e = 2;
/// The server gave us a key of a type while we had an other type recorded.
/// It is a possible attack.
pub const SSH_KNOWN_HOSTS_OTHER: ssh_known_hosts_e = 3;
// Option identifiers for `ssh_options_set`/`ssh_options_get` below.
// Values must stay numerically in sync with libssh's `enum ssh_options_e`.
pub type ssh_options_e = c_int;
pub const SSH_OPTIONS_HOST: ssh_options_e = 0;
pub const SSH_OPTIONS_PORT: ssh_options_e = 1;
pub const SSH_OPTIONS_PORT_STR: ssh_options_e = 2;
pub const SSH_OPTIONS_FD: ssh_options_e = 3;
pub const SSH_OPTIONS_USER: ssh_options_e = 4;
pub const SSH_OPTIONS_SSH_DIR: ssh_options_e = 5;
pub const SSH_OPTIONS_IDENTITY: ssh_options_e = 6;
pub const SSH_OPTIONS_ADD_IDENTITY: ssh_options_e = 7;
pub const SSH_OPTIONS_KNOWNHOSTS: ssh_options_e = 8;
pub const SSH_OPTIONS_TIMEOUT: ssh_options_e = 9;
pub const SSH_OPTIONS_TIMEOUT_USEC: ssh_options_e = 10;
pub const SSH_OPTIONS_SSH1: ssh_options_e = 11;
pub const SSH_OPTIONS_SSH2: ssh_options_e = 12;
pub const SSH_OPTIONS_LOG_VERBOSITY: ssh_options_e = 13;
pub const SSH_OPTIONS_LOG_VERBOSITY_STR: ssh_options_e = 14;
pub const SSH_OPTIONS_CIPHERS_C_S: ssh_options_e = 15;
pub const SSH_OPTIONS_CIPHERS_S_C: ssh_options_e = 16;
pub const SSH_OPTIONS_COMPRESSION_C_S: ssh_options_e = 17;
pub const SSH_OPTIONS_COMPRESSION_S_C: ssh_options_e = 18;
pub const SSH_OPTIONS_PROXYCOMMAND: ssh_options_e = 19;
pub const SSH_OPTIONS_BINDADDR: ssh_options_e = 20;
pub const SSH_OPTIONS_STRICTHOSTKEYCHECK: ssh_options_e = 21;
pub const SSH_OPTIONS_COMPRESSION: ssh_options_e = 22;
pub const SSH_OPTIONS_COMPRESSION_LEVEL: ssh_options_e = 23;
pub const SSH_OPTIONS_KEY_EXCHANGE: ssh_options_e = 24;
pub const SSH_OPTIONS_HOSTKEYS: ssh_options_e = 25;
pub const SSH_OPTIONS_GSSAPI_SERVER_IDENTITY: ssh_options_e = 26;
pub const SSH_OPTIONS_GSSAPI_CLIENT_IDENTITY: ssh_options_e = 27;
pub const SSH_OPTIONS_GSSAPI_DELEGATE_CREDENTIALS: ssh_options_e = 28;
pub const SSH_OPTIONS_HMAC_C_S: ssh_options_e = 29;
pub const SSH_OPTIONS_HMAC_S_C: ssh_options_e = 30;
pub const SSH_OPTIONS_PASSWORD_AUTH: ssh_options_e = 31;
pub const SSH_OPTIONS_PUBKEY_AUTH: ssh_options_e = 32;
pub const SSH_OPTIONS_KBDINT_AUTH: ssh_options_e = 33;
pub const SSH_OPTIONS_GSSAPI_AUTH: ssh_options_e = 34;
pub const SSH_OPTIONS_GLOBAL_KNOWNHOSTS: ssh_options_e = 35;
pub const SSH_OPTIONS_NODELAY: ssh_options_e = 36;
pub const SSH_OPTIONS_PUBLICKEY_ACCEPTED_TYPES: ssh_options_e = 37;
pub const SSH_OPTIONS_PROCESS_CONFIG: ssh_options_e = 38;
pub const SSH_OPTIONS_REKEY_DATA: ssh_options_e = 39;
pub const SSH_OPTIONS_REKEY_TIME: ssh_options_e = 40;
// Opaque session handle: a zero-sized marker struct, only ever used behind
// the `ssh_session` pointer. The real layout lives on the C side.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ssh_session_struct {
    _unused: [u8; 0],
}
pub type ssh_session = *mut ssh_session_struct;
// Byte/packet counters — presumably updated by libssh once registered via
// `ssh_set_counters` below; confirm against libssh docs.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ssh_counter_struct {
    pub in_bytes: u64,
    pub out_bytes: u64,
    pub in_packets: u64,
    pub out_packets: u64,
}
pub type ssh_counter = *mut ssh_counter_struct;
// One parsed known_hosts line; release with `ssh_knownhosts_entry_free`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ssh_knownhosts_entry {
    pub hostname: *mut c_char,
    pub unparsed: *mut c_char,
    pub publickey: ssh_key,
    pub comment: *mut c_char,
}
// Raw FFI declarations for the libssh session API. Signatures must match
// the C headers exactly; do not restyle or reorder parameters.
extern "C" {
    // Connection lifecycle and banner queries.
    pub fn ssh_connect(session: ssh_session) -> c_int;
    pub fn ssh_get_issue_banner(session: ssh_session) -> *mut c_char;
    pub fn ssh_get_openssh_version(session: ssh_session) -> c_int;
    pub fn ssh_disconnect(session: ssh_session);
    pub fn ssh_copyright() -> *const c_char;
    pub fn ssh_select(
        channels: *mut ssh_channel,
        outchannels: *mut ssh_channel,
        maxfd: socket_t,
        readfds: *mut fd_set,
        timeout: *mut timeval,
    ) -> c_int;
    // Host-key fingerprints and known-hosts handling (some entries
    // deprecated in favor of the `ssh_session_*_known_hosts_*` family).
    pub fn ssh_get_fingerprint_hash(type_: i32, hash: *mut c_uchar, len: usize) -> *mut c_char;
    pub fn ssh_print_hash(type_: i32, hash: *mut c_uchar, len: usize);
    #[deprecated]
    pub fn ssh_is_server_known(session: ssh_session) -> c_int;
    #[deprecated]
    pub fn ssh_dump_knownhost(session: ssh_session) -> *mut c_char;
    #[deprecated]
    pub fn ssh_write_knownhost(session: ssh_session) -> c_int;
    pub fn ssh_knownhosts_entry_free(entry: *mut ssh_knownhosts_entry);
    pub fn ssh_known_hosts_parse_line(
        host: *const c_char,
        line: *const c_char,
        entry: *mut *mut ssh_knownhosts_entry,
    ) -> c_int;
    pub fn ssh_session_has_known_hosts_entry(session: ssh_session) -> i32;
    pub fn ssh_session_export_known_hosts_entry(
        session: ssh_session,
        pentry_string: *mut *mut c_char,
    ) -> c_int;
    pub fn ssh_session_update_known_hosts(session: ssh_session) -> c_int;
    pub fn ssh_session_get_known_hosts_entry(
        session: ssh_session,
        pentry: *mut *mut ssh_knownhosts_entry,
    ) -> i32;
    pub fn ssh_session_is_known_server(session: ssh_session) -> i32;
    // Session options (keys are the SSH_OPTIONS_* constants above).
    pub fn ssh_options_copy(src: ssh_session, dest: *mut ssh_session) -> c_int;
    pub fn ssh_options_set(session: ssh_session, type_: c_int, value: *const c_void) -> c_int;
    pub fn ssh_options_get_port(session: ssh_session, port_target: *mut c_uint) -> c_int;
    pub fn ssh_options_get(session: ssh_session, type_: c_int, value: *mut *mut c_char) -> c_int;
    pub fn ssh_options_getopt(
        session: ssh_session,
        argcptr: *mut c_int,
        argv: *mut *mut c_char,
    ) -> c_int;
    pub fn ssh_options_parse_config(session: ssh_session, filename: *const c_char) -> c_int;
    // Session allocation and state queries.
    pub fn ssh_new() -> ssh_session;
    pub fn ssh_free(session: ssh_session);
    pub fn ssh_get_clientbanner(session: ssh_session) -> *const c_char;
    pub fn ssh_get_serverbanner(session: ssh_session) -> *const c_char;
    pub fn ssh_get_kex_algo(session: ssh_session) -> *const c_char;
    pub fn ssh_get_cipher_in(session: ssh_session) -> *const c_char;
    pub fn ssh_get_cipher_out(session: ssh_session) -> *const c_char;
    pub fn ssh_get_hmac_in(session: ssh_session) -> *const c_char;
    pub fn ssh_get_hmac_out(session: ssh_session) -> *const c_char;
    pub fn ssh_silent_disconnect(session: ssh_session);
    pub fn ssh_set_blocking(session: ssh_session, blocking: c_int);
    pub fn ssh_is_blocking(session: ssh_session) -> c_int;
    pub fn ssh_is_connected(session: ssh_session) -> c_int;
    pub fn ssh_get_fd(session: ssh_session) -> socket_t;
    pub fn ssh_set_fd_toread(session: ssh_session);
    pub fn ssh_set_fd_towrite(session: ssh_session);
    pub fn ssh_set_fd_except(session: ssh_session);
    pub fn ssh_get_status(session: ssh_session) -> c_int;
    pub fn ssh_get_poll_flags(session: ssh_session) -> c_int;
    pub fn ssh_get_disconnect_message(session: ssh_session) -> *const c_char;
    pub fn ssh_get_version(session: ssh_session) -> c_int;
    pub fn ssh_send_ignore(session: ssh_session, data: *const c_char) -> c_int;
    pub fn ssh_send_debug(
        session: ssh_session,
        message: *const c_char,
        always_display: c_int,
    ) -> c_int;
    pub fn ssh_set_counters(session: ssh_session, scounter: ssh_counter, rcounter: ssh_counter);
    // Server public-key access (deprecated variants kept alongside).
    #[deprecated]
    pub fn ssh_get_pubkey_hash(session: ssh_session, hash: *mut *mut c_uchar) -> c_int;
    pub fn ssh_clean_pubkey_hash(hash: *mut *mut c_uchar);
    pub fn ssh_get_server_publickey(session: ssh_session, key: *mut ssh_key) -> c_int;
    #[deprecated]
    pub fn ssh_get_publickey(session: ssh_session, key: *mut ssh_key) -> c_int;
    pub fn ssh_get_publickey_hash(
        key: ssh_key,
        type_: i32,
        hash: *mut *mut c_uchar,
        hlen: *mut usize,
    ) -> c_int;
}
|
#[doc = "Reader of register SM_EXECCTRL"]
pub type R = crate::R<u32, super::SM_EXECCTRL>;
#[doc = "Writer for register SM_EXECCTRL"]
pub type W = crate::W<u32, super::SM_EXECCTRL>;
#[doc = "Register SM_EXECCTRL `reset()`'s with value 0x0001_f000"]
impl crate::ResetValue for super::SM_EXECCTRL {
    type Type = u32;
    // Reset value: WRAP_TOP (bits 12:16) = 0x1f, all other fields zero.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0001_f000
    }
}
#[doc = "Reader of field `EXEC_STALLED`"]
pub type EXEC_STALLED_R = crate::R<bool, bool>;
#[doc = "Reader of field `SIDE_EN`"]
pub type SIDE_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SIDE_EN`"]
pub struct SIDE_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SIDE_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at offset 30 of SM_EXECCTRL.
        const OFFSET: u32 = 30;
        const MASK: u32 = 0x01;
        let cleared = self.w.bits & !(MASK << OFFSET);
        self.w.bits = cleared | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `SIDE_PINDIR`"]
pub type SIDE_PINDIR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SIDE_PINDIR`"]
pub struct SIDE_PINDIR_W<'a> {
    w: &'a mut W,
}
impl<'a> SIDE_PINDIR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at offset 29.
        self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
        self.w
    }
}
#[doc = "Reader of field `JMP_PIN`"]
pub type JMP_PIN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `JMP_PIN`"]
pub struct JMP_PIN_W<'a> {
    w: &'a mut W,
}
impl<'a> JMP_PIN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 5-bit field at bits 24:28; `value` is masked to 0x1f before writing.
        self.w.bits = (self.w.bits & !(0x1f << 24)) | (((value as u32) & 0x1f) << 24);
        self.w
    }
}
#[doc = "Reader of field `OUT_EN_SEL`"]
pub type OUT_EN_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `OUT_EN_SEL`"]
pub struct OUT_EN_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> OUT_EN_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 5-bit field at bits 19:23; `value` is masked to 0x1f before writing.
        self.w.bits = (self.w.bits & !(0x1f << 19)) | (((value as u32) & 0x1f) << 19);
        self.w
    }
}
#[doc = "Reader of field `INLINE_OUT_EN`"]
pub type INLINE_OUT_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INLINE_OUT_EN`"]
pub struct INLINE_OUT_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> INLINE_OUT_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at offset 18.
        self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
        self.w
    }
}
#[doc = "Reader of field `OUT_STICKY`"]
pub type OUT_STICKY_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OUT_STICKY`"]
pub struct OUT_STICKY_W<'a> {
    w: &'a mut W,
}
impl<'a> OUT_STICKY_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at offset 17 of SM_EXECCTRL.
        const OFFSET: u32 = 17;
        let cleared = self.w.bits & !(1u32 << OFFSET);
        self.w.bits = cleared | (((value as u32) & 0x01) << OFFSET);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = "Reader of field `WRAP_TOP`"]
pub type WRAP_TOP_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRAP_TOP`"]
pub struct WRAP_TOP_W<'a> {
    w: &'a mut W,
}
impl<'a> WRAP_TOP_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 5-bit field at bits 12:16; `value` is masked to 0x1f before writing.
        self.w.bits = (self.w.bits & !(0x1f << 12)) | (((value as u32) & 0x1f) << 12);
        self.w
    }
}
#[doc = "Reader of field `WRAP_BOTTOM`"]
pub type WRAP_BOTTOM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRAP_BOTTOM`"]
pub struct WRAP_BOTTOM_W<'a> {
    w: &'a mut W,
}
impl<'a> WRAP_BOTTOM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 5-bit field at bits 7:11; `value` is masked to 0x1f before writing.
        self.w.bits = (self.w.bits & !(0x1f << 7)) | (((value as u32) & 0x1f) << 7);
        self.w
    }
}
#[doc = "Comparison used for the MOV x, STATUS instruction.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum STATUS_SEL_A {
    #[doc = "0: All-ones if TX FIFO level < N, otherwise all-zeroes"]
    TXLEVEL = 0,
    #[doc = "1: All-ones if RX FIFO level < N, otherwise all-zeroes"]
    RXLEVEL = 1,
}
impl From<STATUS_SEL_A> for bool {
    // TXLEVEL (0) -> false, RXLEVEL (1) -> true.
    #[inline(always)]
    fn from(variant: STATUS_SEL_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `STATUS_SEL`"]
pub type STATUS_SEL_R = crate::R<bool, STATUS_SEL_A>;
impl STATUS_SEL_R {
    #[doc = r"Get enumerated values variant"]
    // The field is a single bit, so the bool -> enum mapping is total.
    #[inline(always)]
    pub fn variant(&self) -> STATUS_SEL_A {
        match self.bits {
            false => STATUS_SEL_A::TXLEVEL,
            true => STATUS_SEL_A::RXLEVEL,
        }
    }
    #[doc = "Checks if the value of the field is `TXLEVEL`"]
    #[inline(always)]
    pub fn is_txlevel(&self) -> bool {
        *self == STATUS_SEL_A::TXLEVEL
    }
    #[doc = "Checks if the value of the field is `RXLEVEL`"]
    #[inline(always)]
    pub fn is_rxlevel(&self) -> bool {
        *self == STATUS_SEL_A::RXLEVEL
    }
}
#[doc = "Write proxy for field `STATUS_SEL`"]
pub struct STATUS_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> STATUS_SEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: STATUS_SEL_A) -> &'a mut W {
        // Redundant generated braces removed; convert-and-write is a
        // single expression.
        self.bit(variant.into())
    }
    #[doc = "All-ones if TX FIFO level < N, otherwise all-zeroes"]
    #[inline(always)]
    pub fn txlevel(self) -> &'a mut W {
        self.variant(STATUS_SEL_A::TXLEVEL)
    }
    #[doc = "All-ones if RX FIFO level < N, otherwise all-zeroes"]
    #[inline(always)]
    pub fn rxlevel(self) -> &'a mut W {
        self.variant(STATUS_SEL_A::RXLEVEL)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at offset 4 of SM_EXECCTRL.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `STATUS_N`"]
pub type STATUS_N_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `STATUS_N`"]
pub struct STATUS_N_W<'a> {
    w: &'a mut W,
}
impl<'a> STATUS_N_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bits 0:3; `value` is masked to 0x0f before writing.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
impl R {
    // Read accessors: each extracts its field from the cached register
    // value with the shift/mask matching the "Bit N" / "Bits A:B" docs.
    #[doc = "Bit 31 - If 1, an instruction written to SMx_INSTR is stalled, and latched by the state machine. Will clear to 0 once this instruction completes."]
    #[inline(always)]
    pub fn exec_stalled(&self) -> EXEC_STALLED_R {
        EXEC_STALLED_R::new(((self.bits >> 31) & 0x01) != 0)
    }
    #[doc = "Bit 30 - If 1, the MSB of the Delay/Side-set instruction field is used as side-set enable, rather than a side-set data bit. This allows instructions to perform side-set optionally, rather than on every instruction, but the maximum possible side-set width is reduced from 5 to 4. Note that the value of PINCTRL_SIDESET_COUNT is inclusive of this enable bit."]
    #[inline(always)]
    pub fn side_en(&self) -> SIDE_EN_R {
        SIDE_EN_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 29 - If 1, side-set data is asserted to pin directions, instead of pin values"]
    #[inline(always)]
    pub fn side_pindir(&self) -> SIDE_PINDIR_R {
        SIDE_PINDIR_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bits 24:28 - The GPIO number to use as condition for JMP PIN. Unaffected by input mapping."]
    #[inline(always)]
    pub fn jmp_pin(&self) -> JMP_PIN_R {
        JMP_PIN_R::new(((self.bits >> 24) & 0x1f) as u8)
    }
    #[doc = "Bits 19:23 - Which data bit to use for inline OUT enable"]
    #[inline(always)]
    pub fn out_en_sel(&self) -> OUT_EN_SEL_R {
        OUT_EN_SEL_R::new(((self.bits >> 19) & 0x1f) as u8)
    }
    #[doc = "Bit 18 - If 1, use a bit of OUT data as an auxiliary write enable\\n When used in conjunction with OUT_STICKY, writes with an enable of 0 will\\n deassert the latest pin write. This can create useful masking/override behaviour\\n due to the priority ordering of state machine pin writes (SM0 < SM1 < ...)"]
    #[inline(always)]
    pub fn inline_out_en(&self) -> INLINE_OUT_EN_R {
        INLINE_OUT_EN_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 17 - Continuously assert the most recent OUT/SET to the pins"]
    #[inline(always)]
    pub fn out_sticky(&self) -> OUT_STICKY_R {
        OUT_STICKY_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bits 12:16 - After reaching this address, execution is wrapped to wrap_bottom.\\n If the instruction is a jump, and the jump condition is true, the jump takes priority."]
    #[inline(always)]
    pub fn wrap_top(&self) -> WRAP_TOP_R {
        WRAP_TOP_R::new(((self.bits >> 12) & 0x1f) as u8)
    }
    #[doc = "Bits 7:11 - After reaching wrap_top, execution is wrapped to this address."]
    #[inline(always)]
    pub fn wrap_bottom(&self) -> WRAP_BOTTOM_R {
        WRAP_BOTTOM_R::new(((self.bits >> 7) & 0x1f) as u8)
    }
    #[doc = "Bit 4 - Comparison used for the MOV x, STATUS instruction."]
    #[inline(always)]
    pub fn status_sel(&self) -> STATUS_SEL_R {
        STATUS_SEL_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bits 0:3 - Comparison level for the MOV x, STATUS instruction"]
    #[inline(always)]
    pub fn status_n(&self) -> STATUS_N_R {
        STATUS_N_R::new((self.bits & 0x0f) as u8)
    }
}
impl W {
    // Each method returns a write proxy that updates its field in the
    // staged `bits` value. EXEC_STALLED has no write proxy here (it is
    // only exposed through the reader above).
    #[doc = "Bit 30 - If 1, the MSB of the Delay/Side-set instruction field is used as side-set enable, rather than a side-set data bit. This allows instructions to perform side-set optionally, rather than on every instruction, but the maximum possible side-set width is reduced from 5 to 4. Note that the value of PINCTRL_SIDESET_COUNT is inclusive of this enable bit."]
    #[inline(always)]
    pub fn side_en(&mut self) -> SIDE_EN_W {
        SIDE_EN_W { w: self }
    }
    #[doc = "Bit 29 - If 1, side-set data is asserted to pin directions, instead of pin values"]
    #[inline(always)]
    pub fn side_pindir(&mut self) -> SIDE_PINDIR_W {
        SIDE_PINDIR_W { w: self }
    }
    #[doc = "Bits 24:28 - The GPIO number to use as condition for JMP PIN. Unaffected by input mapping."]
    #[inline(always)]
    pub fn jmp_pin(&mut self) -> JMP_PIN_W {
        JMP_PIN_W { w: self }
    }
    #[doc = "Bits 19:23 - Which data bit to use for inline OUT enable"]
    #[inline(always)]
    pub fn out_en_sel(&mut self) -> OUT_EN_SEL_W {
        OUT_EN_SEL_W { w: self }
    }
    #[doc = "Bit 18 - If 1, use a bit of OUT data as an auxiliary write enable\\n When used in conjunction with OUT_STICKY, writes with an enable of 0 will\\n deassert the latest pin write. This can create useful masking/override behaviour\\n due to the priority ordering of state machine pin writes (SM0 < SM1 < ...)"]
    #[inline(always)]
    pub fn inline_out_en(&mut self) -> INLINE_OUT_EN_W {
        INLINE_OUT_EN_W { w: self }
    }
    #[doc = "Bit 17 - Continuously assert the most recent OUT/SET to the pins"]
    #[inline(always)]
    pub fn out_sticky(&mut self) -> OUT_STICKY_W {
        OUT_STICKY_W { w: self }
    }
    #[doc = "Bits 12:16 - After reaching this address, execution is wrapped to wrap_bottom.\\n If the instruction is a jump, and the jump condition is true, the jump takes priority."]
    #[inline(always)]
    pub fn wrap_top(&mut self) -> WRAP_TOP_W {
        WRAP_TOP_W { w: self }
    }
    #[doc = "Bits 7:11 - After reaching wrap_top, execution is wrapped to this address."]
    #[inline(always)]
    pub fn wrap_bottom(&mut self) -> WRAP_BOTTOM_W {
        WRAP_BOTTOM_W { w: self }
    }
    #[doc = "Bit 4 - Comparison used for the MOV x, STATUS instruction."]
    #[inline(always)]
    pub fn status_sel(&mut self) -> STATUS_SEL_W {
        STATUS_SEL_W { w: self }
    }
    #[doc = "Bits 0:3 - Comparison level for the MOV x, STATUS instruction"]
    #[inline(always)]
    pub fn status_n(&mut self) -> STATUS_N_W {
        STATUS_N_W { w: self }
    }
}
|
use megam_api::util::accounts::Account;
use megam_api::util::accounts::Success;
use hamcrest::assert_that;
#[test]
fn create() {
    // Construct the Strings directly instead of `format!("{}", literal)`,
    // which routes every literal through the formatting machinery for no
    // benefit.
    let mut p = Account {
        first_name: "rr".to_string(),
        last_name: String::new(),
        phone: String::new(),
        email: "b@test.com".to_string(),
        api_key: "testapikey".to_string(),
        password: "testpassword".to_string(),
        authority: String::new(),
        password_reset_key: String::new(),
        password_reset_sent_at: String::new(),
    };
    //p.create();
    // assert_that(p.create(), execs().with_status(0));
    //assert_that(p.create(), Ok(Success::Success));
    //assert_that(p.create(), is(Ok(Success::Success)));
    //assert!(p.create().is_err());
    // Silence unused-variable warnings until create() is re-enabled.
    let _ = &mut p;
}
|
use super::SigningPublicKey;
use crate::dev::*;
use derive_more::{Add, AddAssign, AsRef, From, Into};
use smallvec::SmallVec;
use std::{
cmp::Ordering,
time::{Duration, Instant},
};
/// Represents a path from the root to a node.
/// This path is generally part of a spanning tree, except possibly the last hop
/// (it can loop when sending coords to your parent, but they will see this and
/// know not to use a looping path).
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Coords(SmallVec<[SwitchPort; Self::DEFAULT_SIZE]>);

impl Coords {
    /// Number of hops stored inline before the `SmallVec` spills to the heap.
    const DEFAULT_SIZE: usize = 8;

    /// Returns the number of hops in the path.
    #[inline]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Returns `true` if the path contains no hops.
    /// Provided alongside `len` (clippy: `len_without_is_empty`).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
// impl std::convert::TryFrom<&WireCoords> for Coords {
// type Error = Error;
// fn try_from(coords: &WireCoords) -> Result<Self, Self::Error> {
// unimplemented!()
// }
// }
/// Represents an encoded, compressed representation of [`Coords`].
///
/// [`Coords`]: struct.Coords
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct WireCoords(SmallVec<[u8; Self::DEFAULT_BYTES_SIZE]>);

impl WireCoords {
    /// Number of encoded bytes stored inline before spilling to the heap.
    const DEFAULT_BYTES_SIZE: usize = 32;

    /// Returns the number of encoded bytes.
    #[inline]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Returns `true` if the encoding is empty.
    /// Provided alongside `len` (clippy: `len_without_is_empty`).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
impl From<&Coords> for WireCoords {
    // TODO: stub — the ports-to-wire-bytes encoding is not written yet;
    // calling this panics via `unimplemented!()`.
    #[inline]
    fn from(coords: &Coords) -> Self {
        unimplemented!()
    }
}
impl PartialOrd for WireCoords {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // TODO: stub — always panics. NOTE(review): the sketch below never
        // yields `Ordering::Equal`, which would violate the `PartialOrd`
        // contract w.r.t. `PartialEq`; revisit before implementing.
        // if self.0.len() > other.0.len() {
        //     Some(Ordering::Greater)
        // } else {
        //     for i in self.iter() {
        //         if self.0[i] != other.0[i] {
        //             return Some(Ordering::Greater);
        //         }
        //     }
        //     Some(Ordering::Less)
        // }
        unimplemented!()
    }
}
// impl Ord for WireCoords {
// fn cmp(&self, other: &Self) -> Ordering {
// self.partial_cmp(other).unwrap()
// }
// }
/// Uniquely identifies a linked peer
///
/// TODO docs Interface number of a given peer (in the switch?)
#[derive(
    AddAssign, AsRef, Clone, Copy, Debug, Default, Eq, From, Hash, Into, Ord, PartialEq, PartialOrd,
)]
#[repr(transparent)]
// Newtype over the raw interface number; `repr(transparent)` keeps it
// ABI-identical to `u64`.
pub struct SwitchPort(u64);
/// Represents the topology and network state-dependent info about a node, sans
/// the signatures that accompany it. Nodes will pick the best root they see,
/// provided that the root continues to push out updates with new timestamps.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct SwitchLocator {
    // Signing key of the root this locator is anchored to.
    root: SigningPublicKey,
    timestamp: u32, // TODO? duration, instant?
    // Path from `root` to this node.
    coords: Coords,
}
impl SwitchLocator {
    /// Gets the distance a `SwitchLocator` is from the provided destination
    /// [`WireCoords`]
    // TODO: stub — panics via `unimplemented!()`.
    #[inline]
    pub fn distance(&self, coords: &WireCoords) -> i64 {
        unimplemented!()
    }
    // Borrow the stored path from the root to this node.
    #[inline]
    pub fn coords(&self) -> &Coords {
        &self.coords
    }
    // TODO: stub — panics via `unimplemented!()`; the commented sketch
    // below is incomplete.
    #[inline]
    pub fn wire_coords(&self) -> WireCoords {
        // let mut wire_coords = [u8, self.coords.len()];
        // for port in self.coords.iter() {
        // (&mut wire_coords).write()
        // }
        unimplemented!()
    }
    /// Returns `true` if this locator represents an ancestor of the locator
    /// given as an argument.
    // NOTE(review): relies on `PartialOrd`, whose `partial_cmp` is still
    // `unimplemented!()` below — calling this currently panics.
    #[inline]
    pub fn is_ancestor_of(&self, other: &Self) -> bool {
        self < other
    }
}
/// Returns an ordering of `SwitchLocator`s, with the lesser being closer to
/// the root, i.e. the ancestor of the other.
/// ? aka switchLocator.ldist
impl PartialOrd for SwitchLocator {
    /// Delegates to [`Ord::cmp`] so the two orderings can never disagree
    /// (clippy: `non_canonical_partial_cmp_impl`). `cmp` is still
    /// `unimplemented!()`, so this continues to panic until it is written —
    /// same observable behavior as the previous direct stub.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Returns an ordering of `SwitchLocator`s, with the lesser being closer to
/// the root, i.e. the ancestor of the other.
impl Ord for SwitchLocator {
    // TODO: stub — panics. Intended ordering per the doc comment above:
    // lesser == closer to the root (the ancestor).
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        unimplemented!()
    }
}
/// Contains the root node's signing key, timestamp, and signed per-hop info
/// about a path from the root node to some other node in the network.
/// This is exchanged with peers to construct the spanning tree.
/// A subset of this information, excluding signatures, is used to construct
/// [`SwitchLocator`]s.
///
/// [`SwitchLocator`]: struct.SwitchLocator
#[derive(Clone, Copy, Debug)]
pub struct RootUpdate {
    root: SigningPublicKey,
    // NOTE(review): `Instant` is process-local and not serializable, which
    // is at odds with "exchanged with peers" above — probably needs a
    // wire-format timestamp; confirm.
    timestamp: Instant,
    // hops: TODO:
}
/// Represents the signed information about the path leading from the root to
/// the `next` node, via the `port` specified here.
#[derive(Clone, Copy, Debug)]
pub struct SwitchMessageHop {
    // Port through which the path continues (see the doc comment above).
    port: SwitchPort,
    // Signing key of the next node on the path.
    next: SigningPublicKey,
    // signature: TODO:
}
|
pub mod coordinates;
|
use crate::args::Args;
use crate::bash::is_literal_bash_string;
use crate::path_clean::PathClean;
use crate::trace::Trace;
use itertools::{chain, Itertools};
use nix::unistd::{access, AccessFlags};
use once_cell::sync::Lazy;
use std::collections::{BTreeMap, HashSet};
use std::env::current_dir;
use std::ffi::{OsStr, OsString};
use std::fs::{read, read_link, File};
use std::io::{Read, Write};
use std::os::unix::ffi::OsStrExt;
use std::os::unix::prelude::OsStringExt;
use std::os::unix::process::CommandExt;
use std::os::unix::process::ExitStatusExt;
use std::path::{Path, PathBuf};
use std::process::{exit, Command, Stdio};
use std::time::Instant;
use tempfile::NamedTempFile;
use ufcs::Pipe;
mod args;
mod bash;
mod nix_path;
mod path_clean;
mod shebang;
mod trace;
// Environment variables as an ordered map; `BTreeMap` keeps keys sorted,
// making serialization deterministic.
type EnvMap = BTreeMap<OsString, OsString>;
// Captured shell state: environment plus bash's $BASHOPTS/$SHELLOPTS.
// NOTE(review): not constructed in this chunk — presumably filled from the
// "BASHOPTS=%s\0SHELLOPTS=%s\0" header emitted by `run_nix_shell`; confirm.
struct EnvOptions {
    env: EnvMap,
    bashopts: OsString,
    shellopts: OsString,
}
/// Lazily-initialized XDG base directories, namespaced under the
/// `cached-nix-shell` prefix.
static XDG_DIRS: Lazy<xdg::BaseDirectories> = Lazy::new(|| {
    // Fixed garbled panic message ("Can't get find ...").
    xdg::BaseDirectories::with_prefix("cached-nix-shell")
        .expect("Can't find base cache directory")
});
/// Serialize environment variables in the same way as `env -0` does:
/// `key=value` pairs, each terminated by a NUL byte. Since `EnvMap` is a
/// `BTreeMap`, the output order is deterministic (sorted by key).
fn serialize_env(env: &EnvMap) -> Vec<u8> {
    let mut out = Vec::new();
    for (key, value) in env.iter() {
        out.extend_from_slice(key.as_bytes());
        out.push(b'=');
        out.extend_from_slice(value.as_bytes());
        out.push(0);
    }
    out
}
/// Deserealize environment variables from `env -0` format: NUL-separated
/// `key=value` entries. The trailing NUL yields one empty chunk, which is
/// skipped.
fn deserealize_env(vec: Vec<u8>) -> EnvMap {
    let mut env = BTreeMap::new();
    for entry in vec.split(|&b| b == 0) {
        if entry.is_empty() {
            continue; // last entry has trailing NUL
        }
        let pos = entry.iter().position(|&x| x == b'=').unwrap();
        env.insert(
            OsStr::from_bytes(&entry[..pos]).to_owned(),
            OsStr::from_bytes(&entry[pos + 1..]).to_owned(),
        );
    }
    env
}
/// Serialize an argument list as a flat sequence of NUL-terminated byte
/// strings (same framing as `serialize_env`).
fn serialize_args(args: &[OsString]) -> Vec<u8> {
    args.iter()
        .flat_map(|arg| arg.as_bytes().iter().copied().chain(std::iter::once(0)))
        .collect()
}
/// Serialize byte slices with a length prefix: for each slice, its decimal
/// length, a NUL byte, then the raw bytes.
fn serialize_vecs(vecs: &[&[u8]]) -> Vec<u8> {
    let mut out = Vec::new();
    for chunk in vecs.iter() {
        out.extend_from_slice(chunk.len().to_string().as_bytes());
        out.push(0);
        out.extend_from_slice(chunk);
    }
    out
}
/// Unwrap `x`, or report the error on stderr and terminate the process
/// with exit code 1.
fn unwrap_or_errx<T>(x: Result<T, String>) -> T {
    x.unwrap_or_else(|msg| {
        eprintln!("cached-nix-shell: {msg}");
        exit(1)
    })
}
// Inputs that define a nix-shell invocation: working directory,
// whitelisted environment, and the nix-shell argument list.
struct NixShellInput {
    pwd: PathBuf,
    env: EnvMap,
    args: Vec<OsString>,
    // Extra nix-shell args (from Args::weak_kw), passed before `args`.
    // NOTE(review): presumably excluded from the cache key ("weak");
    // confirm where the cache key is computed.
    weak_args: Vec<OsString>,
}
// Results of running nix-shell: the resulting environment, the trace of
// accessed files (used for cache invalidation — see check_for_changes in
// run_nix_shell), and the derivation (.drv) store path.
struct NixShellOutput {
    env: EnvMap,
    trace: trace::Trace,
    drv: String,
}
/// Build a reduced PATH containing only the directories (from the current
/// PATH, in their original order) that provide one of the few binaries a
/// nix-shell run needs.
fn minimal_essential_path() -> OsString {
    let required_binaries = ["tar", "gzip", "git", "nix-shell", "rm"];
    // First PATH directory containing an executable `binary`, if any.
    fn which_dir(binary: &&str) -> Option<PathBuf> {
        std::env::var_os("PATH")
            .as_ref()
            .unwrap()
            .pipe(std::env::split_paths)
            .find(|dir| {
                if access(&dir.join(binary), AccessFlags::X_OK).is_err() {
                    return false;
                }
                if binary == &"nix-shell" {
                    // Ignore our fake nix-shell.
                    return !dir
                        .join(binary)
                        .canonicalize()
                        .ok()
                        .and_then(|x| x.file_name().map(|x| x.to_os_string()))
                        .map(|x| x == "cached-nix-shell")
                        .unwrap_or(true);
                }
                true
            })
    }
    let required_paths = required_binaries
        .iter()
        .filter_map(which_dir)
        .collect::<HashSet<PathBuf>>();
    // We can't just join_paths(required_paths) -- we need to preserve order
    std::env::var_os("PATH")
        .as_ref()
        .unwrap()
        .pipe(std::env::split_paths)
        .filter(|path_item| required_paths.contains(path_item))
        .unique()
        .pipe(std::env::join_paths)
        .unwrap()
}
fn absolute_dirname(script_fname: &OsStr) -> PathBuf {
Path::new(&script_fname)
.parent()
.expect("Can't get script dirname")
.pipe(absolute)
}
/// Make `path` absolute by joining it onto the current directory.
fn absolute(path: &Path) -> PathBuf {
    if path.is_absolute() {
        return path.to_path_buf();
    }
    // We do not use PathBuf::canonicalize() here since we do not want
    // symlink resolving.
    current_dir().expect("Can't get PWD").join(path).clean()
}
/// Translate parsed command-line `Args` into a nix-shell invocation:
/// `--pure`, a whitelisted environment, and the remaining arguments.
fn args_to_inp(pwd: PathBuf, x: &Args) -> NixShellInput {
    let mut args = Vec::new();
    args.push(OsString::from("--pure"));
    let env = {
        let mut clean_env = BTreeMap::new();
        // Env vars to pass to `nix-shell --pure`. Changes to these variables
        // would invalidate the cache.
        let whitelist = &[
            "HOME",
            "NIX_PATH",
            // tmp dir
            "TMPDIR",
            "XDG_RUNTIME_DIR",
            // ssl-related
            "CURL_CA_BUNDLE",
            "GIT_SSL_CAINFO",
            "NIX_SSL_CERT_FILE",
            "SSL_CERT_FILE",
            // Necessary if nix build caches are accessed via a proxy
            "http_proxy",
            "https_proxy",
            "ftp_proxy",
            "all_proxy",
            "no_proxy",
        ];
        for var in whitelist {
            if let Some(val) = std::env::var_os(var) {
                clean_env.insert(OsString::from(var), val);
            }
        }
        // User-requested extra vars survive `--pure` via `--keep`.
        for var in x.keep.iter() {
            if let Some(val) = std::env::var_os(var) {
                clean_env.insert(var.clone(), val);
                args.push("--keep".into());
                args.push(var.clone());
            }
        }
        // PATH is reduced to the few directories nix-shell itself needs.
        clean_env.insert(OsString::from("PATH"), minimal_essential_path());
        clean_env
    };
    args.extend(x.other_kw.clone());
    args.push(OsString::from("--"));
    args.extend(x.rest.clone());
    NixShellInput {
        pwd,
        env,
        args,
        weak_args: x.weak_kw.clone(),
    }
}
/// Run the real nix-shell and capture: the resulting environment (via an
/// `env -0` dump into a temp file), the file-access trace (via the
/// preloaded shim), and the derivation path (via `nix show-derivation`).
fn run_nix_shell(inp: &NixShellInput) -> NixShellOutput {
    let trace_file = NamedTempFile::new().expect("can't create temporary file");
    let env_file = NamedTempFile::new().expect("can't create temporary file");
    // Shell snippet run inside nix-shell: print BASHOPTS/SHELLOPTS, then
    // the whole environment NUL-separated, redirected into env_file.
    let env_cmd = [
        b"{ printf \"BASHOPTS=%s\\0SHELLOPTS=%s\\0\" \"${BASHOPTS-}\" \"${SHELLOPTS-}\" ; env -0; } >",
        bash::quote(env_file.path().as_os_str().as_bytes()).as_slice(),
    ]
    .concat();
    let env = {
        // LD_PRELOAD (and its macOS counterpart DYLD_INSERT_LIBRARIES)
        // inject the trace shim; TRACE_NIX tells it where to log.
        let status = Command::new(concat!(env!("CNS_NIX"), "nix-shell"))
            .arg("--run")
            .arg(OsStr::from_bytes(&env_cmd))
            .args(&inp.weak_args)
            .args(&inp.args)
            .stderr(std::process::Stdio::inherit())
            .current_dir(&inp.pwd)
            .env_clear()
            .envs(&inp.env)
            .env("LD_PRELOAD", env!("CNS_TRACE_NIX_SO"))
            .env("DYLD_INSERT_LIBRARIES", env!("CNS_TRACE_NIX_SO"))
            .env("TRACE_NIX", trace_file.path())
            .stdin(Stdio::null())
            .status()
            .expect("failed to execute nix-shell");
        if !status.success() {
            eprintln!("cached-nix-shell: nix-shell: {status}");
            // Mirror shell conventions: signal N maps to exit code 127+N.
            let code = status
                .code()
                .or_else(|| status.signal().map(|x| x + 127))
                .unwrap_or(255);
            exit(code);
        }
        let mut env = read(env_file.path())
            .expect("can't read an environment file")
            .pipe(deserealize_env);
        // Drop session variables exported by bash
        env.remove(OsStr::new("OLDPWD"));
        env.remove(OsStr::new("PWD"));
        env.remove(OsStr::new("SHLVL"));
        env.remove(OsStr::new("_"));
        env
    };
    let env_out = env
        .get(OsStr::new("out"))
        .expect("expected to have `out` environment variable");
    let mut trace_file =
        trace_file.reopen().expect("can't reopen temporary file");
    let mut trace_data = Vec::new();
    trace_file
        .read_to_end(&mut trace_data)
        .expect("Can't read trace file");
    let trace = Trace::load(trace_data);
    if trace.check_for_changes() {
        eprintln!("cached-nix-shell: some files are already updated, cache won't be reused");
    }
    std::mem::drop(trace_file);
    let drv: String = {
        // nix 2.3
        let mut exec = Command::new(concat!(env!("CNS_NIX"), "nix"))
            .arg("show-derivation")
            .arg(env_out)
            .output()
            .expect("failed to execute nix show-derivation");
        let mut stderr = exec.stderr.clone();
        if !exec.status.success() {
            // nix 2.4
            exec = Command::new(concat!(env!("CNS_NIX"), "nix"))
                .arg("show-derivation")
                .arg("--extra-experimental-features")
                .arg("nix-command")
                .arg(env_out)
                .output()
                .expect("failed to execute nix show-derivation");
            stderr.extend(b"\n");
            stderr.extend(exec.stderr);
        }
        if !exec.status.success() {
            eprintln!(
                "cached-nix-shell: failed to execute nix show-derivation"
            );
            let _ = std::io::stderr().write_all(&stderr);
            exit(1);
        }
        // Path to .drv file is always in ASCII, so no information is lost.
        let output = String::from_utf8_lossy(&exec.stdout);
        let output: serde_json::Value =
            serde_json::from_str(&output).expect("failed to parse json");
        // The first key of the toplevel object contains the path to .drv file.
        let (drv, _) = output.as_object().unwrap().into_iter().next().unwrap();
        drv.clone()
    };
    NixShellOutput { env, trace, drv }
}
/// Execute a shebang script: obtain the shell environment (presumably
/// cached — see `cached_shell_env`), then exec the interpreter on the
/// script. On success this never returns; the trailing eprintln/exit only
/// run if exec itself fails.
fn run_script(
    fname: OsString,
    nix_shell_args: Vec<OsString>,
    script_args: Vec<OsString>,
) {
    let nix_shell_args = Args::parse(nix_shell_args, true).pipe(unwrap_or_errx);
    let inp = args_to_inp(absolute_dirname(&fname), &nix_shell_args);
    let env = cached_shell_env(nix_shell_args.pure, &inp);
    let exec = if is_literal_bash_string(nix_shell_args.interpreter.as_bytes())
    {
        // eprintln!("Interpreter is a literal string, executing directly");
        Command::new(nix_shell_args.interpreter)
            .arg(fname)
            .args(script_args)
            .env_clear()
            .envs(&env.env)
            .exec()
    } else {
        // eprintln!("Interpreter is bash command, executing 'bash -c'");
        // Non-literal interpreter strings go through `bash -c 'exec ... "$@"'`.
        let mut exec_string = OsString::new();
        exec_string.push("exec ");
        exec_string.push(nix_shell_args.interpreter);
        exec_string.push(r#" "$@""#);
        Command::new("bash")
            .arg("-c")
            .arg(exec_string)
            .arg("cached-nix-shell-bash") // corresponds to "$0" inside '-i'
            .arg(fname)
            .args(script_args)
            .env_clear()
            .envs(&env.env)
            .exec()
    };
    eprintln!("cached-nix-shell: couldn't run: {exec:?}");
    exit(1);
}
/// Behave like `nix-shell` itself: normalize the working directory so that
/// equivalent invocations from different directories share a cache entry,
/// then build/reuse the cached environment and exec the requested command.
fn run_from_args(args: Vec<OsString>) {
    let mut args = Args::parse(args, false).pipe(unwrap_or_errx);
    // Normalize PWD.
    // References:
    // https://github.com/NixOS/nix/blob/2.3.10/src/libexpr/common-eval-args.cc#L46-L57
    // https://github.com/NixOS/nix/blob/2.3.10/src/nix-build/nix-build.cc#L279-L291
    let nix_shell_pwd = if nix_path::contains_relative_paths(&args) {
        // in: nix-shell -I . ""
        // out: cd $PWD; nix-shell -I . ""
        current_dir().expect("Can't get PWD")
    } else if args.packages_or_expr {
        // in: nix-shell -p ...
        // out: cd /var/empty; nix-shell -p ...
        PathBuf::from(env!("CNS_VAR_EMPTY"))
    } else if let [arg] = &mut args.rest[..] {
        if arg == "" {
            // in: nix-shell ""
            // out: cd $PWD; nix-shell ""
            // nix-shell "" will use ./default.nix
            current_dir().expect("Can't get PWD")
        } else if arg.as_bytes().starts_with(b"<")
            && arg.as_bytes().ends_with(b">")
            || nix_path::is_uri(arg.as_bytes())
        {
            // in: nix-shell '<foo>'
            // out: cd /var/empty; nix-shell '<foo>'
            // in: nix-shell http://...
            // out: cd /var/empty; nix-shell http://...
            PathBuf::from(env!("CNS_VAR_EMPTY"))
        } else if arg.as_bytes().ends_with(b"/") || Path::new(arg).is_dir() {
            // in: nix-shell /path/to/dir
            // out: cd /path/to/dir; nix-shell .
            let pwd = absolute(Path::new(arg));
            *arg = OsString::from(".");
            pwd
        } else {
            // in: nix-shell /path/to/file
            // out: cd /path/to; nix-shell ./file
            // The argument is rewritten to "./file" relative to its parent.
            let pwd = absolute_dirname(arg);
            *arg = PathBuf::from(&arg)
                .components()
                .next_back()
                .unwrap()
                .pipe(|x| PathBuf::from(".").join(x))
                .into_os_string();
            pwd
        }
    } else {
        // in: nix-shell
        // out: cd $PWD; nix-shell
        // nix-shell will use ./shell.nix or ./default.nix
        // in: nix-shell foo.nix bar.nix ...
        current_dir().expect("Can't get PWD")
    };
    let inp = args_to_inp(nix_shell_pwd, &args);
    let env = cached_shell_env(args.pure, &inp);
    // Translate the run mode into a concrete command + argument list.
    let (cmd, cmd_args) = match args.run {
        args::RunMode::InteractiveShell => {
            let mut args = vec!["--rcfile".into(), env!("CNS_RCFILE").into()];
            args.append(build_bash_options(&env).as_mut());
            ("bash".into(), args)
        }
        args::RunMode::Shell(cmd) => {
            let mut args = build_bash_options(&env);
            args.extend_from_slice(&["-c".into(), cmd]);
            ("bash".into(), args)
        }
        args::RunMode::Exec(cmd, cmd_args) => (cmd, cmd_args),
    };
    let exec = Command::new(cmd)
        .args(cmd_args)
        .env_clear()
        .envs(&env.env)
        .exec();
    // exec() only returns if the replacement failed.
    eprintln!("cached-nix-shell: couldn't run: {exec:?}");
    exit(1);
}
/// Return the shell environment for `inp`, either from the on-disk cache
/// (keyed by a blake3 hash of env + args + pwd) or by actually running
/// nix-shell and caching the result.
fn cached_shell_env(pure: bool, inp: &NixShellInput) -> EnvOptions {
    let inputs = serialize_vecs(&[
        &serialize_env(&inp.env),
        &serialize_args(&inp.args),
        inp.pwd.as_os_str().as_bytes(),
    ]);
    let inputs_hash = blake3::hash(&inputs).to_hex().as_str().to_string();
    let mut env = if let Some(env) = check_cache(&inputs_hash) {
        env
    } else {
        eprintln!("cached-nix-shell: updating cache");
        let start = Instant::now();
        let outp = run_nix_shell(inp);
        eprintln!("cached-nix-shell: done in {:?}", start.elapsed());
        // TODO: use flock
        cache_write(&inputs_hash, "inputs", &inputs);
        cache_write(&inputs_hash, "env", &serialize_env(&outp.env));
        cache_write(&inputs_hash, "trace", &outp.trace.serialize());
        cache_symlink(&inputs_hash, "drv", &outp.drv);
        outp.env
    };
    // Shell/bash options are forwarded to bash via flags (see
    // build_bash_options), not through the environment.
    let shellopts = env.remove(OsStr::new("SHELLOPTS")).unwrap_or_default();
    let bashopts = env.remove(OsStr::new("BASHOPTS")).unwrap_or_default();
    env.insert(OsString::from("IN_CACHED_NIX_SHELL"), OsString::from("1"));
    EnvOptions {
        env: merge_env(if pure { env } else { merge_impure_env(env) }),
        shellopts,
        bashopts,
    }
}
/// Merge the ambient (impure) process environment into the cached env.
///
/// Cached values win over ambient ones, except for a few delimited list
/// variables (PATH-like), where the ambient value is appended after the
/// cached one. Ambient-only variables are added as-is.
fn merge_impure_env(mut env: EnvMap) -> EnvMap {
    // PATH-like variables are joined rather than overridden.
    let mut delim = EnvMap::new();
    for var in ["PATH", "HOST_PATH", "XDG_DATA_DIRS"] {
        delim.insert(OsString::from(var), OsString::from(":"));
    }
    // Set to "/no-cert-file.crt" by setup.sh for pure envs.
    env.remove(OsStr::new("SSL_CERT_FILE"));
    env.remove(OsStr::new("NIX_SSL_CERT_FILE"));
    env.insert(OsString::from("IN_NIX_SHELL"), OsString::from("impure"));
    for (var, val) in std::env::vars_os() {
        env.entry(var.clone())
            .and_modify(|old_val| {
                // Variable exists in the cache: append the ambient value
                // only for delimited variables, otherwise keep the cached one.
                if let Some(d) = delim.get(&var) {
                    let joined = [
                        old_val.as_os_str().as_bytes(),
                        d.as_os_str().as_bytes(),
                        val.as_os_str().as_bytes(),
                    ]
                    .concat();
                    *old_val = OsString::from(OsStr::from_bytes(&joined));
                }
            })
            .or_insert(val);
    }
    env
}
/// Copy a fixed set of ambient variables into `env`.
///
/// These variables are always passed by the original nix-shell, regardless
/// of the --pure flag; absent variables are simply skipped.
fn merge_env(mut env: EnvMap) -> EnvMap {
    [
        "USER",
        "LOGNAME",
        "DISPLAY",
        "WAYLAND_DISPLAY",
        "WAYLAND_SOCKET",
        "TERM",
        "NIX_SHELL_PRESERVE_PROMPT",
        "TZ",
        "PAGER",
        "SHLVL",
    ]
    .iter()
    .filter_map(|var| std::env::var_os(var).map(|val| (OsString::from(*var), val)))
    .for_each(|(var, val)| {
        env.insert(var, val);
    });
    env
}
fn build_bash_options(env: &EnvOptions) -> Vec<OsString> {
// XXX: only check for options that are set by current stdenv and nix-shell.
const BASH_OPTIONS: [&[u8]; 3] =
[b"execfail", b"inherit_errexit", b"nullglob"];
const SHELL_OPTIONS: [&[u8]; 1] = [b"pipefail"];
chain!(
env.bashopts
.as_bytes()
.split(|b| *b == b':')
.filter(|opt| BASH_OPTIONS.contains(opt))
.map(|opt| vec!["-O".into(), OsString::from_vec(opt.to_vec())]),
env.shellopts
.as_bytes()
.split(|b| *b == b':')
.filter(|opt| SHELL_OPTIONS.contains(opt))
.map(|opt| vec!["-o".into(), OsString::from_vec(opt.to_vec())]),
)
.flatten()
.collect()
}
/// Look up a cached environment by input hash.
///
/// Returns `None` when any cache file is missing, when the `.drv` symlink
/// is dangling (the derivation was garbage-collected), or when the recorded
/// trace shows that some input file has changed since the cache was written.
fn check_cache(hash: &str) -> Option<BTreeMap<OsString, OsString>> {
    let env_fname = XDG_DIRS.find_cache_file(format!("{hash}.env"))?;
    let drv_fname = XDG_DIRS.find_cache_file(format!("{hash}.drv"))?;
    let trace_fname = XDG_DIRS.find_cache_file(format!("{hash}.trace"))?;
    let env = read(env_fname).unwrap().pipe(deserealize_env);
    // The symlink must resolve to an existing store path, otherwise the
    // derivation has been GC'ed and the cache entry is stale.
    let drv_store_fname = read_link(drv_fname).ok()?;
    std::fs::metadata(drv_store_fname).ok()?;
    let trace = read(trace_fname).unwrap().pipe(Trace::load);
    if trace.check_for_changes() {
        return None;
    }
    Some(env)
}
/// Write `text` to the cache file `{hash}.{ext}`.
///
/// Best-effort: a failure to store the cache only produces a warning.
fn cache_write(hash: &str, ext: &str, text: &[u8]) {
    let result = XDG_DIRS
        .place_cache_file(format!("{hash}.{ext}"))
        .and_then(File::create)
        .and_then(|mut file| file.write_all(text));
    if let Err(e) = result {
        eprintln!("Warning: can't store cache: {e}");
    }
}
/// Create (or replace) the cache symlink `{hash}.{ext}` -> `target`.
///
/// Best-effort: a failure to create the symlink only produces a warning.
fn cache_symlink(hash: &str, ext: &str, target: &str) {
    let result = XDG_DIRS
        .place_cache_file(format!("{hash}.{ext}"))
        .and_then(|fname| {
            // Remove any stale symlink left over from a previous run;
            // ignore the error if it didn't exist.
            let _ = std::fs::remove_file(&fname);
            std::os::unix::fs::symlink(target, &fname)
        });
    if let Err(e) = result {
        eprintln!("Warning: can't symlink to cache: {e}");
    }
}
/// `cached-nix-shell --wrap CMD ARGS...`: run CMD with a PATH whose first
/// entry contains our nix-shell shim, so nested nix-shell invocations are
/// served from the cache.
fn wrap(cmd: Vec<OsString>) {
    if cmd.is_empty() {
        eprintln!("cached-nix-shell: command not specified");
        eprintln!("usage: cached-nix-shell --wrap COMMAND ARGS...");
        exit(1);
    }
    // Sanity-check that the shim actually exists and is executable before
    // committing to the modified PATH.
    if access(
        Path::new(&format!("{}/nix-shell", env!("CNS_WRAP_PATH"))),
        AccessFlags::X_OK,
    )
    .is_err()
    {
        eprintln!(
            "cached-nix-shell: couldn't wrap, {}/nix-shell is not executable",
            env!("CNS_WRAP_PATH")
        );
        exit(1);
    }
    // Prepend the shim directory to PATH.
    // NOTE(review): this unwrap panics if PATH is unset — presumably
    // acceptable for a shell wrapper, but worth confirming.
    let new_path = [
        env!("CNS_WRAP_PATH").as_bytes(),
        b":",
        std::env::var_os("PATH").unwrap().as_bytes(),
    ]
    .concat();
    let exec = Command::new(&cmd[0])
        .args(&cmd[1..])
        .env("PATH", OsStr::from_bytes(&new_path))
        .exec();
    // exec() only returns if the replacement failed.
    eprintln!("cached-nix-shell: couldn't run: {exec}");
    exit(1);
}
/// Entry point: dispatch between `--wrap` mode, shebang-script mode, and
/// plain nix-shell-compatible mode.
fn main() {
    let argv: Vec<OsString> = std::env::args_os().collect();
    // `cached-nix-shell --wrap CMD ARGS...` never returns.
    if argv.len() >= 2 && argv[1] == "--wrap" {
        wrap(argv[2..].to_vec());
    }
    // If the first argument is a script carrying a cached-nix-shell shebang,
    // run it through the shebang path (never returns on success).
    if let Some(fname) = argv.get(1) {
        if let Some(nix_shell_args) = shebang::parse_script(fname) {
            run_script(fname.clone(), nix_shell_args, argv[2..].to_vec());
        }
    }
    // Otherwise behave like nix-shell itself.
    let rest = if argv.len() > 1 { argv[1..].to_vec() } else { Vec::new() };
    run_from_args(rest);
}
|
use response::page::Pages;
/// Pagination links section of an API response.
#[derive(Deserialize)]
pub struct Links {
    /// Page links; `None` when the response is not paginated.
    pub pages: Option<Pages>,
}
|
use std::io::prelude::*;
use std::sync::Mutex;
use std::{io, fmt};
use std::ffi::OsStr;
use std::fs::{self, File};
use std::path::PathBuf;
#[cfg(any(target_os = "redox", rustdoc))]
use std::{
io::BufWriter,
};
use smallvec::SmallVec;
use log::{Metadata, Record};
/// An output that will be logged to. The two major outputs for most Redox system programs are
/// usually the log file, and the global stdout.
pub struct Output {
    // the actual endpoint to write to.
    endpoint: Mutex<Box<dyn Write + Send + 'static>>,
    // useful for devices like BufWrite or BufRead. You don't want the log file to never be
    // written until the program exits.
    flush_on_newline: bool,
    // specifies the maximum log level possible
    filter: log::LevelFilter,
    // specifies whether the file should contain ANSI escape codes
    ansi: bool,
}
impl fmt::Debug for Output {
    // Manual impl: the boxed `dyn Write` endpoint has no `Debug`, so it is
    // rendered as an opaque placeholder while the plain fields are shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Output")
            .field("endpoint", &"opaque")
            .field("flush_on_newline", &self.flush_on_newline)
            .field("filter", &self.filter)
            .field("ansi", &self.ansi)
            .finish()
    }
}
/// Builder for [`Output`]; options left unset fall back to defaults in
/// [`OutputBuilder::build`].
pub struct OutputBuilder {
    endpoint: Box<dyn Write + Send + 'static>,
    flush_on_newline: Option<bool>,
    filter: Option<log::LevelFilter>,
    ansi: Option<bool>,
}
impl OutputBuilder {
    /// Create a builder whose endpoint is a log file inside the Redox
    /// `logging:` scheme at `category/subcategory/logfile.log`.
    ///
    /// On non-Redox targets this returns a builder writing into a throwaway
    /// `Vec<u8>` so callers don't have to special-case the platform.
    pub fn in_redox_logging_scheme<A, B, C>(category: A, subcategory: B, logfile: C) -> Result<Self, io::Error>
    where
        A: AsRef<OsStr>,
        B: AsRef<OsStr>,
        C: AsRef<OsStr>,
    {
        if !cfg!(target_os = "redox") {
            return Ok(Self::with_endpoint(Vec::new()));
        }
        let mut path = PathBuf::from("logging:");
        path.push(category.as_ref());
        path.push(subcategory.as_ref());
        path.push(logfile.as_ref());
        path.set_extension("log");
        // Create intermediate directories on first use.
        if let Some(parent) = path.parent() {
            if !parent.exists() {
                fs::create_dir_all(parent)?;
            }
        }
        Ok(Self::with_endpoint(File::create(path)?))
    }
    /// Builder that writes to standard output.
    pub fn stdout() -> Self {
        Self::with_endpoint(io::stdout())
    }
    /// Builder that writes to standard error.
    pub fn stderr() -> Self {
        Self::with_endpoint(io::stderr())
    }
    /// Builder over any writer, boxing it internally.
    pub fn with_endpoint<T>(endpoint: T) -> Self
    where
        T: Write + Send + 'static
    {
        Self::with_dyn_endpoint(Box::new(endpoint))
    }
    /// Builder over an already-boxed writer.
    pub fn with_dyn_endpoint(endpoint: Box<dyn Write + Send + 'static>) -> Self {
        Self {
            endpoint,
            flush_on_newline: None,
            filter: None,
            ansi: None,
        }
    }
    /// Whether to flush the endpoint after every record (default: true).
    pub fn flush_on_newline(mut self, flush: bool) -> Self {
        self.flush_on_newline = Some(flush);
        self
    }
    /// Per-output level filter (default: `Info`).
    pub fn with_filter(mut self, filter: log::LevelFilter) -> Self {
        self.filter = Some(filter);
        self
    }
    /// Enable ANSI escape codes in this output (default: off).
    pub fn with_ansi_escape_codes(mut self) -> Self {
        self.ansi = Some(true);
        self
    }
    /// Finish the builder, filling unset options with their defaults.
    pub fn build(self) -> Output {
        Output {
            endpoint: Mutex::new(self.endpoint),
            filter: self.filter.unwrap_or(log::LevelFilter::Info),
            flush_on_newline: self.flush_on_newline.unwrap_or(true),
            ansi: self.ansi.unwrap_or(false),
        }
    }
}
// Expected number of outputs; used as the inline capacity of the SmallVec
// so the common case avoids a heap allocation.
const AVG_OUTPUTS: usize = 2;
/// A `log::Log` implementation that fans records out to multiple outputs.
#[derive(Debug, Default)]
pub struct RedoxLogger {
    // Every output this logger writes to.
    outputs: SmallVec<[Output; AVG_OUTPUTS]>,
    // Global overrides applied to every output's filter; see
    // `with_min_level_override` / `with_max_level_override`.
    min_filter: Option<log::LevelFilter>,
    max_filter: Option<log::LevelFilter>,
    // Most / least verbose filter currently in use across all outputs;
    // `None` until the first output is attached.
    max_level_in_use: Option<log::LevelFilter>,
    min_level_in_use: Option<log::LevelFilter>,
    // Name prepended to the record target, rendered as "name@module::path".
    process_name: Option<String>,
}
impl RedoxLogger {
    /// Create an empty logger with no outputs.
    pub fn new() -> Self {
        Self::default()
    }
    /// Clamp `output.filter` to the global min/max overrides and fold the
    /// result into the running min/max-in-use accumulators.
    fn adjust_output_level(max_filter: Option<log::LevelFilter>, min_filter: Option<log::LevelFilter>, max_in_use: &mut Option<log::LevelFilter>, min_in_use: &mut Option<log::LevelFilter>, output: &mut Output) {
        if let Some(max) = max_filter {
            output.filter = std::cmp::max(output.filter, max);
        }
        if let Some(min) = min_filter {
            output.filter = std::cmp::min(output.filter, min);
        }
        match max_in_use {
            &mut Some(ref mut max) => *max = std::cmp::max(output.filter, *max),
            max @ &mut None => *max = Some(output.filter),
        }
        match min_in_use {
            &mut Some(ref mut min) => *min = std::cmp::min(output.filter, *min),
            min @ &mut None => *min = Some(output.filter),
        }
    }
    /// Attach another output, applying the current level overrides to it.
    pub fn with_output(mut self, mut output: Output) -> Self {
        Self::adjust_output_level(self.max_filter, self.min_filter, &mut self.max_level_in_use, &mut self.min_level_in_use, &mut output);
        self.outputs.push(output);
        self
    }
    /// Set a global minimum level override and re-apply it to all outputs.
    pub fn with_min_level_override(mut self, min: log::LevelFilter) -> Self {
        self.min_filter = Some(min);
        for output in &mut self.outputs {
            Self::adjust_output_level(self.max_filter, self.min_filter, &mut self.max_level_in_use, &mut self.min_level_in_use, output);
        }
        self
    }
    /// Set a global maximum level override and re-apply it to all outputs.
    pub fn with_max_level_override(mut self, max: log::LevelFilter) -> Self {
        self.max_filter = Some(max);
        for output in &mut self.outputs {
            Self::adjust_output_level(self.max_filter, self.min_filter, &mut self.max_level_in_use, &mut self.min_level_in_use, output);
        }
        self
    }
    /// Set the process name shown in every record's target field.
    pub fn with_process_name(mut self, name: String) -> Self {
        self.process_name = Some(name);
        self
    }
    /// Install this logger as the global `log` logger.
    ///
    /// The logger is leaked to obtain the required `'static` lifetime; the
    /// global max level is set to the most verbose filter in use.
    pub fn enable(self) -> Result<&'static Self, log::SetLoggerError> {
        let leak = Box::leak(Box::new(self));
        log::set_logger(leak)?;
        if let Some(max) = leak.max_level_in_use {
            log::set_max_level(max);
        } else {
            log::set_max_level(log::LevelFilter::Off);
        }
        Ok(leak)
    }
    /// Format one record and write it to `writer`, with or without ANSI
    /// colors depending on `ansi`.
    fn write_record<W: Write>(ansi: bool, record: &Record, process_name: Option<&str>, writer: &mut W) -> io::Result<()> {
        use termion::{color, style};
        use log::Level;
        // TODO: Log offloading to another thread or thread pool, maybe?
        let now_local = chrono::Local::now();
        // TODO: Use colors in timezone, when colors are enabled, to e.g. gray out the timezone and
        // make the actual date more readable.
        // NOTE(review): "%S.%.3f" prints two dots (%.3f already includes one)
        // and "+%:z" prepends '+' to a signed offset — confirm this output is
        // intended.
        let time = now_local.format("%Y-%m-%dT%H-%M-%S.%.3f+%:z");
        let target = record.module_path().unwrap_or(record.target());
        let level = record.level();
        let message = record.args();
        let trace_col = color::Fg(color::LightBlack);
        let debug_col = color::Fg(color::White);
        let info_col = color::Fg(color::LightBlue);
        let warn_col = color::Fg(color::LightYellow);
        let err_col = color::Fg(color::LightRed);
        let level_color: &dyn fmt::Display = match level {
            Level::Trace => &trace_col,
            Level::Debug => &debug_col,
            Level::Info => &info_col,
            Level::Warn => &warn_col,
            Level::Error => &err_col,
        };
        let dim_white = color::Fg(color::White);
        let bright_white = color::Fg(color::LightWhite);
        let regular_style = "";
        let bold_style = style::Bold;
        // Fixed: this line previously contained the corrupted token
        // `®ular_style` (mojibake of `&regular_style`), which is not
        // valid Rust.
        let [message_color, message_style]: [&dyn fmt::Display; 2] = match level {
            Level::Trace | Level::Debug => [&dim_white, &regular_style],
            Level::Info | Level::Warn | Level::Error => [&bright_white, &bold_style],
        };
        let target_color = color::Fg(color::White);
        let time_color = color::Fg(color::LightBlack);
        let reset = color::Fg(color::Reset);
        let show_lines = true;
        let line_number = if show_lines { record.line() } else { None };
        // Helper rendering an optional ":<line>" suffix, colored when ANSI
        // codes are enabled (field 1).
        struct LineFmt(Option<u32>, bool);
        impl fmt::Display for LineFmt {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                if let Some(line) = self.0 {
                    if self.1 {
                        // ansi escape codes
                        let col = color::Fg(color::LightBlack);
                        let reset = color::Fg(color::Reset);
                        write!(f, "{col:}:{line:}{reset:}", col=col, line=line, reset=reset)
                    } else {
                        // no ansi escape codes
                        write!(f, ":{}", line)
                    }
                } else {
                    write!(f, "")
                }
            }
        }
        let process_name = process_name.unwrap_or("");
        if ansi {
            writeln!(
                writer,
                "{time:} [{target:}{line:} {level:}] {msg:}",
                time=format_args!("{m:}{col:}{msg:}{rs:}{r:}", m=style::Italic, col=time_color, msg=time, r=reset, rs=style::Reset),
                line=&LineFmt(line_number, true),
                level=format_args!("{m:}{col:}{msg:}{rs:}{r:}", m=style::Bold, col=level_color, msg=level, r=reset, rs=style::Reset),
                target=format_args!("{col:}{process_name:}@{target:}{r:}", col=target_color, process_name=process_name, target=target, r=reset),
                msg=format_args!("{m:}{col:}{msg:}{rs:}{r:}", m=message_style, col=message_color, msg=message, r=reset, rs=style::Reset),
            )
        } else {
            writeln!(
                writer,
                "{time:} [{target:}{line:} {level:}] {msg:}",
                time=time,
                level=level,
                target=format_args!("{process_name}@{target}", process_name=process_name, target=target),
                line=&LineFmt(line_number, false),
                msg=message,
            )
        }
    }
}
impl log::Log for RedoxLogger {
    /// A record is enabled if at least one output could accept it, i.e. its
    /// level passes the most verbose filter in use. Per-output filtering
    /// still happens in `log`.
    fn enabled(&self, metadata: &Metadata) -> bool {
        // `max_level_in_use` is the most verbose (numerically greatest)
        // filter across outputs, so a record is enabled when its level is at
        // most that filter. The previous check required
        // `level >= max_in_use && level <= min_in_use`, which (since
        // min_in_use <= max_in_use) was only satisfiable when both bounds
        // coincided — almost every record was reported disabled.
        self.max_level_in_use
            .map(|max| metadata.level() <= max)
            .unwrap_or(false)
    }
    fn log(&self, record: &Record) {
        for output in &self.outputs {
            let mut endpoint_guard = match output.endpoint.lock() {
                Ok(e) => e,
                // poison error
                _ => continue,
            };
            if record.metadata().level() <= output.filter {
                let _ = Self::write_record(output.ansi, record, self.process_name.as_deref(), &mut *endpoint_guard);
            }
            if output.flush_on_newline {
                let _ = endpoint_guard.flush();
            }
        }
    }
    fn flush(&self) {
        for output in &self.outputs {
            match output.endpoint.lock() {
                Ok(ref mut e) => { let _ = e.flush(); }
                _ => continue,
            }
        }
    }
}
|
/// Magic string at the start of a new-format `ld.so.cache` file.
pub const CACHEMAGIC_NEW: &[u8; 17usize] = b"glibc-ld.so.cache";
/// Format version string that follows the magic.
pub const CACHE_VERSION: &[u8; 3usize] = b"1.1";
//pub const CACHEMAGIC_VERSION_NEW: &'static [u8; 20usize] = b"glibc-ld.so.cache1.1";
/// One library entry in a new-format `ld.so.cache` file.
/// Bindgen-style mirror of glibc's `struct file_entry_new`; field meanings
/// below follow glibc's layout — confirm against the glibc headers.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct FileEntryNew {
    // Flags describing the library (type/ABI bits).
    pub flags: i32,
    // Presumably an offset of the library name in the string table.
    pub key: u32,
    // Presumably an offset of the library path in the string table.
    pub value: u32,
    // Encoded required OS version.
    pub osversion: u32,
    // Hardware capability bitmask.
    pub hwcap: u64,
}
#[test]
fn bindgen_test_layout_file_entry_new() {
    // Layout check for `FileEntryNew` against the C `file_entry_new`:
    // size 24, alignment 8, fields at offsets 0/4/8/12/16.
    //
    // Field offsets are computed with MaybeUninit + `addr_of!`; the previous
    // `&(*::std::ptr::null::<T>()).field` pattern dereferences a null
    // pointer, which is undefined behavior. Modern bindgen emits this
    // MaybeUninit form for the same reason.
    let uninit = ::std::mem::MaybeUninit::<FileEntryNew>::uninit();
    let ptr = uninit.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<FileEntryNew>(),
        24usize,
        concat!("Size of: ", stringify!(file_entry_new))
    );
    assert_eq!(
        ::std::mem::align_of::<FileEntryNew>(),
        8usize,
        concat!("Alignment of ", stringify!(file_entry_new))
    );
    // SAFETY: `addr_of!` computes field addresses without creating a
    // reference, so no uninitialized memory is read.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(file_entry_new),
            "::",
            stringify!(flags)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).key) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(file_entry_new),
            "::",
            stringify!(key)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).value) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(file_entry_new),
            "::",
            stringify!(value)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).osversion) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(file_entry_new),
            "::",
            stringify!(osversion)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).hwcap) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(file_entry_new),
            "::",
            stringify!(hwcap)
        )
    );
}
/// Header of a new-format `ld.so.cache` file.
/// Bindgen-style mirror of glibc's `struct cache_file_new` — confirm field
/// semantics against the glibc headers.
#[repr(C)]
#[derive(Debug)]
pub struct CacheFileNew {
    // Must equal CACHEMAGIC_NEW.
    pub magic: [u8; 17usize],
    // Must equal CACHE_VERSION.
    pub version: [u8; 3usize],
    // Number of FileEntryNew records that follow the header.
    pub nlibs: u32,
    // Size of the string table in bytes.
    pub len_strings: u32,
    // Reserved/padding words.
    pub unused: [u32; 5usize],
}
#[test]
fn bindgen_test_layout_cache_file_new() {
    // Layout check for `CacheFileNew` against the C `cache_file_new`:
    // size 48, alignment 4, fields at offsets 0/17/20/24/28.
    //
    // Field offsets are computed with MaybeUninit + `addr_of!`; the previous
    // `&(*::std::ptr::null::<T>()).field` pattern dereferences a null
    // pointer, which is undefined behavior. Modern bindgen emits this
    // MaybeUninit form for the same reason.
    let uninit = ::std::mem::MaybeUninit::<CacheFileNew>::uninit();
    let ptr = uninit.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<CacheFileNew>(),
        48usize,
        concat!("Size of: ", stringify!(cache_file_new))
    );
    assert_eq!(
        ::std::mem::align_of::<CacheFileNew>(),
        4usize,
        concat!("Alignment of ", stringify!(cache_file_new))
    );
    // SAFETY: `addr_of!` computes field addresses without creating a
    // reference, so no uninitialized memory is read.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).magic) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(cache_file_new),
            "::",
            stringify!(magic)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).version) as usize - ptr as usize },
        17usize,
        concat!(
            "Offset of field: ",
            stringify!(cache_file_new),
            "::",
            stringify!(version)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nlibs) as usize - ptr as usize },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(cache_file_new),
            "::",
            stringify!(nlibs)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).len_strings) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(cache_file_new),
            "::",
            stringify!(len_strings)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).unused) as usize - ptr as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(cache_file_new),
            "::",
            stringify!(unused)
        )
    );
}
|
extern crate proc_macro;
use proc_macro::TokenStream;
use syn::{ parse_macro_input, DeriveInput, Meta, Lit, NestedMeta, LitStr, Type };
use quote::quote;
use proc_macro2::{Ident, Span};
/// `#[derive(Fetch)]` with an `#[api(...)]` helper attribute.
///
/// Reads the string parameters `path`, `http_method` and `returns` from
/// `#[api(path = "...", http_method = "...", returns = "...")]` and
/// generates an inherent `fetch` method that performs the request through a
/// `Client` and produces the declared return type.
#[proc_macro_derive(Fetch, attributes(api))]
pub fn derive_show(item: TokenStream) -> TokenStream {
    // parse the whole token tree
    let input = parse_macro_input!(item as DeriveInput);
    let struct_name = &input.ident;
    let generics = &input.generics;
    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    // get api attribute and parameters
    let mut returns = String::new();
    let mut path = String::new();
    // NOTE(review): `method` is parsed but never used by the generated
    // code — presumably reserved for a future expansion; confirm.
    let mut method = String::new();
    input.attrs.iter().for_each(|attr| {
        match attr.parse_meta() {
            Ok(Meta::List(ref list)) => {
                if !list.path.is_ident("api") {
                    panic!("cannot find api attribute");
                }
                list.nested.iter().for_each(|nest| {
                    match nest {
                        NestedMeta::Meta(ref lit) => {
                            match lit {
                                Meta::NameValue(ref val) => {
                                    if val.path.is_ident("path") {
                                        path = match val.lit {
                                            Lit::Str(ref param) => {
                                                param.value()
                                            }
                                            _ => panic!("cannot get path parameter."),
                                        }
                                    }
                                    if val.path.is_ident("http_method") {
                                        method = match val.lit {
                                            Lit::Str(ref param) => {
                                                param.value()
                                            }
                                            _ => panic!("cannot get http method parameter."),
                                        }
                                    }
                                    if val.path.is_ident("returns") {
                                        returns = match val.lit {
                                            Lit::Str(ref param) => {
                                                param.value()
                                            }
                                            // Fixed: this arm previously reported
                                            // "http method" for the `returns` key.
                                            _ => panic!("cannot get returns parameter."),
                                        }
                                    }
                                }
                                _ => panic!(r#"please input attribute params like: (path="", http_method="", return="")"#),
                            }
                        }
                        _ => unreachable!(),
                    }
                });
            }
            _ => unreachable!(),
        }
    });
    // rebuild the path(String) to LitStr type
    let path_ident = LitStr::new(&path, Span::call_site());
    // build the return type
    let returns_ident = Ident::new(&returns, Span::call_site());
    let return_ty = Type::Verbatim(proc_macro2::TokenStream::from(quote! {
        #returns_ident
    }));
    let expanded_fetch = quote! {
        impl #impl_generics #struct_name #ty_generics #where_clause {
            #[inline]
            pub fn fetch<C: Client>(&self, client: &C)
                -> Result<#return_ty, crate::error::Error>
            {
                client.fetch(#path_ident, self)
            }
        }
    };
    expanded_fetch.into()
}
|
use std::path::Path;
use anyhow::{anyhow, Context, Result};
use base64::Engine;
use ordered_float::NotNan;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use super::metadata::AppleDesktop;
/// Property List for the time based wallpaper.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug)]
pub struct PropertiesH24 {
    /// Theme appearance details.
    #[serde(rename = "ap", default)]
    pub appearance: Option<PropertiesAppearance>,
    /// Info about the image sequence.
    #[serde(rename = "ti")]
    pub time_info: Vec<TimeItem>,
}
/// Wallpaper appearance depending on the theme.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug)]
pub struct PropertiesAppearance {
    /// Index of the image to use for a dark theme.
    #[serde(rename = "d")]
    pub dark: i32,
    /// Index of the image to use for a light theme.
    #[serde(rename = "l")]
    pub light: i32,
}
/// Single image sequence item of the time based wallpaper.
#[derive(Deserialize, Serialize, PartialEq, Eq, Clone, Debug)]
pub struct TimeItem {
    /// Index of the image in the sequence.
    #[serde(rename = "i")]
    pub index: usize,
    /// Point in time (presumably a fraction of a day, 0.0..1.0 — confirm).
    #[serde(rename = "t")]
    pub time: NotNan<f64>,
}
/// Property List for the sun based wallpaper.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug)]
pub struct PropertiesSolar {
    /// Theme appearance details.
    #[serde(rename = "ap", default)]
    pub appearance: Option<PropertiesAppearance>,
    /// Info about the image sequence.
    #[serde(rename = "si")]
    pub solar_info: Vec<SolarItem>,
}
/// Single image sequence item of the sun based wallpaper.
#[derive(Deserialize, Serialize, PartialEq, Eq, Clone, Debug)]
pub struct SolarItem {
    /// Index of the image in the sequence.
    #[serde(rename = "i")]
    pub index: usize,
    /// Sun altitude (presumably in degrees — confirm).
    #[serde(rename = "a")]
    pub altitude: NotNan<f64>,
    /// Sun azimuth (presumably in degrees — confirm).
    #[serde(rename = "z")]
    pub azimuth: NotNan<f64>,
}
/// Shared (de)serialization helpers for the plist-backed property types.
pub trait Plist: DeserializeOwned + Serialize {
    /// Parse base64 encoded `plist`.
    ///
    /// # Errors
    /// Fails if the input is not valid base64 or not a valid plist.
    fn from_base64(base64_value: &[u8]) -> Result<Self> {
        let decoded = base64::engine::general_purpose::STANDARD
            .decode(base64_value)
            .with_context(|| "could not decode plist base64")?;
        plist::from_bytes(decoded.as_slice()).with_context(|| "could not parse plist bytes")
    }
    /// Deserialize `plist` from XML file.
    fn from_xml_file<T: AsRef<Path>>(path: T) -> Result<Self> {
        plist::from_file(path).with_context(|| "could not read plist from XML file")
    }
    /// Serialize `plist` as XML and write to a file.
    fn to_xml_file<T: AsRef<Path>>(&self, path: T) -> Result<()> {
        plist::to_file_xml(path, &self).with_context(|| "could not write plist to XML file")
    }
}
// All three property types share the default base64/XML helpers unchanged.
impl Plist for PropertiesH24 {}
impl Plist for PropertiesSolar {}
impl Plist for PropertiesAppearance {}
/// Wallpaper properties describing either time-based or sun-based schedule
#[derive(Debug)]
pub enum Properties {
    /// Time-based schedule
    H24(PropertiesH24),
    /// Sun-based schedule
    Solar(PropertiesSolar),
    /// Dark & light mode.
    Appearance(PropertiesAppearance),
}
impl Properties {
    /// Create an instance from apple desktop metadata.
    ///
    /// # Errors
    /// Fails if the embedded base64 plist cannot be decoded/parsed.
    pub fn from_apple_desktop(apple_desktop: &AppleDesktop) -> Result<Self> {
        let properties = match apple_desktop {
            AppleDesktop::H24(value) => {
                Properties::H24(PropertiesH24::from_base64(value.as_bytes())?)
            }
            AppleDesktop::Solar(value) => {
                Properties::Solar(PropertiesSolar::from_base64(value.as_bytes())?)
            }
            AppleDesktop::Apr(value) => {
                Properties::Appearance(PropertiesAppearance::from_base64(value.as_bytes())?)
            }
        };
        Ok(properties)
    }
    /// Load from XML file.
    ///
    /// Tries each schema in turn (time-based, sun-based, appearance-only)
    /// and returns the first one that parses.
    pub fn from_xml_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        if let Ok(properties_h24) = PropertiesH24::from_xml_file(&path) {
            return Ok(Self::H24(properties_h24));
        }
        if let Ok(properties_solar) = PropertiesSolar::from_xml_file(&path) {
            return Ok(Self::Solar(properties_solar));
        }
        if let Ok(properties_appearance) = PropertiesAppearance::from_xml_file(&path) {
            return Ok(Self::Appearance(properties_appearance));
        }
        Err(anyhow!(
            "invalid properties file {}",
            path.as_ref().display()
        ))
    }
    /// Save the properties as a XML file.
    pub fn to_xml_file<P: AsRef<Path>>(&self, dest_path: P) -> Result<()> {
        match self {
            Properties::H24(props) => props.to_xml_file(dest_path),
            Properties::Solar(props) => props.to_xml_file(dest_path),
            Properties::Appearance(props) => props.to_xml_file(dest_path),
        }
    }
    /// Get number of images defined by those properties.
    ///
    /// Returns 0 for an empty schedule.
    pub fn num_images(&self) -> usize {
        // We can't just count time / solar items because they can repeat the same image
        // for different times!
        let max_index = match self {
            Properties::H24(props) => props.time_info.iter().map(|item| item.index).max(),
            Properties::Solar(props) => props.solar_info.iter().map(|item| item.index).max(),
            Properties::Appearance(..) => Some(1),
        };
        // Fixed: this was `max_index.unwrap() + 1`, which panicked when the
        // time/solar item list was empty; an empty schedule has 0 images.
        max_index.map_or(0, |index| index + 1)
    }
    /// Get number of frames defined by those properties.
    /// Frames differ from images in that one image can be displayed for more than one frame.
    /// For instance: the same image in the morning and afternoon.
    pub fn num_frames(&self) -> usize {
        match self {
            Properties::H24(props) => props.time_info.len(),
            Properties::Solar(props) => props.solar_info.len(),
            Properties::Appearance(..) => 2,
        }
    }
    /// Get appearance properties if present.
    pub fn appearance(&self) -> Option<&PropertiesAppearance> {
        match self {
            Properties::Appearance(ref appearance) => Some(appearance),
            Properties::H24(PropertiesH24 {
                appearance: maybe_appearance,
                ..
            }) => maybe_appearance.as_ref(),
            Properties::Solar(PropertiesSolar {
                appearance: maybe_appearance,
                ..
            }) => maybe_appearance.as_ref(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Base64-encoded binary plist fixtures captured from real wallpaper
    // metadata; the expected values below must match their decoded contents.
    const H24_PLIST_BASE64: &str = "YnBsaXN0MDDSAQIDBFJhcFJ0adIFBgcIUWRRbBAFEAKiCQrSCwwNDlF0UWkjP9KqqqAAAAAQANILDA8QIwAAAAAAAAAAEAEIDRATIBgaHB4jNygqLDU8RQAAAAAAAAEBAAAAAAAAABEAAAAAAAAAAAAAAAAAAABH";
    const SOLAR_PLIST_BASE64: &str = "YnBsaXN0MDDSAQIDBFJhcFJzadIFBgcIUWRRbBABEACiCQrTCwwNDggPUWFRaVF6I0AuAAAAAAAAI0BgQAAAAAAA0wsMDRAHESPAUYAAAAAAACNASwAAAAAAAAgNEBMgGBocHiNCKiwuMDlJUgAAAAAAAAEBAAAAAAAAABIAAAAAAAAAAAAAAAAAAABb";
    const APPEARANCE_PLIST_BASE64: &str =
        "YnBsaXN0MDDSAQIDBFFsUWQQABABCA0PERMAAAAAAAABAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAAFQ==";
    // NOTE(review): `not_nan!` is presumably a project macro wrapping
    // `NotNan::new(..).unwrap()` — confirm where it is defined.
    #[test]
    fn test_plist_h24_from_base64() {
        let expected = PropertiesH24 {
            appearance: Some(PropertiesAppearance { dark: 5, light: 2 }),
            time_info: vec![
                TimeItem {
                    index: 0,
                    time: not_nan!(0.2916666567325592),
                },
                TimeItem {
                    index: 1,
                    time: not_nan!(0.0),
                },
            ],
        };
        let result = PropertiesH24::from_base64(H24_PLIST_BASE64.as_bytes()).unwrap();
        assert_eq!(result, expected);
    }
    #[test]
    fn test_plist_solar_from_base64() {
        let expected = PropertiesSolar {
            appearance: Some(PropertiesAppearance { dark: 1, light: 0 }),
            solar_info: vec![
                SolarItem {
                    index: 0,
                    altitude: not_nan!(15.0),
                    azimuth: not_nan!(130.0),
                },
                SolarItem {
                    index: 1,
                    altitude: not_nan!(-70.0),
                    azimuth: not_nan!(54.0),
                },
            ],
        };
        let result = PropertiesSolar::from_base64(SOLAR_PLIST_BASE64.as_bytes()).unwrap();
        assert_eq!(result, expected);
    }
    #[test]
    fn test_plist_appearance_from_base64() {
        let expected = PropertiesAppearance { dark: 1, light: 0 };
        let result = PropertiesAppearance::from_base64(APPEARANCE_PLIST_BASE64.as_bytes()).unwrap();
        assert_eq!(result, expected);
    }
}
|
/// Demo: print a fixed-size array, its length, and each index/value pair.
fn main() {
    let arr: [i32; 4] = [10, 20, 30, 40];
    println!("array is {:?}", arr);
    println!("array size is :{}", arr.len());
    // Iterate instead of indexing with a hard-coded `0..4` bound: no bounds
    // checks, and the loop stays correct if the array's length changes.
    for (index, value) in arr.iter().enumerate() {
        println!("index is: {} & value is : {}", index, value);
    }
}
use WinVersion;
use super::Cache;
/// Scan every feature in `cache` and warn about entries that could break
/// version expansion: complex (upper-bounded) winver ranges and features
/// that do not target the desktop-app partition.
pub fn sanity_check_features(cache: &mut Cache) {
    use std::collections::BTreeSet;
    let mut weird_vers = BTreeSet::new();
    cache.iter_features(|path, line, &ref feat| {
        use features::Partitions;
        /*
        What we're looking for are any features that might mess up the expansion. This currently means:
        - Features with upper limits on versions.
        - Features that *do not* target the desktop.
        */
        let mut suspect = vec![];
        if let Some(ref parts) = feat.parts {
            if (parts.clone() & Partitions::DesktopApp).is_empty() {
                suspect.push("non-desktop-app");
            }
        }
        if let Some(ref winver) = feat.winver {
            if !winver.is_simple() {
                // Collect every range end so the odd versions can be
                // reported once, after the scan.
                for &ref range in winver.ranges() {
                    weird_vers.insert(range.end);
                }
                suspect.push("complex-winver");
            }
        }
        // Idiomatic emptiness checks (`!is_empty()` instead of length
        // comparisons) — clippy::len_zero.
        if !suspect.is_empty() {
            warn!("suspect feature set: {}:{}: {} {:?}",
                path, line, suspect.join(", "), feat);
        }
    });
    if !weird_vers.is_empty() {
        warn!("suspect versions:");
        for ver in weird_vers {
            warn!(".. 0x{:08x} - {:?}",
                ver, WinVersion::from_u32_round_up(ver));
        }
    }
}
|
use std::{
sync::atomic::{self, AtomicU32, AtomicU64},
time::SystemTime,
};
use chashmap::CHashMap;
use crc::crc32;
use fal::{time::Timespec, Filesystem as _};
pub mod block_group;
pub mod disk;
pub mod extents;
pub mod htree;
pub mod inode;
pub mod journal;
pub mod superblock;
pub mod xattr;
pub use inode::Inode;
pub use journal::Journal;
pub use superblock::Superblock;
use disk::Disk;
use inode::InodeIoError;
/// Allocate a zero-filled buffer of exactly one filesystem block.
pub fn allocate_block_bytes(superblock: &Superblock) -> Box<[u8]> {
    let block_size = superblock.block_size() as usize;
    let mut bytes = Vec::with_capacity(block_size);
    bytes.resize(block_size, 0u8);
    bytes.into_boxed_slice()
}
/// Conversion from inode-level I/O results into `fal` results, logging a
/// caller-supplied warning for internal (non-`fal`) errors.
trait ConvertToFalError<T> {
    fn into_fal_result(self, warning_start: &'static str) -> fal::Result<T>;
}
impl<T> ConvertToFalError<T> for Result<T, InodeIoError> {
    fn into_fal_result(self, warning_start: &'static str) -> fal::Result<T> {
        self.map_err(|err| {
            // Errors that map onto `fal` errors are forwarded; any internal
            // error is logged with the caller's prefix instead.
            err.into_fal_error_or_with(|err| {
                log::warn!("{}, because of an internal error: {}", warning_start, err)
            })
        })
    }
}
pub use fal::{
read_u16, read_u32, read_u64, read_u8, read_uuid, write_u16, write_u32, write_u64, write_u8,
write_uuid,
};
/// An opened ext2/3/4-style filesystem on device `D`.
pub struct Filesystem<D> {
    pub superblock: Superblock,
    pub disk: Disk<D>,
    // Open file handles, keyed by handle number.
    pub(crate) fhs: CHashMap<u64, FileHandle>,
    // Next handle number to hand out (monotonically increasing).
    pub(crate) last_fh: u64,
    pub general_options: fal::Options,
    // `None` when the filesystem has no (readable) journal.
    pub(crate) journal: Option<Journal>,
    // Live counters, flushed back to the superblock on update.
    pub(crate) info: FsInfo,
}
/// Runtime filesystem counters, kept as atomics so they can be updated
/// without holding a lock; mirrored into the superblock on update.
pub(crate) struct FsInfo {
    pub(crate) free_blocks: AtomicU64,
    pub(crate) free_inodes: AtomicU32,
    pub(crate) kbs_written: AtomicU64,
}
/// What kind of object an `open` call expects at the target inode.
#[derive(Debug, PartialEq)]
enum Open {
    File,
    Directory,
}
impl<D: fal::DeviceMut> Filesystem<D> {
    /// Load the inode at `addr`, register a new file handle for it, and
    /// return the handle number.
    fn open(&mut self, addr: u32, ty: Open) -> fal::Result<u64> {
        let fh = FileHandle {
            fh: self.last_fh,
            inode: self.load_inode(addr)?,
            offset: 0,
        };
        // Opening a regular file as a directory is an error.
        if ty == Open::Directory && fh.inode.ty() == inode::InodeType::File {
            return Err(fal::Error::NotDirectory);
        }
        let num = fh.fh;
        self.fhs.insert(self.last_fh, fh);
        self.last_fh += 1;
        Ok(num)
    }
    /// Copy the live counters from `info` back into the superblock.
    fn update_superblock(&mut self) {
        // Fixed: these loads used `Ordering::Release`, but atomic *loads*
        // only accept Relaxed/Acquire/SeqCst — `load(Release)` panics at
        // runtime ("there is no such thing as a release load").
        self.superblock
            .set_free_block_count(self.info.free_blocks.load(atomic::Ordering::Acquire));
        self.superblock
            .set_free_inode_count(self.info.free_inodes.load(atomic::Ordering::Acquire));
        self.superblock
            .set_kbs_written(self.info.kbs_written.load(atomic::Ordering::Acquire));
    }
}
/// Adapter exposing the driver's `Inode` through the generic `fal::Inode` trait.
impl fal::Inode for Inode {
    type InodeAddr = u32;
    #[inline]
    fn generation_number(&self) -> Option<u64> {
        Some(self.generation())
    }
    #[inline]
    fn addr(&self) -> u32 {
        self.addr
    }
    /// Collects the inode's metadata into the generic attribute struct.
    fn attrs(&self) -> fal::Attributes<u32> {
        fal::Attributes {
            access_time: self.a_time(),
            change_time: self.c_time(),
            // Creation time is not available on all layouts; fall back to the epoch.
            creation_time: self.cr_time().unwrap_or(Timespec { sec: 0, nsec: 0 }),
            modification_time: self.m_time(),
            filetype: self.ty().into(),
            block_count: self.block_count,
            flags: self.flags,
            group_id: self.gid.into(),
            hardlink_count: self.hardlink_count.into(),
            inode: self.addr,
            permissions: self.permissions(),
            // NOTE(review): device number is hard-coded to 0 — confirm whether
            // char/block device inodes should surface their real rdev here.
            rdev: 0,
            size: self.size,
            user_id: self.uid.into(),
        }
    }
    #[inline]
    fn set_perm(&mut self, permissions: u16) {
        self.set_permissions(permissions)
    }
    #[inline]
    fn set_uid(&mut self, uid: u32) {
        self.raw.set_uid(uid, self.os)
    }
    #[inline]
    fn set_gid(&mut self, gid: u32) {
        self.raw.set_gid(gid, self.os)
    }
}
/// An open file or directory: its handle number, current seek offset, and a
/// cached copy of the inode.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct FileHandle {
    fh: u64,
    offset: u64,
    inode: Inode,
}
/// `fal` filesystem trait implementation, wiring the generic VFS-style API onto
/// this driver's inode, directory, journal and superblock machinery.
impl<D: fal::DeviceMut> fal::Filesystem<D> for Filesystem<D> {
    type InodeAddr = u32;
    type InodeStruct = Inode;
    type Options = ();
    /// The root directory of an ext filesystem is always inode 2.
    #[inline]
    fn root_inode(&self) -> u32 {
        2
    }
    /// Loads the superblock, records the mount (time, counters, mount path),
    /// then builds the in-memory state and attempts to load the journal.
    fn mount(
        mut device: D,
        general_options: fal::Options,
        _ext_specific_options: (),
        path_bytes: &[u8],
    ) -> Self {
        let mut superblock = Superblock::load(&mut device).unwrap();
        superblock.last_mount_time = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap()
            .as_secs() as u32;
        superblock.mounts_since_fsck += 1;
        // NOTE(review): this decrement can underflow once the fsck counter
        // reaches zero — confirm the field is signed or clamped elsewhere.
        superblock.mounts_left_before_fsck -= 1;
        if let Some(extended) = superblock.extended.as_mut() {
            // NOTE(review): the slice indexing panics if `path_bytes` is longer
            // than `last_mount_path` — confirm callers truncate the path.
            extended.last_mount_path[..path_bytes.len()].copy_from_slice(path_bytes);
            // NUL
            // NOTE(review): terminator is written at index 63 rather than at
            // `path_bytes.len()` — verify this matches the on-disk format.
            if path_bytes.len() < 64 {
                extended.last_mount_path[63] = 0;
            }
        }
        if !general_options.immutable {
            superblock.store(&mut device).unwrap()
        }
        // TODO: Check for feature flags here.
        let mut filesystem = Self {
            disk: Disk::new(device).unwrap(),
            fhs: CHashMap::new(),
            last_fh: 0,
            general_options,
            journal: None,
            info: FsInfo {
                free_blocks: superblock.free_block_count().into(),
                free_inodes: superblock.free_inode_count().into(),
                kbs_written: superblock.kbs_written().unwrap_or(0).into(),
            },
            superblock,
        };
        // A broken journal is not fatal; the filesystem simply runs without one.
        filesystem.journal = match Journal::load(&filesystem) {
            Ok(j) => j,
            Err(err) => {
                log::warn!("The filesystem journal failed loading: {}", err);
                None
            }
        };
        log::debug!(
            "Superblock incompatible feature flags: {:?}",
            filesystem.superblock.incompat_features()
        );
        log::debug!(
            "Superblock compatible feature flags: {:?}",
            filesystem.superblock.compat_features()
        );
        log::debug!(
            "Superblock r/o compatible feature flags: {:?}",
            filesystem.superblock.ro_compat_features()
        );
        /*let mut root = filesystem.load_inode(2).unwrap();
        let mut tree =
            extents::ExtentTree::from_inode_blocks_field(root.checksum_seed, &root.blocks).unwrap();
        extents::allocate_extent_blocks(&filesystem, &mut tree, 1337, 42).unwrap();
        extents::ExtentTree::to_inode_blocks_field(&tree, &mut root.blocks).unwrap();
        filesystem.store_inode(&root).unwrap();
        */
        filesystem
    }
    /// Loads the inode at `addr`, converting internal errors into `fal` errors.
    fn load_inode(&mut self, addr: Self::InodeAddr) -> fal::Result<Self::InodeStruct> {
        Inode::load(self, addr).into_fal_result("Inode failed to load")
    }
    /// Opens the inode at `addr` for file access; returns the new handle number.
    fn open_file(&mut self, addr: Self::InodeAddr) -> fal::Result<u64> {
        self.open(addr, Open::File)
    }
    /// Reads from the open file `fh` at `offset` into `buffer`, clamping the read
    /// to the inode size; advances the handle's offset by the amount read.
    fn read(&mut self, fh: u64, offset: u64, buffer: &mut [u8]) -> fal::Result<usize> {
        if self.fhs.get(&fh).is_some() {
            let bytes_read = {
                let inode: &inode::Inode = &self.fhs.get(&fh).unwrap().inode;
                // Check that the buffer doesn't overflow the inode size.
                // NOTE(review): if `offset` exceeds the inode size this
                // subtraction underflows — confirm callers clamp the offset.
                let bytes_to_read =
                    std::cmp::min(offset + buffer.len() as u64, inode.size()) - offset;
                let buffer = &mut buffer[..bytes_to_read as usize];
                inode
                    .read(self, offset, buffer)
                    .into_fal_result("File couldn't be read")?
            };
            self.fhs.get_mut(&fh).unwrap().offset += bytes_read as u64;
            Ok(bytes_read)
        } else {
            Err(fal::Error::BadFd)
        }
    }
    /// Writes `buffer` to the open file `fh` at `offset` and stores the updated
    /// inode; returns the number of bytes written (always the buffer length).
    fn write(&mut self, fh: u64, offset: u64, buffer: &[u8]) -> fal::Result<u64> {
        if self.fhs.get(&fh).is_some() {
            // There is no need here to check whether the buffer overflows the length, as there
            // will be allocation in that case.
            let mut inode_guard = self.fhs.get_mut(&fh).unwrap();
            inode_guard
                .inode
                .write(self, offset, buffer)
                .into_fal_result("File couldn't be written to")?;
            Inode::store(&inode_guard.inode, self)
                .into_fal_result("Inode couldn't be stored when writing to file")?;
            // The return value is the number of bytes written. Unless this driver actually splits the
            // writes depending on the buffer size, which I cannot find any real benefit of doing, the
            // return value will always be the buffer length.
            Ok(buffer.len() as u64)
        } else {
            Err(fal::Error::BadFd)
        }
    }
    /// Removes the handle from the handle table.
    fn close(&mut self, fh: u64) -> fal::Result<()> {
        // FIXME: Flush before closing.
        match self.fhs.remove(&fh) {
            Some(_) => Ok(()),
            None => Err(fal::Error::BadFd),
        }
    }
    /// Opens the inode at `inode` for directory access.
    fn open_directory(&mut self, inode: u32) -> fal::Result<u64> {
        self.open(inode, Open::Directory)
    }
    /// Returns the directory entry `offset` entries past the handle's current
    /// position, or `None` when the directory is exhausted.
    fn read_directory(
        &mut self,
        fh: u64,
        offset: i64,
    ) -> fal::Result<Option<fal::DirectoryEntry<u32>>> {
        let handle = match self.fhs.get(&fh) {
            Some(handle) => handle,
            None => return Err(fal::Error::BadFd),
        };
        if handle.inode.ty() != inode::InodeType::Dir {
            return Err(fal::Error::NotDirectory);
        }
        Ok(
            // NOTE(review): a negative `offset` cast to usize here would wrap —
            // confirm callers only pass non-negative offsets.
            match handle
                .inode
                .dir_entries(self)
                .into_fal_result("Directory couldn't be read")?
                .enumerate()
                .nth(self.fhs.get(&fh).unwrap().offset as usize + offset as usize)
            {
                Some((offset, entry)) => Some({
                    fal::DirectoryEntry {
                        filetype: entry
                            .ty(self)
                            .into_fal_result("File type couldn't be detected")?
                            .into(),
                        name: entry.name,
                        inode: entry.inode,
                        offset: offset as u64,
                    }
                }),
                None => None,
            },
        )
    }
    /// Looks up `name` in the directory inode `parent`, returning its entry or
    /// `NoEntity` when the name is absent.
    fn lookup_direntry(
        &mut self,
        parent: u32,
        name: &[u8],
    ) -> fal::Result<fal::DirectoryEntry<u32>> {
        let inode = self.load_inode(parent)?;
        if inode.ty() != inode::InodeType::Dir {
            return Err(fal::Error::NotDirectory);
        }
        let (offset, entry) = match inode
            .lookup_direntry(self, name)
            .into_fal_result("Filename couldn't be looked up in directory")?
        {
            Some(inode) => inode,
            None => return Err(fal::Error::NoEntity),
        };
        Ok(fal::DirectoryEntry {
            filetype: entry
                .ty(self)
                .into_fal_result("File type couldn't be detected")?
                .into(),
            name: entry.name,
            inode: entry.inode,
            offset: offset as u64,
        })
    }
    /// Resolves a symlink inode to its target path bytes.
    fn readlink(&mut self, inode: u32) -> fal::Result<Box<[u8]>> {
        let inode = self.load_inode(inode)?;
        if inode.ty() != inode::InodeType::Symlink {
            return Err(fal::Error::Invalid);
        }
        Ok(inode
            .symlink_target(self)
            .into_fal_result("Failed to resolve symlink")?
            .into_owned()
            .into_boxed_slice())
    }
    /// Current seek offset of `fh`. Panics on an unknown handle.
    fn fh_offset(&self, fh: u64) -> u64 {
        self.fhs.get(&fh).unwrap().offset
    }
    /// Clone of the inode cached behind `fh`. Panics on an unknown handle.
    fn fh_inode(&self, fh: u64) -> Inode {
        self.fhs.get(&fh).unwrap().inode.clone()
    }
    /// Sets the seek offset of `fh`. Panics on an unknown handle.
    fn set_fh_offset(&mut self, fh: u64, offset: u64) {
        self.fhs.get_mut(&fh).unwrap().offset = offset;
    }
    /// Summarizes superblock counters in the generic attributes struct.
    fn filesystem_attrs(&self) -> fal::FsAttributes {
        fal::FsAttributes {
            block_size: self.superblock.block_size(),
            free_blocks: self.superblock.unalloc_block_count.into(),
            available_blocks: self.superblock.unalloc_block_count.into(), // TODO: What role does reserved_block_count have?
            free_inodes: self.superblock.unalloc_inode_count.into(),
            inode_count: self.superblock.inode_count.into(),
            total_blocks: self.superblock.block_count.into(),
            max_fname_len: 255,
        }
    }
    /// Flushes cached counters and stores the superblock (unless mounted
    /// immutable).
    fn unmount(mut self) {
        if !self.general_options.immutable {
            self.update_superblock();
            self.superblock.store(&mut *self.disk.inner()).unwrap()
        }
    }
    /// Persists an inode; refused on immutable mounts.
    fn store_inode(&mut self, inode: &Inode) -> fal::Result<()> {
        if self.general_options.immutable {
            return Err(fal::Error::ReadonlyFs);
        }
        Inode::store(inode, self).into_fal_result("Failed to write inode")
    }
    fn unlink(&mut self, _parent: u32, _name: &[u8]) -> fal::Result<()> {
        unimplemented!()
    }
    /// Fetches an inline extended attribute by name; `NoData` when absent.
    fn get_xattr(&mut self, inode: &Inode, name: &[u8]) -> fal::Result<Vec<u8>> {
        // TODO: Support block-based xattrs as well.
        match inode.xattrs {
            Some(ref x) => x
                .entries
                .iter()
                .find(|(k, _)| k.name() == name)
                .ok_or(fal::Error::NoData)
                .map(|(_, v)| v.clone()),
            None => Err(fal::Error::NoData),
        }
    }
    /// Lists the names of all inline extended attributes on the inode.
    fn list_xattrs(&mut self, inode: &Inode) -> fal::Result<Vec<Vec<u8>>> {
        // TODO: Support block-based xattrs as well.
        match inode.xattrs {
            Some(ref x) => Ok(x.entries.iter().map(|(entry, _)| entry.name()).collect()),
            None => Ok(vec![]),
        }
    }
}
/// Continues a CRC-32C (Castagnoli) computation over `bytes`, where `value` is
/// the checksum so far. Both input and output are in the finalized
/// (bit-inverted) form, so the inversion is undone before updating and
/// re-applied afterwards.
pub fn calculate_crc32c(value: u32, bytes: &[u8]) -> u32 {
    let unfinalized = value ^ !0u32;
    let updated = crc32::update(unfinalized, &crc32::CASTAGNOLI_TABLE, bytes);
    updated ^ !0u32
}
|
/// Prints a short banner identifying this process as the client side.
fn connect() {
    let banner = "This is the client side";
    println!("{}", banner);
}
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT.
include!("bindgen/uses/mxm_allocator_t.rs");
include!("bindgen/uses/mxm_dc_tx_policy_t.rs");
include!("bindgen/uses/mxm_ib_dev_wc_mode_t.rs");
include!("bindgen/uses/mxm_ib_lid_path_policy_t.rs");
include!("bindgen/uses/mxm_ib_map_mode_t.rs");
include!("bindgen/uses/mxm_ib_mtu_t.rs");
include!("bindgen/uses/mxm_int_mode_t.rs");
include!("bindgen/uses/mxm_rndv_mode_t.rs");
include!("bindgen/uses/mxm_shm_kcopy_mode_t.rs");
include!("bindgen/uses/mxm_stats_aggregate_mode_t.rs");
include!("bindgen/uses/mxm_ternary_value_t.rs");
include!("bindgen/uses/mxm_tl_id_t.rs");
include!("bindgen/uses/mxm_ud_ca_t.rs");
|
#![allow(dead_code, unused_imports, unused_variables)]
use std::collections::*;
const COUNT: usize = 513401;
/// Entry point: solves and prints both puzzle parts.
fn main() {
    let answer1 = part1();
    println!("Part 1: {}", answer1);
    let answer2 = part2();
    println!("Part 2: {}", answer2);
}
/// After `COUNT` recipes exist on the scoreboard, returns the scores of the
/// next ten recipes concatenated into a digit string.
fn part1() -> String {
    let mut scoreboard = vec![3, 7];
    let (mut elf1, mut elf2) = (0, 1);
    while scoreboard.len() < COUNT + 10 {
        tick(&mut scoreboard, &mut elf1, &mut elf2);
    }
    scoreboard[COUNT..COUNT + 10]
        .iter()
        .map(|digit| digit.to_string())
        .collect()
}
/// Returns how many recipes appear to the left of the first occurrence of the
/// target digit sequence on the scoreboard.
fn part2() -> usize {
    let mut scoreboard = vec![3, 7];
    let (mut elf1, mut elf2) = (0, 1);
    let target = [5, 1, 3, 4, 0, 1];
    let t_len = target.len();
    loop {
        let score = tick(&mut scoreboard, &mut elf1, &mut elf2);
        let len = scoreboard.len();
        // A tick appends one or two recipes, so the target may end either at
        // the very last recipe...
        if len > t_len && scoreboard[len - t_len..len] == target {
            return len - t_len;
        }
        // ...or, when two digits were pushed, one before the last.
        if score >= 10 && len > t_len + 1 && scoreboard[len - 1 - t_len..len - 1] == target {
            return len - 1 - t_len;
        }
    }
}
/// Performs one round of the recipe game: sums the two elves' current recipes,
/// appends the digits of the sum to the scoreboard, then advances each elf by
/// one plus their current recipe's score (wrapping around). Returns the sum.
fn tick(recipes: &mut Vec<usize>, first: &mut usize, second: &mut usize) -> usize {
    let sum = recipes[*first] + recipes[*second];
    if sum >= 10 {
        recipes.push(sum / 10);
    }
    recipes.push(sum % 10);
    let len = recipes.len();
    *first = (*first + 1 + recipes[*first]) % len;
    *second = (*second + 1 + recipes[*second]) % len;
    sum
}
|
// Copyright (C) 2021 Subspace Labs, Inc.
// SPDX-License-Identifier: GPL-3.0-or-later
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Subspace node implementation.
use domain_client_operator::Bootstrapper;
use domain_runtime_primitives::opaque::Block as DomainBlock;
use frame_benchmarking_cli::BenchmarkCmd;
use futures::future::TryFutureExt;
use sc_cli::{ChainSpec, CliConfiguration, SubstrateCli};
use sc_consensus_slots::SlotProportion;
use sc_proof_of_time::PotComponents;
use sc_service::PartialComponents;
use sc_storage_monitor::StorageMonitorService;
use sp_core::crypto::Ss58AddressFormat;
use sp_core::traits::SpawnEssentialNamed;
use sp_domains::GenerateGenesisStateRoot;
use std::sync::Arc;
use subspace_node::domain::{
AccountId32ToAccountId20Converter, DomainCli, DomainGenesisBlockBuilder, DomainInstanceStarter,
DomainSubcommand, EVMDomainExecutorDispatch,
};
use subspace_node::{Cli, ExecutorDispatch, Subcommand};
use subspace_proof_of_space::chia::ChiaTable;
use subspace_runtime::{Block, RuntimeApi};
use subspace_service::{DsnConfig, SubspaceConfiguration, SubspaceNetworking};
type PosTable = ChiaTable;
/// Subspace node error.
///
/// Top-level error type for the node binary; variants wrap the error types of
/// each subsystem so `main` can return a single `Result`.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// Subspace service error.
    #[error(transparent)]
    SubspaceService(#[from] subspace_service::Error),
    /// CLI error.
    #[error(transparent)]
    SubstrateCli(#[from] sc_cli::Error),
    /// Substrate service error.
    #[error(transparent)]
    SubstrateService(#[from] sc_service::Error),
    /// Other kind of error.
    #[error("Other: {0}")]
    Other(String),
}
impl From<String> for Error {
#[inline]
fn from(s: String) -> Self {
Self::Other(s)
}
}
/// Reads the optional `ss58Format` property from the chain spec and, when
/// present, installs it as the process-wide default SS58 address format.
///
/// Panics (via `expect`) if the property exists but is not an unsigned number
/// within the u16 range — chain specs are treated as trusted input here.
fn set_default_ss58_version<C: AsRef<dyn ChainSpec>>(chain_spec: C) {
    let maybe_ss58_address_format = chain_spec
        .as_ref()
        .properties()
        .get("ss58Format")
        .map(|v| {
            v.as_u64()
                .expect("ss58Format must always be an unsigned number; qed")
        })
        .map(|v| {
            v.try_into()
                .expect("ss58Format must always be within u16 range; qed")
        })
        .map(Ss58AddressFormat::custom);
    if let Some(ss58_address_format) = maybe_ss58_address_format {
        sp_core::crypto::set_default_ss58_version(ss58_address_format);
    }
}
fn main() -> Result<(), Error> {
let cli = Cli::from_args();
match &cli.subcommand {
Some(Subcommand::Key(cmd)) => cmd.run(&cli)?,
Some(Subcommand::BuildSpec(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.sync_run(|config| cmd.run(config.chain_spec, config.network))?
}
Some(Subcommand::CheckBlock(cmd)) => {
let runner = cli.create_runner(cmd)?;
set_default_ss58_version(&runner.config().chain_spec);
runner.async_run(|config| {
let PartialComponents {
client,
import_queue,
task_manager,
..
} = subspace_service::new_partial::<PosTable, RuntimeApi, ExecutorDispatch>(
&config, None, None,
)?;
Ok((
cmd.run(client, import_queue).map_err(Error::SubstrateCli),
task_manager,
))
})?;
}
Some(Subcommand::ExportBlocks(cmd)) => {
let runner = cli.create_runner(cmd)?;
set_default_ss58_version(&runner.config().chain_spec);
runner.async_run(|config| {
let PartialComponents {
client,
task_manager,
..
} = subspace_service::new_partial::<PosTable, RuntimeApi, ExecutorDispatch>(
&config, None, None,
)?;
Ok((
cmd.run(client, config.database)
.map_err(Error::SubstrateCli),
task_manager,
))
})?;
}
Some(Subcommand::ExportState(cmd)) => {
let runner = cli.create_runner(cmd)?;
set_default_ss58_version(&runner.config().chain_spec);
runner.async_run(|config| {
let PartialComponents {
client,
task_manager,
..
} = subspace_service::new_partial::<PosTable, RuntimeApi, ExecutorDispatch>(
&config, None, None,
)?;
Ok((
cmd.run(client, config.chain_spec)
.map_err(Error::SubstrateCli),
task_manager,
))
})?;
}
Some(Subcommand::ImportBlocks(cmd)) => {
let runner = cli.create_runner(cmd)?;
set_default_ss58_version(&runner.config().chain_spec);
runner.async_run(|config| {
let PartialComponents {
client,
import_queue,
task_manager,
..
} = subspace_service::new_partial::<PosTable, RuntimeApi, ExecutorDispatch>(
&config, None, None,
)?;
Ok((
cmd.run(client, import_queue).map_err(Error::SubstrateCli),
task_manager,
))
})?;
}
Some(Subcommand::PurgeChain(cmd)) => {
// This is a compatibility layer to make sure we wipe old data from disks of our users
if let Some(base_dir) = dirs::data_local_dir() {
for chain in &[
"subspace_gemini_2a",
"subspace_gemini_3a",
"subspace_gemini_3b",
"subspace_gemini_3c",
"subspace_gemini_3d",
] {
let _ = std::fs::remove_dir_all(
base_dir.join("subspace-node").join("chains").join(chain),
);
}
}
let runner = cli.create_runner(&cmd.base)?;
runner.sync_run(|consensus_chain_config| {
let domain_cli = DomainCli::new(
cmd.base
.base_path()?
.map(|base_path| base_path.path().to_path_buf()),
cli.domain_args.into_iter(),
);
let domain_config = SubstrateCli::create_configuration(
&domain_cli,
&domain_cli,
consensus_chain_config.tokio_handle.clone(),
)
.map_err(|error| {
sc_service::Error::Other(format!(
"Failed to create domain configuration: {error:?}"
))
})?;
cmd.run(consensus_chain_config, domain_config)
})?;
}
Some(Subcommand::Revert(cmd)) => {
let runner = cli.create_runner(cmd)?;
set_default_ss58_version(&runner.config().chain_spec);
runner.async_run(|config| {
let PartialComponents {
client,
backend,
task_manager,
..
} = subspace_service::new_partial::<PosTable, RuntimeApi, ExecutorDispatch>(
&config, None, None,
)?;
Ok((
cmd.run(client, backend, None).map_err(Error::SubstrateCli),
task_manager,
))
})?;
}
Some(Subcommand::ChainInfo(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.sync_run(|config| cmd.run::<Block>(&config))?;
}
Some(Subcommand::Benchmark(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.sync_run(|config| {
// This switch needs to be in the client, since the client decides
// which sub-commands it wants to support.
match cmd {
BenchmarkCmd::Pallet(cmd) => {
if !cfg!(feature = "runtime-benchmarks") {
return Err(
"Runtime benchmarking wasn't enabled when building the node. \
You can enable it with `--features runtime-benchmarks`."
.into(),
);
}
cmd.run::<Block, ExecutorDispatch>(config)
}
BenchmarkCmd::Block(cmd) => {
let PartialComponents { client, .. } = subspace_service::new_partial::<
PosTable,
RuntimeApi,
ExecutorDispatch,
>(
&config, None, None
)?;
cmd.run(client)
}
BenchmarkCmd::Storage(cmd) => {
let PartialComponents {
client, backend, ..
} = subspace_service::new_partial::<PosTable, RuntimeApi, ExecutorDispatch>(
&config, None, None,
)?;
let db = backend.expose_db();
let storage = backend.expose_storage();
cmd.run(config, client, db, storage)
}
BenchmarkCmd::Overhead(_cmd) => {
todo!("Not implemented")
// let ext_builder = BenchmarkExtrinsicBuilder::new(client.clone());
//
// cmd.run(
// config,
// client,
// command_helper::inherent_benchmark_data()?,
// Arc::new(ext_builder),
// )
}
BenchmarkCmd::Machine(cmd) => cmd.run(
&config,
frame_benchmarking_cli::SUBSTRATE_REFERENCE_HARDWARE.clone(),
),
BenchmarkCmd::Extrinsic(_cmd) => {
todo!("Not implemented")
// let PartialComponents { client, .. } =
// subspace_service::new_partial(&config)?;
// // Register the *Remark* and *TKA* builders.
// let ext_factory = ExtrinsicFactory(vec![
// Box::new(RemarkBuilder::new(client.clone())),
// Box::new(TransferKeepAliveBuilder::new(
// client.clone(),
// Sr25519Keyring::Alice.to_account_id(),
// ExistentialDeposit: get(),
// )),
// ]);
//
// cmd.run(client, inherent_benchmark_data()?, &ext_factory)
}
}
})?;
}
Some(Subcommand::Domain(domain_cmd)) => match domain_cmd {
DomainSubcommand::Benchmark(cmd) => {
let runner = cli.create_runner(cmd)?;
runner.sync_run(|consensus_chain_config| {
let domain_cli = DomainCli::new(
cli.run
.base_path()?
.map(|base_path| base_path.path().to_path_buf()),
cli.domain_args.into_iter(),
);
let domain_config = domain_cli
.create_domain_configuration::<_, AccountId32ToAccountId20Converter>(
consensus_chain_config.tokio_handle,
)
.map_err(|error| {
sc_service::Error::Other(format!(
"Failed to create domain configuration: {error:?}"
))
})?;
match cmd {
BenchmarkCmd::Pallet(cmd) => {
if !cfg!(feature = "runtime-benchmarks") {
return Err(
"Runtime benchmarking wasn't enabled when building the node. \
You can enable it with `--features runtime-benchmarks`."
.into(),
);
}
cmd.run::<DomainBlock, EVMDomainExecutorDispatch>(
domain_config.service_config,
)
}
_ => todo!("Not implemented"),
}
})?;
}
_ => unimplemented!("Domain subcommand"),
},
None => {
let runner = cli.create_runner(&cli.run)?;
set_default_ss58_version(&runner.config().chain_spec);
runner.run_node_until_exit(|consensus_chain_config| async move {
let tokio_handle = consensus_chain_config.tokio_handle.clone();
let database_source = consensus_chain_config.database.clone();
let pot_components = if cli.pot_role.is_pot_enabled() {
Some(PotComponents::new(cli.pot_role.is_time_keeper()))
} else {
None
};
let consensus_chain_node = {
let span = sc_tracing::tracing::info_span!(
sc_tracing::logging::PREFIX_LOG_SPAN,
name = "Consensus"
);
let _enter = span.enter();
let dsn_config = {
let network_keypair = consensus_chain_config
.network
.node_key
.clone()
.into_keypair()
.map_err(|error| {
sc_service::Error::Other(format!(
"Failed to convert network keypair: {error:?}"
))
})?;
let dsn_bootstrap_nodes = if cli.dsn_bootstrap_nodes.is_empty() {
consensus_chain_config
.chain_spec
.properties()
.get("dsnBootstrapNodes")
.map(|d| serde_json::from_value(d.clone()))
.transpose()
.map_err(|error| {
sc_service::Error::Other(format!(
"Failed to decode DSN bootsrap nodes: {error:?}"
))
})?
.unwrap_or_default()
} else {
cli.dsn_bootstrap_nodes
};
// TODO: Libp2p versions for Substrate and Subspace diverged.
// We get type compatibility by encoding and decoding the original keypair.
let encoded_keypair = network_keypair
.to_protobuf_encoding()
.expect("Keypair-to-protobuf encoding should succeed.");
let keypair =
subspace_networking::libp2p::identity::Keypair::from_protobuf_encoding(
&encoded_keypair,
)
.expect("Keypair-from-protobuf decoding should succeed.");
DsnConfig {
keypair,
base_path: cli.run.base_path()?.map(|base_path| {
base_path.config_dir(consensus_chain_config.chain_spec.id())
}),
listen_on: cli.dsn_listen_on,
bootstrap_nodes: dsn_bootstrap_nodes,
reserved_peers: cli.dsn_reserved_peers,
// Override enabling private IPs with --dev
allow_non_global_addresses_in_dht: cli.dsn_enable_private_ips
|| cli.run.shared_params.dev,
max_in_connections: cli.dsn_in_connections,
max_out_connections: cli.dsn_out_connections,
max_pending_in_connections: cli.dsn_pending_in_connections,
max_pending_out_connections: cli.dsn_pending_out_connections,
target_connections: cli.dsn_target_connections,
external_addresses: cli.dsn_external_addresses,
}
};
let consensus_chain_config = SubspaceConfiguration {
base: consensus_chain_config,
// Domain node needs slots notifications for bundle production.
force_new_slot_notifications: !cli.domain_args.is_empty(),
subspace_networking: SubspaceNetworking::Create { config: dsn_config },
sync_from_dsn: cli.sync_from_dsn,
enable_subspace_block_relay: cli.enable_subspace_block_relay
|| cli.run.is_dev().unwrap_or(false),
};
let construct_domain_genesis_block_builder =
|backend, executor| -> Arc<dyn GenerateGenesisStateRoot> {
Arc::new(DomainGenesisBlockBuilder::new(backend, executor))
};
let partial_components =
subspace_service::new_partial::<PosTable, RuntimeApi, ExecutorDispatch>(
&consensus_chain_config,
Some(&construct_domain_genesis_block_builder),
pot_components,
)
.map_err(|error| {
sc_service::Error::Other(format!(
"Failed to build a full subspace node: {error:?}"
))
})?;
subspace_service::new_full::<PosTable, _, _, _>(
consensus_chain_config,
partial_components,
true,
SlotProportion::new(3f32 / 4f32),
)
.await
.map_err(|error| {
sc_service::Error::Other(format!(
"Failed to build a full subspace node: {error:?}"
))
})?
};
StorageMonitorService::try_spawn(
cli.storage_monitor,
database_source,
&consensus_chain_node.task_manager.spawn_essential_handle(),
)
.map_err(|error| {
sc_service::Error::Other(format!("Failed to start storage monitor: {error:?}"))
})?;
// Run a domain node.
if !cli.domain_args.is_empty() {
let span = sc_tracing::tracing::info_span!(
sc_tracing::logging::PREFIX_LOG_SPAN,
name = "Domain"
);
let _enter = span.enter();
let domain_cli = DomainCli::new(
cli.run
.base_path()?
.map(|base_path| base_path.path().to_path_buf()),
cli.domain_args.into_iter(),
);
let domain_id = domain_cli.domain_id;
let bootstrapper =
Bootstrapper::<DomainBlock, _, _>::new(consensus_chain_node.client.clone());
let domain_starter = DomainInstanceStarter {
domain_cli,
tokio_handle,
consensus_client: consensus_chain_node.client.clone(),
block_importing_notification_stream: consensus_chain_node
.block_importing_notification_stream
.clone(),
new_slot_notification_stream: consensus_chain_node
.new_slot_notification_stream
.clone(),
consensus_network_service: consensus_chain_node.network_service.clone(),
consensus_sync_service: consensus_chain_node.sync_service.clone(),
select_chain: consensus_chain_node.select_chain.clone(),
};
consensus_chain_node
.task_manager
.spawn_essential_handle()
.spawn_essential_blocking(
"domain",
None,
Box::pin(async move {
let bootstrap_result =
match bootstrapper.fetch_domain_bootstrap_info(domain_id).await
{
Err(err) => {
log::error!(
"Domain bootsrapper exited with an error {err:?}"
);
return;
}
Ok(res) => res,
};
if let Err(error) = domain_starter.start(bootstrap_result).await {
log::error!("Domain starter exited with an error {error:?}");
}
}),
);
}
consensus_chain_node.network_starter.start_network();
Ok::<_, Error>(consensus_chain_node.task_manager)
})?;
}
}
Ok(())
}
#[cfg(test)]
mod tests {
    use sc_cli::Database;
    /// Guards against the Substrate dependency being built with RocksDB
    /// support: only ParityDB backends should be selectable.
    #[test]
    fn rocksdb_disabled_in_substrate() {
        assert_eq!(
            Database::variants(),
            &["paritydb", "paritydb-experimental", "auto"],
        );
    }
}
|
use crate::custom_types::array::Array;
use crate::custom_types::bytes::LangBytes;
use crate::custom_types::dict::Dict;
use crate::custom_types::enumerate::Enumerate;
use crate::custom_types::exceptions::{
arithmetic_error, assertion_error, io_error, not_implemented, null_error, value_error,
};
use crate::custom_types::file::FileObj;
use crate::custom_types::interfaces::{Callable, Iterable, Iterator, Throwable};
use crate::custom_types::list::List;
use crate::custom_types::range::Range;
use crate::custom_types::set::Set;
use crate::custom_types::slice::Slice;
use crate::first;
use crate::fmt::format_internal;
use crate::function::Function;
use crate::name::Name;
use crate::operator::Operator;
use crate::runtime::Runtime;
use crate::std_type::Type;
use crate::std_variable::{StdVarMethod, StdVariable};
use crate::string_var::StringVar;
use crate::test_fn::test_internal;
use crate::variable::{FnResult, Variable};
/// Returns the builtin `print` function as a variable.
fn print() -> Variable {
    Function::Native(print_impl).into()
}
/// Writes each argument's string representation to stdout, one per line.
fn print_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    for value in args {
        let text = value.str(runtime)?;
        println!("{}", text);
    }
    runtime.return_0()
}
/// Returns the builtin `input` function as a variable.
fn input() -> Variable {
    Function::Native(input_impl).into()
}
/// Prints the prompt (the sole argument), reads one line from stdin, and pushes
/// it onto the runtime stack; throws an IO error if reading fails.
fn input_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    use std::io::Write;
    debug_assert_eq!(args.len(), 1);
    print!("{}", first(args).str(runtime)?);
    // BUGFIX: `print!` does not flush stdout, so without an explicit flush the
    // prompt may not be visible before we block waiting for input. Best-effort:
    // a flush failure should not abort the read.
    let _ = std::io::stdout().flush();
    let mut input = String::new();
    match std::io::stdin().read_line(&mut input) {
        // NOTE(review): the trailing newline from `read_line` is kept in the
        // pushed string — confirm whether callers expect it stripped.
        Ok(_) => runtime.push(StringVar::from(input).into()),
        Err(x) => runtime.throw_quick(io_error(), format!("Could not read from stdin: {}", x))?,
    }
    runtime.return_0()
}
/// Returns the builtin `repr` function as a variable.
fn repr() -> Variable {
    Function::Native(repr_impl).into()
}
/// Invokes the `repr` operator on the sole argument.
fn repr_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let value = first(args);
    runtime.call_op(value, Operator::Repr, vec![])
}
/// Returns the builtin `iter` function as a variable.
fn iter() -> Variable {
    Function::Native(iter_impl).into()
}
/// Invokes the `iter` operator on the sole argument.
fn iter_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let value = first(args);
    runtime.call_op(value, Operator::Iter, vec![])
}
/// Returns the builtin `reversed` function as a variable.
fn reversed() -> Variable {
    Function::Native(reversed_impl).into()
}
/// Invokes the `reversed` operator on the sole argument.
fn reversed_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let value = first(args);
    runtime.call_op(value, Operator::Reversed, vec![])
}
/// Returns the builtin `id` function as a variable.
fn id() -> Variable {
    Function::Native(id_impl).into()
}
/// Pushes the unique identity of the sole argument.
fn id_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let identity = args[0].id();
    runtime.return_1(identity.into())
}
/// Returns the builtin `enumerate` function as a variable.
fn enumerate() -> Variable {
    Function::Native(enumerate_impl).into()
}
/// Wraps the sole argument's iterator in an `Enumerate` and pushes it.
fn enumerate_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let inner = first(args).iter(runtime)?;
    let enumerated = Enumerate::new(inner);
    runtime.return_1(enumerated.into())
}
/// Returns the builtin `hash` function as a variable.
fn hash() -> Variable {
    Function::Native(hash_impl).into()
}
/// Computes and pushes the hash of the sole argument.
fn hash_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let value = first(args).hash(runtime)?;
    runtime.return_1(value.into())
}
/// Returns the builtin `option` function as a variable.
fn option() -> Variable {
    Function::Native(option_impl).into()
}
/// Wraps the sole argument in `Some` and pushes the resulting option value.
fn option_impl(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let wrapped = Option::Some(first(args));
    runtime.return_1(wrapped.into())
}
/// Returns the builtin value stored at `index` in the language's builtin table.
///
/// The numbering is part of the bytecode/ABI contract with the compiler, so the
/// mapping must not be reordered. Panics on an unknown index.
pub fn builtin_of(index: usize) -> Variable {
    match index {
        0 => print(),
        1 => Callable::cls().into(),
        2 => Type::Bigint.into(),
        3 => Type::String.into(),
        4 => Type::Bool.into(),
        5 => Range::range_type().into(),
        6 => Type::Type.into(),
        7 => iter(),
        8 => repr(),
        9 => input(),
        10 => List::list_type().into(),
        11 => Set::set_type().into(),
        12 => Type::Char.into(),
        13 => FileObj::open_type().into(),
        14 => reversed(),
        15 => Slice::slice_type().into(),
        16 => id(),
        17 => Array::array_type().into(),
        18 => enumerate(),
        19 => LangBytes::bytes_type().into(),
        20 => Dict::dict_type().into(),
        21 => Type::Object.into(),
        22 => not_implemented().into(),
        23 => Type::Tuple.into(),
        24 => Throwable::cls().into(),
        25 => Type::Null.into(),
        26 => hash(),
        27 => value_error().into(),
        28 => null_error().into(),
        29 => Iterable::cls().into(),
        30 => assertion_error().into(),
        31 => fmt_internal(),
        32 => Iterator::cls().into(),
        33 => arithmetic_error().into(),
        34 => tst_internal(),
        35 => option(),
        x => unimplemented!("Builtin number {}", x),
    }
}
/// Looks up the default implementation of an operator method shared by all
/// standard variables, returning `None` for non-operator names and for
/// operators without a default.
pub fn default_methods(name: Name) -> Option<StdVarMethod> {
    let o = match name {
        Name::Operator(op) => op,
        _ => return Option::None,
    };
    let implementation = match o {
        Operator::Repr => default_repr,
        Operator::Str => default_str,
        Operator::Equals => default_eq,
        Operator::Bool => default_bool,
        Operator::In => default_in,
        _ => return Option::None,
    };
    Option::Some(StdVarMethod::Native(implementation))
}
/// Default `repr`: `<TypeName: 0xADDR>`, based on the value's type and pointer.
fn default_repr(this: StdVariable, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    let type_name = this.get_type().str();
    let text = format!("<{}: {:#X}>", type_name, this.var_ptr());
    runtime.return_1(StringVar::from(text).into())
}
/// Default `str`: delegates to the value's `repr` operator.
fn default_str(this: StdVariable, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    let target: Variable = this.into();
    runtime.call_op(target, Operator::Repr, args)
}
/// Default truthiness: every standard variable is truthy.
fn default_bool(_this: StdVariable, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    let truthy = true;
    runtime.return_1(truthy.into())
}
fn default_eq(this: StdVariable, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
let this_var: Variable = this.into();
for arg in args {
if this_var != arg {
return runtime.return_1(false.into());
}
}
runtime.return_1(true.into())
}
/// Default `in`: iterates over `this`, returning true as soon as an element
/// compares equal (via the runtime `equals` call) to the sole argument.
fn default_in(this: StdVariable, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    let needle = first(args);
    let elements = this.iter(runtime)?;
    while let Option::Some(element) = elements.next(runtime)?.take_first() {
        if needle.clone().equals(element, runtime)? {
            return runtime.return_1(true.into());
        }
    }
    runtime.return_1(false.into())
}
/// Returns the internal string-formatting function as a variable
/// (builtin index 31).
fn fmt_internal() -> Variable {
    Function::Native(format_internal).into()
}
/// Returns the internal test-runner function as a variable (builtin index 34).
fn tst_internal() -> Variable {
    Function::Native(test_internal).into()
}
|
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use failure::Fail;
// use octo_budget_lib::auth_token::UserId;
use diesel::result::Error as DieselError;
use serde::{Serialize, Serializer};
/// User-facing validation failures; the `#[fail(display)]` strings are the
/// messages serialized into API error responses.
#[derive(Fail, Debug, Clone, Copy, PartialEq)]
pub enum ValidationError {
    #[fail(display = "Unable to log in with provided credentials.")]
    AuthFailed,
    #[fail(display = "This field may not be blank.")]
    CannotBeBlank,
    #[fail(display = "This field is required.")]
    MustPresent,
}
impl actix_web::error::ResponseError for ValidationError {
    /// Every validation failure maps to an empty `400 Bad Request` response;
    /// the message body is produced separately via the `Serialize` impl.
    fn error_response(&self) -> HttpResponse {
        HttpResponse::new(StatusCode::BAD_REQUEST)
    }
}
impl Serialize for ValidationError {
    /// Serializes the error as its human-readable `Display` message.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `collect_str` accepts any `Display` value directly; routing
        // through `self.to_string()` allocated an intermediate String
        // for no benefit. Output is byte-identical.
        serializer.collect_str(self)
    }
}
/// Application-level errors surfaced by handlers (see `ResponseError` impl
/// below: all variants currently render as 500).
#[derive(Debug, Fail)]
pub enum Error {
    /// Redis read failure when loading sorted tags.
    #[fail(display = "Cannot read sorted tags from redis {}", _0)]
    Redis(redis::RedisError),
    // #[fail(display = "Cannot find user by id: `{}'", _0)]
    // UserNotFound(UserId),
    //
    // #[fail(display = "Cannot find record")]
    // NotFound,
    //
    // #[fail(display = "Unknown database error {}", _0)]
    // UnknownDb(#[cause] diesel::result::Error),
    /// Catch-all wrapper around any other `failure::Error`.
    #[fail(display = "Unexpected error {}", _0)]
    Unknown(#[cause] failure::Error),
    // #[fail(display = "Cannot get database connection: {}", _0)]
    // Connection(#[cause] r2d2::Error),
    //
    // #[fail(display = "Cannot get database connection: {}", _0)]
    // Connection2(#[cause] diesel::r2d2::Error),
}
/// Database-layer errors; `NotFound` becomes a 404, everything else a 500
/// (see the `ResponseError` impl below).
#[derive(Debug, Fail)]
pub enum DbError {
    /// The blocking-query thread pool shut down before answering.
    #[fail(display = "Thread pool is gone")]
    ThreadPoolIsGone,
    // TODO: add search query
    /// No row matched; payload is the table name for diagnostics.
    #[fail(display = "Failed to find record from table {}", _0)]
    NotFound(&'static str),
    /// Could not check a connection out of the r2d2 pool.
    #[fail(display = "Cannot get database connection: {}", _0)]
    NoConnection(#[cause] r2d2::Error),
    /// Any diesel error other than `NotFound`.
    #[fail(display = "Unknown database error {}", _0)]
    Unknown(#[cause] diesel::result::Error),
    /// An UPDATE affected zero rows for the given table/id.
    #[fail(display = "Cannot update {} with id: `{}'", _0, _1)]
    NotUpdated(&'static str, i32),
    /// A query returned a shape the caller did not expect.
    #[fail(display = "Unexpected query result: {}", _0)]
    UnexpectedResult(&'static str),
}
pub fn add_table_name(table_name: &'static str) -> impl Fn(DieselError) -> DbError {
move |error: DieselError| match error {
DieselError::NotFound => DbError::NotFound(table_name),
_ => DbError::Unknown(error),
}
}
/// Shorthand for database-layer results.
pub type DbResult<T> = Result<T, DbError>;
impl From<r2d2::Error> for DbError {
    /// Pool checkout failures map to `NoConnection`.
    fn from(error: r2d2::Error) -> Self {
        DbError::NoConnection(error)
    }
}
impl From<diesel::result::Error> for DbError {
fn from(error: diesel::result::Error) -> Self {
match error {
DieselError::NotFound => Self::NotFound("Unspecified table"),
_ => Self::Unknown(error),
}
}
}
impl actix_web::error::ResponseError for DbError {
    /// Missing rows become 404; every other database failure is a 500.
    fn error_response(&self) -> HttpResponse {
        match self {
            DbError::NotFound(_n) => HttpResponse::new(StatusCode::NOT_FOUND),
            _ => HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR),
        }
    }
}
// pub type OctoApiResult<T> = Result<T, Error>;
impl From<redis::RedisError> for Error {
    /// Redis failures keep their own variant so callers can distinguish them.
    fn from(error: redis::RedisError) -> Self {
        Error::Redis(error)
    }
}
impl From<failure::Error> for Error {
    /// Any other `failure::Error` is wrapped as `Unknown`.
    fn from(error: failure::Error) -> Self {
        Error::Unknown(error)
    }
}
impl From<actix::MailboxError> for Error {
    /// Actor mailbox failures are folded into `Unknown` via `failure`.
    fn from(error: actix::MailboxError) -> Self {
        Error::Unknown(error.into())
    }
}
// impl From<diesel::r2d2::Error> for Error {
// fn from(error: diesel::r2d2::Error) -> Self {
// Error::Connection2(error)
// }
// }
// impl From<diesel::result::Error> for Error {
// fn from(error: diesel::result::Error) -> Self {
// match error {
// diesel::result::Error::NotFound => Error::NotFound,
// err => Error::UnknownDb(err),
// }
// }
// }
impl actix_web::error::ResponseError for Error {
    /// Every `Error` variant currently maps to a generic 500 response.
    ///
    /// The previous single-arm wildcard `match self { _ => … }` was
    /// redundant (clippy: `match_single_binding`); behavior is unchanged.
    fn error_response(&self) -> HttpResponse {
        HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR)
    }
}
|
/// The value of the Scale bits in a SIB byte
#[repr(u8)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Scale {
    /// scale factor 1 (SIB.scale = 0b00)
    Val1 = 0,
    /// scale factor 2 (SIB.scale = 0b01)
    Val2 = 64,
    /// scale factor 4 (SIB.scale = 0b10)
    Val4 = 128,
    /// scale factor 8 (SIB.scale = 0b11)
    Val8 = 192,
}
impl From<Scale> for u8 {
    /// Returns the raw scale bit pattern (discriminant value).
    ///
    /// Implemented as `From` rather than `Into` — the blanket impl still
    /// gives callers `Into<u8>` for free. The previous `mem::transmute`
    /// was needless `unsafe`: a fieldless `#[repr(u8)]` enum converts to
    /// its discriminant with a plain `as` cast.
    #[inline(always)]
    fn from(scale: Scale) -> u8 {
        scale as u8
    }
}
impl Scale {
    /// Start building the SIB value
    #[inline(always)]
    pub fn to_sib_seed(&self) -> u8 {
        // `Scale` is `Copy`, so no `clone()` is needed.
        (*self).into()
    }
}
// nom parser: maps the ASCII digit '1'/'2'/'4'/'8' to the corresponding
// `Scale` variant; any other byte is a parse error.
named!(pub parse_scale<Scale>, do_parse!(
    x: alt!(
        do_parse!(tag!(b"1") >> (Scale::Val1)) |
        do_parse!(tag!(b"2") >> (Scale::Val2)) |
        do_parse!(tag!(b"4") >> (Scale::Val4)) |
        do_parse!(tag!(b"8") >> (Scale::Val8))
    ) >>
    (x)
));
#[test]
fn test_into_sib() {
    // Each variant's discriminant already holds the scale bits in the
    // SIB byte's top two bits.
    let cases: [(Scale, u8); 4] = [
        (Scale::Val1, 0b0000_0000),
        (Scale::Val2, 0b0100_0000),
        (Scale::Val4, 0b1000_0000),
        (Scale::Val8, 0b1100_0000),
    ];
    for (scale, expected) in cases.iter() {
        assert_eq!(scale.to_sib_seed(), *expected);
    }
}
#[test]
fn test_parse_scale() {
    use super::super::super::nom::IResult;
    // Table-driven version: the original repeated the identical match
    // block four times, once per digit.
    let cases: [(&[u8], Scale); 4] = [
        (b"1 ", Scale::Val1),
        (b"2 ", Scale::Val2),
        (b"4 ", Scale::Val4),
        (b"8 ", Scale::Val8),
    ];
    for &(dut, expect) in cases.iter() {
        match parse_scale(dut) {
            IResult::Done(rem, val) => {
                // The trailing space must be left unconsumed.
                assert_eq!(rem, b" ");
                assert_eq!(val, expect);
            },
            IResult::Incomplete(n) => {
                panic!("Parse error {:?} expected {:?}", n, expect);
            },
            IResult::Error(e) => {
                panic!("Parse error {:?} expected {:?}", e, expect);
            }
        };
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2020-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Balances pallet benchmarking.
#![cfg(feature = "runtime-benchmarks")]
use super::*;
use crate::Pallet as UserPriv;
use frame_benchmarking::{account, benchmarks};
use frame_support::traits::EnsureOrigin;
use frame_system::RawOrigin;
use node_primitives::user_privileges::{Privilege, UserPrivilegeInterface};
use sp_runtime::traits::StaticLookup;
benchmarks! {
    // Grant a privilege to a fresh account via the force origin, then
    // verify it is recorded.
    set_user_privilege {
        let user: T::AccountId = account("user", 0, 2);
        let user_lookup = T::Lookup::unlookup(user.clone());
        let origin = T::ForceOrigin::successful_origin();
    }: _<T::RuntimeOrigin>(origin, user_lookup, Privilege::LockerMember)
    verify {
        assert_eq!(UserPriv::<T>::has_privilege(&user, Privilege::LockerMember),true);
    }
    // Clearing privileges on an account that has none should still succeed
    // and leave the account without the privilege.
    clear_user_privilege {
        let user: T::AccountId = account("user", 0, 2);
        let user_lookup = T::Lookup::unlookup(user.clone());
        let origin = T::ForceOrigin::successful_origin();
    }: _<T::RuntimeOrigin>(origin, user_lookup)
    verify {
        assert_eq!(UserPriv::<T>::has_privilege(&user, Privilege::LockerMember),false);
    }
    // An account holding `EvmAddressSetter` grants a privilege to an EVM
    // address as a signed (non-force) call.
    set_evm_privilege {
        let user: T::AccountId = account("user", 0, 1);
        let user_lookup = T::Lookup::unlookup(user.clone());
        let origin = T::ForceOrigin::successful_origin();
        let _ = UserPriv::<T>::set_user_privilege(origin, user_lookup, Privilege::EvmAddressSetter);
    }: _(RawOrigin::Signed(user), H160::from_low_u64_be(88), Privilege::LockerMember)
    verify {
        assert_eq!(UserPriv::<T>::has_evm_privilege(&H160::from_low_u64_be(88), Privilege::LockerMember),true);
    }
    // Clearing an EVM privilege leaves the address without it.
    clear_evm_privilege {
        let user: T::AccountId = account("user", 0, 1);
        let user_lookup = T::Lookup::unlookup(user.clone());
        let origin = T::ForceOrigin::successful_origin();
        let _ = UserPriv::<T>::set_user_privilege(origin, user_lookup, Privilege::EvmAddressSetter);
    }: _(RawOrigin::Signed(user), H160::from_low_u64_be(88))
    verify {
        assert_eq!(UserPriv::<T>::has_evm_privilege(&H160::from_low_u64_be(88), Privilege::LockerMember),false);
    }
    impl_benchmark_test_suite!(UserPriv, crate::tests::new_test_ext(), crate::tests::Test);
}
|
extern crate clap;
#[macro_use]
extern crate lazy_static;
extern crate env_logger;
extern crate hyper;
#[macro_use]
extern crate log;
extern crate rustc_serialize;
mod models;
use clap::{Arg, App, SubCommand};
use hyper::Url;
use hyper::client::Client;
use hyper::status::StatusCode;
use hyper::client::response::Response;
use models::{LabsReport, LabsEndpoint};
use rustc_serialize::json;
use std::{thread, time};
use std::io::Read;
lazy_static! {
    // Base URL of the SSL Labs v2 REST API; all request URLs are built on it.
    static ref API_ENDPOINT: String = "https://api.ssllabs.com/api/v2/".to_owned();
    // Single shared HTTP client reused for every request.
    static ref CLIENT: Client = Client::new();
}
/// Performs a GET against `url`, panicking on an invalid URL or a transport
/// failure, and returns the full response body together with the response.
fn make_get_request(url: String) -> (String, Response) {
    let finalized_url = match Url::parse(&url) {
        Ok(parsed) => parsed,
        Err(_) => panic!("Somehow got an invalid URL: [ {} ]", url),
    };
    let mut resp = match CLIENT.get(finalized_url).send() {
        Ok(response) => response,
        Err(cause) => panic!("Failed to send HTTP Request to: [ {} ] cause: [ {:?} ]", url, cause),
    };
    let mut return_value = String::new();
    resp.read_to_string(&mut return_value).unwrap();
    (return_value, resp)
}
fn ensure_success(resp: Response) {
if resp.status == StatusCode::BadRequest {
panic!("Somehow invalid parameters were passed.");
} else if resp.status == StatusCode::TooManyRequests {
panic!("You're sending too many requests! Slow down cowboy!");
} else if resp.status == StatusCode::InternalServerError {
panic!("Internal Server Error :(");
} else if resp.status == StatusCode::ServiceUnavailable {
panic!("Service not available right now!");
}
}
fn start_new_scan(host: String) {
info!("Starting Brand New Scan for: [ {} ]", host);
let (_, _) = make_get_request(format!("{}analyze?host={}&startNew=true&all=done", *API_ENDPOINT, host));
while true {
// Be nice to SSLLabs
thread::sleep(time::Duration::from_secs(5));
let (first_req, resp) = make_get_request(format!("{}analyze?host={}", *API_ENDPOINT, host));
ensure_success(resp);
let decoded: LabsReport = json::decode(&first_req).unwrap();
info!("Status: [ {} ]", decoded.status);
if decoded.status == "READY".to_string() {
let endpoints = decoded.endpoints;
for ref endpoint in endpoints.iter() {
let (endpoint_req, end_resp) = make_get_request(format!("{}getEndpointData?host={}&s={}", *API_ENDPOINT, host, endpoint.ipAddress));
ensure_success(end_resp);
let results: LabsEndpoint = json::decode(&endpoint_req).unwrap();
debug!("{:?}", results);
println!("IP Address: [ {} ]. Grade: [ {} ].", results.ipAddress, results.grade.unwrap());
}
return;
}
}
}
/// Fetches and prints the cached endpoint report for `host` at address `ip`.
fn get_endpoint_data(host: String, ip: String) {
    info!("Grabbing Endpoint Data for: [ {} ]", host);
    let (string_data, resp) = make_get_request(format!("{}getEndpointData?host={}&s={}", *API_ENDPOINT, host, ip));
    ensure_success(resp);
    let results: LabsEndpoint = match json::decode(&string_data) {
        Ok(decoded) => decoded,
        Err(cause) => panic!("Couldn't grab old JSON: [ {:?} ]", cause),
    };
    debug!("{:?}", results);
    println!("IP Address: [ {} ]. Grade: [ {} ].", results.ipAddress, results.grade.unwrap());
}
/// Entry point: parses the `scan_new` / `get_old` subcommands and
/// dispatches to the matching handler.
fn main() {
    env_logger::init().unwrap();
    info!("Starting SslScan...");
    // clap CLI definition: `scan_new` needs -h <host>; `get_old` needs
    // -h <host> and -i <ip>.
    let matches = App::new("sslscan")
        .version("1.0.0")
        .about("Scans SSL Certs with SSL Labs so you don't have too.")
        .subcommand(
            SubCommand::with_name("scan_new")
                .version("1.0.0")
                .arg(
                    Arg::with_name("host")
                        .short("h")
                        .help("The host to scan.")
                        .required(true)
                        .takes_value(true)
                )
        )
        .subcommand(
            SubCommand::with_name("get_old")
                .version("1.0.0")
                .arg(
                    Arg::with_name("host")
                        .short("h")
                        .help("The host to scan.")
                        .required(true)
                        .takes_value(true)
                )
                .arg(
                    Arg::with_name("ip")
                        .short("i")
                        .help("The IP of the host.")
                        .required(true)
                        .takes_value(true)
                )
        )
        .get_matches();
    // Required args are enforced by clap, so unwrap() here cannot fail.
    if let Some(matches) = matches.subcommand_matches("scan_new") {
        return start_new_scan(matches.value_of("host").unwrap().to_owned());
    }
    if let Some(matches) = matches.subcommand_matches("get_old") {
        return get_endpoint_data(matches.value_of("host").unwrap().to_owned(), matches.value_of("ip").unwrap().to_owned());
    }
    println!("Please provide A valid command! Use `--help` if you're unsure.");
}
|
/// Demo struct that borrows a student's name for lifetime `'a`.
// NOTE(review): Rust convention is UpperCamelCase (`Student`); left as-is
// here because `main` below constructs it by this name.
#[derive(Debug)]
struct student<'a> {
    // Borrowed slice into the source `String` owned by the caller.
    name: &'a str,
}
/// Builds a `student` from the text before the first '.' in a name string
/// and pretty-prints it.
fn main() {
    let raw_name = String::from("AreebSiddiqui");
    // `split('.').next()` always yields at least the full string, so the
    // `expect` here can never actually fire.
    let first_part = raw_name
        .split('.')
        .next()
        .expect("Could not find '.'");
    let record = student { name: first_part };
    println!("{:#?}", record);
}
//*DIFFERENCE BETWEEN STRING AND STR*
//**
// fn main(){
// let mut s = String::from("Hello, World!");
// let _s = "Hello, world";
// println!("Capacity:{}",s.capacity());
// s.push_str("lalala");
// println!("Len:{}",s.len());
// } |
// #[macro_use]
// extern crate lazy_static;
use base64::encode as base64_encode;
use num_enum::{IntoPrimitive, TryFromPrimitive, TryFromPrimitiveError};
use serde_json::Value;
use std::collections::HashMap;
use std::convert::{From, TryFrom, TryInto};
use std::error::Error;
use std::fmt;
use std::time::{SystemTime, UNIX_EPOCH};
use core::array::TryFromSliceError;
// lazy_static! {
// static ref CURVE25519: () = {};
// }
// Only wire-format version 0 is currently accepted.
pub const MIN_SUPPORTED_VERSION: u8 = 0;
pub const MAX_SUPPORTED_VERSION: u8 = 0;
// ASCII "BWT" — magic prefix of every serialized header.
pub const MAGIC_BWT: [u8; 3] = [66, 87, 84];
// Upper bound on an encoded token's length, in characters.
pub const MAX_TOKEN_CHARS: usize = 4096;
pub const SECRET_KEY_BYTES: usize = 32;
pub const PUBLIC_KEY_BYTES: usize = 32;
pub const NONCE_BYTES: usize = 12;
pub const KID_BYTES: usize = 16;
// base64(16 bytes) => 24 characters.
pub const BASE64_KID_CHARS: usize = 24;
// magic(3) + typ(1) + iat(8) + exp(8) + kid(16) + nonce(12) = 48 bytes.
pub const HEADER_BYTES: usize = 48;
/// Errors produced while parsing or validating BWT tokens.
#[derive(Debug)]
pub enum BWTError {
    /// Header did not start with the `MAGIC_BWT` bytes.
    InvalidMagicBytes,
    /// Header slice had the wrong length (payload is the actual length).
    InvalidHeaderBytes(usize),
    /// Version byte outside [MIN_SUPPORTED_VERSION, MAX_SUPPORTED_VERSION].
    UnsupportedVersion(u8),
    /// Any other underlying error, boxed as a trait object.
    Other(Box<dyn Error>),
}
/// Token format version tag; the `u8` discriminant is the wire version byte.
#[derive(Copy, Clone, Debug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)]
#[repr(u8)]
pub enum Typ {
    // Version 0 (the only supported version today).
    BWTv0,
}
/// Public token header exposed to callers (no wire-only fields).
#[derive(Clone, Debug)]
pub struct Header {
    // Format version.
    typ: Typ,
    // Issued-at timestamp, in milliseconds since the Unix epoch.
    iat: u64,
    // Expiry timestamp, in milliseconds since the Unix epoch.
    exp: u64,
    // Key identifier of the signing key pair.
    kid: [u8; KID_BYTES],
}
/// A decoded token: its header plus the JSON body payload.
#[derive(Clone, Debug)]
pub struct Contents {
    header: Header,
    body: Value,
}
/// A local key pair together with its key identifier.
#[derive(Clone, Debug)]
pub struct KeyPair {
    secret_key: [u8; SECRET_KEY_BYTES],
    public_key: [u8; PUBLIC_KEY_BYTES],
    kid: [u8; KID_BYTES],
}
/// A peer's public key and its raw key identifier.
#[derive(Clone, Debug)]
pub struct PeerPublicKey {
    public_key: [u8; PUBLIC_KEY_BYTES],
    kid: [u8; KID_BYTES],
}
/// Internal peer-key form with the kid pre-encoded as base64 for map lookups.
#[derive(Clone, Debug)]
pub struct InternalPeerPublicKey {
    public_key: [u8; PUBLIC_KEY_BYTES],
    base64_kid: String,
}
/// Internal header form: the public `Header` fields plus wire-only state
/// (the nonce and the base64-encoded kid cache).
#[derive(Clone, Debug)]
pub struct InternalHeader {
    typ: Typ,
    iat: u64,
    exp: u64,
    kid: [u8; KID_BYTES],
    // Per-token nonce carried in the header's last 12 bytes.
    nonce: [u8; NONCE_BYTES],
    // base64(kid), cached to avoid re-encoding on every lookup.
    base64_kid: String,
}
/// Newtype map from base64-encoded kid to the peer's raw public key.
#[derive(Clone, Debug)]
struct PeerPublicKeyMap(HashMap<String, [u8; PUBLIC_KEY_BYTES]>);
impl Error for BWTError {
    /// Exposes the wrapped error (if any) as this error's source.
    fn source(self: &BWTError) -> Option<&(dyn Error + 'static)> {
        match self {
            // The cause is the underlying implementation error type. Is implicitly
            // cast to the trait object `&error::Error`. This works because the
            // underlying type already implements the `Error` trait.
            BWTError::Other(err) => Some(&**err),
            // The remaining variants carry no underlying cause.
            _ => None,
        }
    }
}
impl fmt::Display for BWTError {
    /// Human-readable message per variant; `Other` delegates to the
    /// wrapped error's own `Display`.
    fn fmt(self: &BWTError, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            BWTError::InvalidMagicBytes => write!(f, "invalid magic bytes"),
            BWTError::InvalidHeaderBytes(n) => write!(f, "invalid number of header bytes {}", n),
            BWTError::UnsupportedVersion(version) => write!(f, "unsupported version {}", version),
            BWTError::Other(err) => err.fmt(f),
        }
    }
}
impl From<TryFromPrimitiveError<Typ>> for BWTError {
    /// Lets `?` convert version-byte decode failures into `BWTError::Other`.
    fn from(err: TryFromPrimitiveError<Typ>) -> BWTError {
        BWTError::Other(Box::new(err))
    }
}
impl From<TryFromSliceError> for BWTError {
    /// Lets `?` convert slice→array length mismatches into `BWTError::Other`.
    fn from(err: TryFromSliceError) -> BWTError {
        BWTError::Other(Box::new(err))
    }
}
impl TryFrom<&[u8]> for InternalHeader {
    type Error = BWTError;
    /// Parses the 48-byte wire layout:
    /// magic(0..3) | typ(3) | iat(4..12) | exp(12..20) | kid(20..36) | nonce(36..48).
    fn try_from(buf: &[u8]) -> Result<InternalHeader, BWTError> {
        if buf.len() < HEADER_BYTES {
            Err(BWTError::InvalidHeaderBytes(buf.len()))
        } else {
            Ok(InternalHeader {
                typ: try_derive_typ(buf)?,
                // Timestamps are big-endian u64 milliseconds.
                iat: u64::from_be_bytes(buf[4..12].try_into()?),
                exp: u64::from_be_bytes(buf[12..20].try_into()?),
                kid: buf[20..36].try_into()?,
                nonce: buf[36..48].try_into()?,
                // Cache the base64 kid for later map lookups.
                base64_kid: base64_encode(&buf[20..36]),
            })
        }
    }
}
impl TryFrom<(&Header, &[u8])> for NeighbourHeaderPlaceholder {
    type Error = BWTError;
    fn try_from((header, nonce): (&Header, &[u8])) -> Result<InternalHeader, BWTError> {
        Ok(InternalHeader {
            typ: header.typ,
            iat: header.iat,
            exp: header.exp,
            kid: header.kid,
            nonce: nonce.try_into()?,
            base64_kid: base64_encode(&header.kid),
        })
    }
}
impl From<InternalHeader> for Header {
    /// Strips the wire-only fields (`nonce`, `base64_kid`) to produce the
    /// public `Header` view.
    ///
    /// Implemented as `From` instead of the previous hand-written `Into`
    /// (clippy: `from_over_into`); existing `.into()` call sites keep
    /// working via the blanket `Into` impl.
    fn from(internal: InternalHeader) -> Header {
        Header {
            typ: internal.typ,
            iat: internal.iat,
            exp: internal.exp,
            kid: internal.kid,
        }
    }
}
impl From<InternalHeader> for [u8; HEADER_BYTES] {
    /// Serializes the header into its fixed 48-byte wire layout:
    /// magic(3) | typ(1) | iat(8) | exp(8) | kid(16) | nonce(12).
    ///
    /// Implemented as `From` instead of the previous hand-written `Into`
    /// (clippy: `from_over_into`); `.into()` call sites are unaffected.
    fn from(header: InternalHeader) -> [u8; HEADER_BYTES] {
        let mut buf: [u8; HEADER_BYTES] = [0u8; HEADER_BYTES];
        buf[0..3].copy_from_slice(&MAGIC_BWT);
        buf[3] = header.typ.into();
        // Timestamps are written big-endian.
        buf[4..12].copy_from_slice(&header.iat.to_be_bytes());
        buf[12..20].copy_from_slice(&header.exp.to_be_bytes());
        buf[20..36].copy_from_slice(&header.kid);
        buf[36..48].copy_from_slice(&header.nonce);
        buf
    }
}
impl From<&[PeerPublicKey]> for PeerPublicKeyMap {
    /// Indexes peer public keys by the base64 encoding of their kid.
    /// A duplicate kid keeps the last key seen, as before.
    fn from(peer_public_keys: &[PeerPublicKey]) -> PeerPublicKeyMap {
        let map = peer_public_keys
            .iter()
            .map(|peer| (base64_encode(&peer.kid[..]), peer.public_key))
            .collect::<HashMap<String, [u8; PUBLIC_KEY_BYTES]>>();
        PeerPublicKeyMap(map)
    }
}
impl Header {
    /// True when the version byte is in the supported range and the token
    /// is currently live (`iat <= now < exp`, millisecond wall clock).
    fn is_valid(self: &Header) -> bool {
        let typ: u8 = self.typ.into();
        let now: u64 = match SystemTime::now().duration_since(UNIX_EPOCH) {
            Ok(duration) => duration.as_millis() as u64,
            // A pre-epoch clock means timestamps cannot be validated.
            _ => return false,
        };
        (MIN_SUPPORTED_VERSION..=MAX_SUPPORTED_VERSION).contains(&typ)
            && self.iat <= now
            && self.exp > now
    }
}
impl InternalHeader {
    /// Like `Header::is_valid`, plus a check that the cached base64 kid
    /// has the expected encoded length.
    fn is_valid(self: &InternalHeader) -> bool {
        let typ: u8 = self.typ.into();
        let now: u64 = match SystemTime::now().duration_since(UNIX_EPOCH) {
            Ok(duration) => duration.as_millis() as u64,
            // A pre-epoch clock means timestamps cannot be validated.
            _ => return false,
        };
        self.base64_kid.len() == BASE64_KID_CHARS
            && (MIN_SUPPORTED_VERSION..=MAX_SUPPORTED_VERSION).contains(&typ)
            && self.iat <= now
            && self.exp > now
    }
}
/// Validates the magic bytes and version field of a raw header and decodes
/// the `Typ`.
///
/// The magic comparison ORs the XOR differences of all three bytes before
/// branching, so the branch does not reveal which byte differed.
fn try_derive_typ(buf: &[u8]) -> Result<Typ, BWTError> {
    // Robustness: `buf[3]` previously panicked on a slice shorter than 4
    // bytes; report it as an invalid header length instead. (The caller in
    // `TryFrom<&[u8]>` already guarantees >= HEADER_BYTES.)
    if buf.len() < 4 {
        return Err(BWTError::InvalidHeaderBytes(buf.len()));
    }
    let version: u8 = buf[3];
    let mut diff: u8 = 0;
    for i in 0..3 {
        diff |= buf[i] ^ MAGIC_BWT[i];
    }
    if diff != 0 {
        Err(BWTError::InvalidMagicBytes)
    } else if version < MIN_SUPPORTED_VERSION || version > MAX_SUPPORTED_VERSION {
        Err(BWTError::UnsupportedVersion(version))
    } else {
        Ok(Typ::try_from(version)?)
    }
}
/// Joins the base64-encoded aad, ciphertext, and tag with '.' separators
/// to form the final token string.
fn concat_token(aad: &[u8], ciphertext: &[u8], tag: &[u8]) -> String {
    let parts = [
        base64_encode(aad),
        base64_encode(ciphertext),
        base64_encode(tag),
    ];
    parts.join(".")
}
// /** Creates a nonce generator that is based on the current timestamp. */
// function* createNonceGenerator(): Generator {
// let base: bigint = BigInt(String(Date.now()).slice(-NONCE_BYTES));
//
// for (;;) {
// yield encode(String(++base), "utf8");
// }
// }
//
// /** Transforms a collection of public keys to a map representation. */
// function toPublicKeyMap(
// ...peerPublicKeys: PeerPublicKey[]
// ): Map<string, Uint8Array> {
// const map: Map<string, Uint8Array> = new Map<string, Uint8Array>();
//
// for (const peerPublicKey of peerPublicKeys) {
// map.set(peerPublicKey.kid as string, peerPublicKey.publicKey as Uint8Array);
// }
//
// return map;
// }
//
// /** Concatenates aad, ciphertext, and tag to a token. */
// function assembleToken(
// aad: Uint8Array,
// ciphertext: Uint8Array,
// tag: Uint8Array
// ): string {
// return (
// decode(aad, "base64") +
// "." +
// decode(ciphertext, "base64") +
// "." +
// decode(tag, "base64")
// );
// }
//
// /** Whether given input is a valid BWT header object. */
// function isValidHeader(x: any): boolean {
// const now: number = Date.now();
// return (
// x &&
// SUPPORTED_VERSIONS.has(x.typ) &&
// x.kid &&
// x.kid.length === BASE64_KID_CHARS &&
// x.iat >= 0 &&
// x.iat % 1 === 0 &&
// x.iat <= now &&
// x.exp >= 0 &&
// x.exp % 1 === 0 &&
// x.exp > now
// );
// }
//
// /** Whether given input is a valid BWT secret key. */
// function isValidSecretKey(x: Uint8Array): boolean {
// return x && x.byteLength === SECRET_KEY_BYTES;
// }
//
// /**
// * Whether given input is a valid BWT peer public key.
// *
// * This function must be passed normalized peer public keys as it assumes a
// * buffer publicKey prop for the byte length check.
// */
// function isValidPeerPublicKey(x: PeerPublicKey): boolean {
// return (
// x &&
// x.kid &&
// x.kid.length === BASE64_KID_CHARS &&
// x.publicKey.length === PUBLIC_KEY_BYTES
// );
// }
//
// /** Whether given input string has a valid token size. */
// function hasValidTokenSize(x: string): boolean {
// return x && x.length <= MAX_TOKEN_CHARS;
// }
//
// /** Efficiently derives a shared key from recurring kid strings. */
// function deriveSharedKeyProto(
// secretKey: Uint8Array,
// sharedKeyCache: Map<string, Uint8Array>,
// defaultPublicKeyMap: Map<string, Uint8Array>,
// defaultKid: string,
// kid: string = defaultKid,
// ...peerPublicKeySpace: PeerPublicKey[]
// ): Uint8Array {
// if (sharedKeyCache.has(kid)) {
// return sharedKeyCache.get(kid);
// }
//
// let publicKey: Uint8Array;
//
// if (peerPublicKeySpace.length) {
// let peerPublicKey: PeerPublicKey = peerPublicKeySpace.find(
// ({ kid: _kid }: PeerPublicKey): boolean => _kid === kid
// );
//
// publicKey = peerPublicKey ? (peerPublicKey.publicKey as Uint8Array) : null;
// } else if (defaultPublicKeyMap.has(kid)) {
// publicKey = defaultPublicKeyMap.get(kid);
// }
//
// if (!publicKey) {
// return null;
// }
//
// const sharedKey: Uint8Array = CURVE25519.scalarMult(secretKey, publicKey);
//
// sharedKeyCache.set(kid, sharedKey);
//
// return sharedKey;
// }
#[cfg(test)]
mod tests;
|
// Module re-exports; contents live in sibling files (names suggest
// generated proto definitions — op defs, attr values, tensors — but the
// bodies are not visible here).
pub mod op_def;
pub mod attr_value;
pub mod types;
pub mod tensor_shape;
pub mod tensor;
pub mod resource_handle;
|
#[macro_use]
extern crate stdweb;
extern crate rand;
use rand::Rng;
// Candidate lunch spots; the array length must match the number of
// uncommented entries (currently 16).
static FOOD_PLACES: [&str; 16] = [
    "Bar Burrito",
    // "Bento Ya",
    "Dough",
    "Herbivore Kitchen",
    "Hula",
    "Let us eat",
    // "Maki and Ramen",
    "Mama's",
    "Pizza Express",
    "Pumpkin Brown",
    "Redbox",
    "Söderberg",
    "Taquito",
    "The Red Squirrel",
    "Ting Thai Caravan",
    "Wagamama",
    "Wildman Wood",
    "Zizzi's",
]; 
/// Picks a random entry from `FOOD_PLACES` and returns its name.
fn choose() -> String {
    let mut rng = rand::thread_rng();
    // `choose` on a non-empty array always yields Some, so unwrap is safe.
    let choice = rng.choose(&FOOD_PLACES).unwrap();
    // Bug fix: the random choice was computed but then discarded — the
    // function always returned the placeholder "blah". Return the pick.
    String::from(*choice)
}
fn main() {
    // Boot the stdweb runtime, then export `choose` to the JavaScript side.
    stdweb::initialize();
    js! {
        Module.exports.choose = @{choose};
    }
}
|
// svd2rust-generated reader/writer aliases for the RCC_TZCR register and
// its two single-bit fields.
#[doc = "Register `RCC_TZCR` reader"]
pub type R = crate::R<RCC_TZCR_SPEC>;
#[doc = "Register `RCC_TZCR` writer"]
pub type W = crate::W<RCC_TZCR_SPEC>;
#[doc = "Field `TZEN` reader - TZEN"]
pub type TZEN_R = crate::BitReader;
#[doc = "Field `TZEN` writer - TZEN"]
pub type TZEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MCKPROT` reader - MCKPROT"]
pub type MCKPROT_R = crate::BitReader;
#[doc = "Field `MCKPROT` writer - MCKPROT"]
pub type MCKPROT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors extracting TZEN (bit 0) and MCKPROT (bit 1).
impl R {
    #[doc = "Bit 0 - TZEN"]
    #[inline(always)]
    pub fn tzen(&self) -> TZEN_R {
        TZEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - MCKPROT"]
    #[inline(always)]
    pub fn mckprot(&self) -> MCKPROT_R {
        MCKPROT_R::new(((self.bits >> 1) & 1) != 0)
    }
}
// Field writers for TZEN (bit offset 0) and MCKPROT (bit offset 1).
impl W {
    #[doc = "Bit 0 - TZEN"]
    #[inline(always)]
    #[must_use]
    pub fn tzen(&mut self) -> TZEN_W<RCC_TZCR_SPEC, 0> {
        TZEN_W::new(self)
    }
    #[doc = "Bit 1 - MCKPROT"]
    #[inline(always)]
    #[must_use]
    pub fn mckprot(&mut self) -> MCKPROT_W<RCC_TZCR_SPEC, 1> {
        MCKPROT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY contract (generated code): the caller must ensure the raw
    // bit pattern is valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register is used to switch the RCC into secure mode. This register can only be accessed in secure mode.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_tzcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_tzcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_TZCR_SPEC;
impl crate::RegisterSpec for RCC_TZCR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_tzcr::R`](R) reader structure"]
impl crate::Readable for RCC_TZCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_tzcr::W`](W) writer structure"]
impl crate::Writable for RCC_TZCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_TZCR to value 0x03"]
impl crate::Resettable for RCC_TZCR_SPEC {
    const RESET_VALUE: Self::Ux = 0x03;
}
|
/// Demonstrates `match` with literal, range, and wildcard arms by mapping
/// a phone country code to a country name.
pub fn run() {
    println!("\n====3.16 MATCH====");
    let country_code = 44;
    // TODO: Try to use another country codes
    // let country_code = 55;
    // let country_code = 2000;
    // Arms are tried top to bottom, so the specific codes must precede
    // the 1..=1000 catch-all range.
    let country = match country_code {
        44 => "UK",
        // typo fix: was "Sweeden"
        46 => "Sweden",
        7 => "Russia",
        1..=1000 => "unknown",
        _ => "invalid"
    };
    // typo fix: was "coutry"
    println!("The country with code {} is {}", country_code, country);
}
use crate::*;
/// A raw disk paired with the partition layout read from it.
pub struct PartitionedDisk<D: Disk> {
    raw_disk: D,
    layout: DiskLayout,
}
// Dereferences to the underlying raw disk so `Disk` methods are reachable
// directly on the wrapper.
impl<D: Disk> core::ops::Deref for PartitionedDisk<D> {
    type Target = D;
    fn deref(&self) -> &Self::Target {
        &self.raw_disk
    }
}
impl<D: Disk> PartitionedDisk<D> {
    /// Reads the partition layout from `raw_disk` and wraps both together.
    pub fn new(raw_disk: D) -> Result<Self> {
        let layout = DiskLayout::read(&raw_disk)?;
        Ok(Self { raw_disk, layout })
    }
    /// The layout read from disk at construction time.
    pub fn layout(&self) -> &DiskLayout {
        &self.layout
    }
    /// Iterates over the partitions described by the layout.
    pub fn partitions(&self) -> Partitions<'_, D> {
        Partitions {
            // `self` is already `&PartitionedDisk<D>`; the previous
            // `&self` built a `&&_` that only compiled via deref coercion
            // (clippy: `needless_borrow`).
            disk: self,
            iter: self.layout.partitions(),
        }
    }
}
/// Iterator over a disk's partitions, pairing layout entries with the disk.
pub struct Partitions<'d, D: Disk + 'd> {
    disk: &'d PartitionedDisk<D>,
    iter: DiskLayoutParts<'d>,
}
impl<'d, D: Disk + 'd> core::iter::Iterator for Partitions<'d, D> {
    type Item = Partition<'d, D>;
    /// Yields the next partition, combining the layout entry with the
    /// underlying raw disk.
    fn next(&mut self) -> Option<Self::Item> {
        let raw_disk = &self.disk.raw_disk;
        self.iter.next().map(|info| Partition::new(raw_disk, info))
    }
}
impl<'d, D: Disk + 'd> core::iter::ExactSizeIterator for Partitions<'d, D> {
    /// Delegates to the layout iterator's exact remaining count.
    fn len(&self) -> usize {
        self.iter.len()
    }
}
|
use amethyst::{
input::{InputHandler, StringBindings},
derive::SystemDesc,
ecs::{Read, System, SystemData},
};
/// Amethyst system that watches the input handler for the "Quit" action.
#[derive(SystemDesc)]
pub struct InputSystem;
impl<'s> System<'s> for InputSystem {
    type SystemData = Read<'s, InputHandler<StringBindings>>;
    /// Terminates the whole process when the "Quit" action is pressed;
    /// an unbound action reads as not pressed.
    fn run(&mut self, input: Self::SystemData) {
        let quit_pressed = input.action_is_down("Quit").unwrap_or(false);
        if quit_pressed {
            std::process::exit(0);
        }
    }
}
|
use crate::{
neighbour_table::{nlas::NeighbourTableNla, NeighbourTableBuffer, NeighbourTableHeader},
traits::{Emitable, Parseable},
DecodeError,
};
/// A netlink neighbour-table message: fixed header followed by a list of
/// netlink attributes (NLAs).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct NeighbourTableMessage {
    pub header: NeighbourTableHeader,
    pub nlas: Vec<NeighbourTableNla>,
}
impl Emitable for NeighbourTableMessage {
    /// Total serialized size: header bytes plus all NLA bytes.
    fn buffer_len(&self) -> usize {
        self.header.buffer_len() + self.nlas.as_slice().buffer_len()
    }
    fn emit(&self, buffer: &mut [u8]) {
        self.header.emit(buffer);
        // Bug fix: the NLAs must be written after the header, not at
        // offset 0 where they would overwrite it — `buffer_len` already
        // accounts for both parts, and this mirrors the other message
        // types' emit implementations.
        self.nlas
            .as_slice()
            .emit(&mut buffer[self.header.buffer_len()..]);
    }
}
impl<'buffer, T: AsRef<[u8]> + 'buffer> Parseable<NeighbourTableMessage>
    for NeighbourTableBuffer<&'buffer T>
{
    /// Parses header and NLAs from the buffer; each `self.parse()` call
    /// resolves to a different `Parseable` impl via the target type.
    fn parse(&self) -> Result<NeighbourTableMessage, DecodeError> {
        Ok(NeighbourTableMessage {
            header: self.parse()?,
            nlas: self.parse()?,
        })
    }
}
impl<'buffer, T: AsRef<[u8]> + 'buffer> Parseable<Vec<NeighbourTableNla>>
    for NeighbourTableBuffer<&'buffer T>
{
    /// Walks the buffer's NLA sub-slices, failing fast on the first NLA
    /// that is malformed or cannot be decoded.
    fn parse(&self) -> Result<Vec<NeighbourTableNla>, DecodeError> {
        let mut nlas = vec![];
        for nla_buf in self.nlas() {
            nlas.push(nla_buf?.parse()?);
        }
        Ok(nlas)
    }
}
|
use serde::Deserialize;
use serde::Serialize;
use crate::common::deserialize_as_u64_from_number_or_string;
/// Fee statistics grouped by transaction type group, as returned by the
/// node API ("1" = core transactions, "2" = magistrate transactions).
#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct NetworkFeeStats {
    #[serde(rename = "1")]
    pub core: CoreFeeStats,
    // Absent on networks without the magistrate type group.
    #[serde(rename = "2")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub magistrate: Option<MagistrateFeeStats>,
}
/// Aggregate fee figures; the API may deliver them as JSON numbers or
/// strings, hence the custom deserializer on every field.
#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct FeeStatistics {
    #[serde(deserialize_with = "deserialize_as_u64_from_number_or_string")]
    pub avg: u64,
    #[serde(deserialize_with = "deserialize_as_u64_from_number_or_string")]
    pub min: u64,
    #[serde(deserialize_with = "deserialize_as_u64_from_number_or_string")]
    pub max: u64,
    #[serde(deserialize_with = "deserialize_as_u64_from_number_or_string")]
    pub sum: u64,
}
/// Per-transaction-type fee statistics for the core type group; each field
/// is absent when the network has no data for that type.
#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CoreFeeStats {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub transfer: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub second_signature: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub delegate_registration: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub vote: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multi_signature: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ipfs: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multi_payment: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub delegate_resignation: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub htlc_lock: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub htlc_claim: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub htlc_refund: Option<FeeStatistics>,
}
/// Per-transaction-type fee statistics for the magistrate type group;
/// each field is absent when the network has no data for that type.
#[derive(Clone, Debug, Default, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct MagistrateFeeStats {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub business_registration: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub business_resignation: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub business_update: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bridgechain_registration: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bridgechain_resignation: Option<FeeStatistics>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bridgechain_update: Option<FeeStatistics>,
}
|
/*
* Slack Web API
*
* One way to interact with the Slack platform is its HTTP RPC-based Web API, a collection of methods requiring OAuth 2.0-based user, bot, or workspace tokens blessed with related OAuth scopes.
*
* The version of the OpenAPI document: 1.7.0
*
* Generated by: https://openapi-generator.tech
*/
/// Generated model for a Slack bot profile object (see the OpenAPI header
/// comment above); field names mirror the wire-format keys.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ObjsBotProfile {
    #[serde(rename = "app_id")]
    pub app_id: String,
    #[serde(rename = "deleted")]
    pub deleted: bool,
    // Boxed to keep this struct small; generated code boxes nested models.
    #[serde(rename = "icons")]
    pub icons: Box<crate::models::ObjsBotProfileIcons>,
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "name")]
    pub name: String,
    #[serde(rename = "team_id")]
    pub team_id: String,
    // Last-updated timestamp as provided by the API.
    #[serde(rename = "updated")]
    pub updated: i32,
}
impl ObjsBotProfile {
    /// Constructs a profile, boxing `icons` to match the struct layout.
    pub fn new(app_id: String, deleted: bool, icons: crate::models::ObjsBotProfileIcons, id: String, name: String, team_id: String, updated: i32) -> ObjsBotProfile {
        ObjsBotProfile {
            app_id,
            deleted,
            icons: Box::new(icons),
            id,
            name,
            team_id,
            updated,
        }
    }
}
|
// svd2rust-generated read-only access to the MMCRGUFCR counter register.
#[doc = "Register `MMCRGUFCR` reader"]
pub type R = crate::R<MMCRGUFCR_SPEC>;
#[doc = "Field `RGUFC` reader - Received good unicast frames counter"]
pub type RGUFC_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - Received good unicast frames counter"]
    #[inline(always)]
    pub fn rgufc(&self) -> RGUFC_R {
        // The counter occupies the whole 32-bit register.
        RGUFC_R::new(self.bits)
    }
}
#[doc = "MMC received good unicast frames counter register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mmcrgufcr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MMCRGUFCR_SPEC;
impl crate::RegisterSpec for MMCRGUFCR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`mmcrgufcr::R`](R) reader structure"]
impl crate::Readable for MMCRGUFCR_SPEC {}
#[doc = "`reset()` method sets MMCRGUFCR to value 0"]
impl crate::Resettable for MMCRGUFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![feature(slice_rotate)]
// The circular list of marks being tied (AoC 2017 day 10 "knot hash").
type Knot = Vec<Increment>;
// One reversal span length taken from the puzzle input.
type Length = usize;
// A single mark value stored in the knot.
type Increment = usize;
// Amount added to the position after each reversal; grows by one per step.
type Skip = usize;
// Current position in the circular list.
type Index = usize;
// Hexadecimal digest string produced from the sparse hash.
type DenseHash = String;
fn main() {
    // Puzzle input: comma-separated reversal lengths.
    let input = "46,41,212,83,1,255,157,65,139,52,39,254,2,86,0,204";
    // Answer # 1: one round over the numeric lengths, then multiply the
    // first two marks of the resulting 256-mark knot.
    let lengths_numeric = lengths_numeric(input);
    let mut knot: Knot = (0..256).collect();
    tie_knot(&mut knot, lengths_numeric, 1);
    println!("Answer #1: {:?}", knot_product(knot));
    // Answer # 2: 64 rounds over the ASCII-codepoint lengths (with the
    // standard suffix appended), then render the dense hash as hex.
    let lengths_ascii = lengths_ascii(input);
    let mut knot: Knot = (0..256).collect();
    tie_knot(&mut knot, lengths_ascii, 64);
    println!("Answer #2: {:?}", dense_hash(&knot));
}
/// Run `magnitude` rounds of the knot-tying algorithm over `knot`,
/// reversing one circular span per entry in `lengths`.
///
/// `index` and `skip` deliberately persist across rounds; that carry-over
/// is what makes 64 rounds (part 2) different from repeating round one.
fn tie_knot(knot: &mut Knot, lengths: Vec<Length>, magnitude: usize) -> () {
    let mut skip: Skip = 0;
    let mut index: Index = 0;
    let knot_len = knot.len();
    for _ in 0..magnitude {
        for length in &lengths {
            // Lengths larger than the knot are invalid per the puzzle;
            // skipping here also leaves `index` and `skip` untouched.
            if *length > knot_len {
                continue;
            }
            reverse_slice(index, *length, knot);
            // Advance by the reversed length plus the current skip size,
            // wrapping around the circular list.
            index += length;
            index += skip;
            index %= knot_len;
            skip += 1;
        }
    }
}
/// Reverse the circular span of `length` marks starting at `index`.
///
/// Wrap-around is handled by rotating the knot so the span begins at the
/// front, reversing it in place, and rotating back. This now uses the
/// stable `rotate_left`/`rotate_right` slice methods; the original relied
/// on the long-removed unstable `slice_rotate` feature's `rotate`, which
/// no longer compiles on current toolchains.
///
/// Callers guarantee `index < knot.len()` and `length <= knot.len()`.
fn reverse_slice(index: Index, length: Increment, knot: &mut Knot) {
    knot.rotate_left(index);
    knot[..length].reverse();
    // Undo the initial rotation: rotate_right(i) == rotate_left(len - i).
    knot.rotate_right(index);
}
/// Parse the comma-separated puzzle input as decimal reversal lengths.
///
/// # Panics
///
/// Panics if any comma-separated entry is not a valid non-negative
/// decimal integer.
fn lengths_numeric(input: &str) -> Vec<Length> {
    input
        .split(',')
        // `parse` replaces the `from_str_radix(_, 10)` anti-idiom
        // (clippy: from_str_radix_10).
        .map(|x| x.parse().expect("length must be a decimal integer"))
        .collect()
}
/// Interpret the input characters as codepoint lengths (part 2), with the
/// standard knot-hash suffix `17, 31, 73, 47, 23` appended.
fn lengths_ascii(input: &str) -> Vec<Length> {
    input
        .chars()
        .map(|c| c as usize)
        .chain([17, 31, 73, 47, 23].iter().cloned())
        .collect()
}
/// Multiply the first two marks of the knot (the part 1 answer).
///
/// The previous version cloned the entire vector before iterating;
/// borrowing the first two elements is sufficient.
fn knot_product(knot: Knot) -> isize {
    knot.iter().take(2).fold(1, |acc, &x| acc * x as isize)
}
/// Condense the 256-mark sparse knot into the 32-character dense hash:
/// XOR each 16-mark chunk, then render each result as two hex digits.
///
/// Uses `{:02x}` for zero-padding instead of the previous manual
/// "prepend a 0 when len == 1" step, and drops the redundant
/// `String::from(format!(..))` wrapper (format! already returns String).
fn dense_hash(knot: &Knot) -> DenseHash {
    knot.chunks(16)
        .map(|chunk| chunk.iter().fold(0, |acc, x| acc ^ x))
        .map(|x| format!("{:02x}", x))
        .collect()
}
#[cfg(test)]
mod test {
    use super::*;
    // Worked example from the AoC day 10 part 1 description (5-mark knot).
    #[test]
    fn test_hash_knot() {
        let lengths = "3,4,1,5"
            .split(',')
            .map(|x| usize::from_str_radix(x, 10).unwrap())
            .collect();
        let mut knot: Knot = (0..5).collect();
        tie_knot(&mut knot, lengths, 1);
        assert_eq!(vec![3, 4, 2, 1, 0], knot);
        assert_eq!(12, knot_product(knot));
    }
    // ASCII lengths are raw codepoints (commas included) plus the
    // standard suffix 17, 31, 73, 47, 23.
    #[test]
    fn test_to_ascii_codepoints() {
        let input = "1,2,3";
        assert_eq!(
            vec![49, 44, 50, 44, 51, 17, 31, 73, 47, 23],
            lengths_ascii(input)
        );
    }
    // Known dense-hash vectors from the part 2 puzzle description.
    #[test]
    fn test_dense_hash_empty() {
        let mut knot: Knot = (0..256).collect();
        let lengths = lengths_ascii("");
        tie_knot(&mut knot, lengths, 64);
        assert_eq!("a2582a3a0e66e6e86e3812dcb672a272", dense_hash(&knot));
    }
    #[test]
    fn test_dense_hash_aoc() {
        let mut knot: Knot = (0..256).collect();
        let lengths = lengths_ascii("AoC 2017");
        tie_knot(&mut knot, lengths, 64);
        assert_eq!("33efeb34ea91902bb2f59c9920caa6cd", dense_hash(&knot));
    }
}
|
extern crate proc_macro;
extern crate syn;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
mod deserialize;
mod helpers;
mod serialize;
/// Entry point for `#[derive(Deserialize)]`; delegates all work to the
/// sibling `deserialize` module.
#[proc_macro_derive(Deserialize)]
pub fn deserialize_macro_derive(input: TokenStream) -> TokenStream {
    deserialize::deserialize_macro_derive(input)
}
/// Entry point for `#[derive(Serialize)]`; delegates all work to the
/// sibling `serialize` module.
#[proc_macro_derive(Serialize)]
pub fn serialize_macro_derive(input: TokenStream) -> TokenStream {
    serialize::serialize_macro_derive(input)
}
|
use cocoa::base::id;
use std::mem;
use std::ops::{Deref, DerefMut};
use {CommandEncoder, StoreAction, RenderCommandEncoder};
/// Wrapper around a Metal parallel render command encoder Objective-C
/// object, stored as a raw `id` pointer.
#[derive(Debug)]
pub struct ParallelRenderCommandEncoder(id);
// NOTE(review): marks the raw Objective-C pointer as sendable across
// threads -- confirm against Metal's threading rules for this protocol.
unsafe impl Send for ParallelRenderCommandEncoder {}
impl Deref for ParallelRenderCommandEncoder {
    type Target = CommandEncoder;
    // Reinterprets `&id` as `&CommandEncoder`; assumes CommandEncoder is a
    // newtype over `id` with identical layout -- TODO confirm.
    fn deref(&self) -> &Self::Target {
        unsafe { mem::transmute(&self.0) }
    }
}
impl DerefMut for ParallelRenderCommandEncoder {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { mem::transmute(&mut self.0) }
    }
}
impl ParallelRenderCommandEncoder {
    // Stub: not yet wired to the Objective-C runtime.
    pub fn create_render_command_encoder(&self) -> RenderCommandEncoder {
        unimplemented!()
    }
    // Stub: store-action setter for the color attachment at `index`.
    pub fn set_color_store_action_at(&self, store_action: StoreAction, index: usize) {
        unimplemented!()
    }
    // NOTE(review): the two methods below take `self` by value while the
    // ones above take `&self` -- confirm whether consuming the encoder is
    // intended before implementing them.
    pub fn set_depth_store_action(self, store_action: StoreAction) {
        unimplemented!()
    }
    pub fn set_stencil_store_action(self, store_action: StoreAction) {
        unimplemented!()
    }
}
impl_from_into_raw!(ParallelRenderCommandEncoder, of protocol "MTLRenderCommandEncoder");
|
#[doc = "Reader of register DDRPHYC_ODTCR"]
pub type R = crate::R<u32, super::DDRPHYC_ODTCR>;
#[doc = "Writer for register DDRPHYC_ODTCR"]
pub type W = crate::W<u32, super::DDRPHYC_ODTCR>;
#[doc = "Register DDRPHYC_ODTCR `reset()`'s with value 0x8421_0000"]
impl crate::ResetValue for super::DDRPHYC_ODTCR {
    type Type = u32;
    // Documented hardware reset value for this register.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x8421_0000
    }
}
// Eight 4-bit on-die-termination configuration fields packed into one
// 32-bit register: RDODT0..3 in bits 0..15, WRODT0..3 in bits 16..31.
// Each write proxy masks the value to 4 bits and shifts it into place.
#[doc = "Reader of field `RDODT0`"]
pub type RDODT0_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RDODT0`"]
pub struct RDODT0_W<'a> {
    w: &'a mut W,
}
impl<'a> RDODT0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "Reader of field `RDODT1`"]
pub type RDODT1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RDODT1`"]
pub struct RDODT1_W<'a> {
    w: &'a mut W,
}
impl<'a> RDODT1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);
        self.w
    }
}
#[doc = "Reader of field `RDODT2`"]
pub type RDODT2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RDODT2`"]
pub struct RDODT2_W<'a> {
    w: &'a mut W,
}
impl<'a> RDODT2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);
        self.w
    }
}
#[doc = "Reader of field `RDODT3`"]
pub type RDODT3_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RDODT3`"]
pub struct RDODT3_W<'a> {
    w: &'a mut W,
}
impl<'a> RDODT3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 12)) | (((value as u32) & 0x0f) << 12);
        self.w
    }
}
// Write-path (WRODT) nibbles occupy the upper half of the register.
#[doc = "Reader of field `WRODT0`"]
pub type WRODT0_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRODT0`"]
pub struct WRODT0_W<'a> {
    w: &'a mut W,
}
impl<'a> WRODT0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);
        self.w
    }
}
#[doc = "Reader of field `WRODT1`"]
pub type WRODT1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRODT1`"]
pub struct WRODT1_W<'a> {
    w: &'a mut W,
}
impl<'a> WRODT1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 20)) | (((value as u32) & 0x0f) << 20);
        self.w
    }
}
#[doc = "Reader of field `WRODT2`"]
pub type WRODT2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRODT2`"]
pub struct WRODT2_W<'a> {
    w: &'a mut W,
}
impl<'a> WRODT2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);
        self.w
    }
}
#[doc = "Reader of field `WRODT3`"]
pub type WRODT3_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRODT3`"]
pub struct WRODT3_W<'a> {
    w: &'a mut W,
}
impl<'a> WRODT3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28);
        self.w
    }
}
// Field accessors: each getter extracts its 4-bit nibble by shifting and
// masking the raw register value.
impl R {
    #[doc = "Bits 0:3 - RDODT0"]
    #[inline(always)]
    pub fn rdodt0(&self) -> RDODT0_R {
        RDODT0_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - RDODT1"]
    #[inline(always)]
    pub fn rdodt1(&self) -> RDODT1_R {
        RDODT1_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - RDODT2"]
    #[inline(always)]
    pub fn rdodt2(&self) -> RDODT2_R {
        RDODT2_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - RDODT3"]
    #[inline(always)]
    pub fn rdodt3(&self) -> RDODT3_R {
        RDODT3_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - WRODT0"]
    #[inline(always)]
    pub fn wrodt0(&self) -> WRODT0_R {
        WRODT0_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 20:23 - WRODT1"]
    #[inline(always)]
    pub fn wrodt1(&self) -> WRODT1_R {
        WRODT1_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - WRODT2"]
    #[inline(always)]
    pub fn wrodt2(&self) -> WRODT2_R {
        WRODT2_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bits 28:31 - WRODT3"]
    #[inline(always)]
    pub fn wrodt3(&self) -> WRODT3_R {
        WRODT3_R::new(((self.bits >> 28) & 0x0f) as u8)
    }
}
// Field writers: each method hands out a write proxy that mutates the
// corresponding nibble of this register writer.
impl W {
    #[doc = "Bits 0:3 - RDODT0"]
    #[inline(always)]
    pub fn rdodt0(&mut self) -> RDODT0_W {
        RDODT0_W { w: self }
    }
    #[doc = "Bits 4:7 - RDODT1"]
    #[inline(always)]
    pub fn rdodt1(&mut self) -> RDODT1_W {
        RDODT1_W { w: self }
    }
    #[doc = "Bits 8:11 - RDODT2"]
    #[inline(always)]
    pub fn rdodt2(&mut self) -> RDODT2_W {
        RDODT2_W { w: self }
    }
    #[doc = "Bits 12:15 - RDODT3"]
    #[inline(always)]
    pub fn rdodt3(&mut self) -> RDODT3_W {
        RDODT3_W { w: self }
    }
    #[doc = "Bits 16:19 - WRODT0"]
    #[inline(always)]
    pub fn wrodt0(&mut self) -> WRODT0_W {
        WRODT0_W { w: self }
    }
    #[doc = "Bits 20:23 - WRODT1"]
    #[inline(always)]
    pub fn wrodt1(&mut self) -> WRODT1_W {
        WRODT1_W { w: self }
    }
    #[doc = "Bits 24:27 - WRODT2"]
    #[inline(always)]
    pub fn wrodt2(&mut self) -> WRODT2_W {
        WRODT2_W { w: self }
    }
    #[doc = "Bits 28:31 - WRODT3"]
    #[inline(always)]
    pub fn wrodt3(&mut self) -> WRODT3_W {
        WRODT3_W { w: self }
    }
}
|
use crate::fontinfo::*;
use crate::utils::adjust_offset;
use fonttools::cmap;
use fonttools::font;
use fonttools::font::Font;
use fonttools::font::Table;
use fonttools::glyf;
use fonttools::head::head;
use fonttools::hhea;
use fonttools::hmtx;
use fonttools::maxp::maxp;
use fonttools::name::{name, NameRecord, NameRecordID};
use fonttools::os2::os2;
use fonttools::post::post;
use fonttools::utils::int_list_to_num;
use std::collections::BTreeMap;
use std::convert::TryInto;
/// Build the `head` table from the UFO font info and the compiled `glyf`
/// table.
pub fn compile_head(info: &norad::FontInfo, glyf: &glyf::glyf) -> head {
    // Clamp the minor version to at most three digits so the combined
    // fontRevision fixed-point value below stays well formed.
    let mut minor = info.version_minor.unwrap_or(0);
    while minor > 999 {
        minor /= 10;
    }
    // fontRevision encodes major.minor as major + minor/1000.
    let font_revision: f32 =
        (info.version_major.unwrap_or(1) as f32 * 1000.0 + minor as f32).round() / 1000.0;
    // bounding box: union of all per-glyph bounding boxes (0 for an empty font)
    let bounds: Vec<(i16, i16, i16, i16)> = glyf
        .glyphs
        .iter()
        .map(|x| (x.xMin, x.xMax, x.yMin, x.yMax))
        .collect();
    let mut head_table = head::new(
        font_revision,
        info.units_per_em.map_or(1000, |f| f.get() as u16),
        bounds.iter().map(|x| x.0).min().unwrap_or(0), /* xmin */
        bounds.iter().map(|x| x.2).min().unwrap_or(0), /* ymin */
        bounds.iter().map(|x| x.1).max().unwrap_or(0), /* xmax */
        bounds.iter().map(|x| x.3).max().unwrap_or(0), /* ymax */
    );
    // dates (modified is set to now by default)
    if info.open_type_head_created.is_some() {
        if let Ok(date) = chrono::NaiveDateTime::parse_from_str(
            &info.open_type_head_created.as_ref().unwrap(),
            "%Y/%m/%d %H:%M:%S",
        ) {
            head_table.created = date
        } else {
            // Unparseable creation dates are logged but otherwise ignored.
            log::warn!(
                "Couldn't parse created date {:?}",
                info.open_type_head_created
            )
        }
    }
    // NOTE(review): original comment said "mac style", but this sets
    // lowestRecPPEM, not macStyle.
    if let Some(lowest_rec_ppm) = info.open_type_head_lowest_rec_ppem {
        head_table.lowestRecPPEM = lowest_rec_ppm as u16;
    }
    // misc flag bits, converted from the UFO's list-of-bit-indices form
    if let Some(flags) = &info.open_type_head_flags {
        head_table.flags = int_list_to_num(flags) as u16;
    }
    head_table
}
/// Build a version 2.0 `post` table from the UFO font info.
///
/// `names` is the glyph-name order to embed. The underline position
/// defaults to -7.5% of the units-per-em when the UFO does not provide one.
pub fn compile_post(info: &norad::FontInfo, names: &[String]) -> post {
    let upm = info.units_per_em.map_or(1000.0, |f| f.get());
    post::new(
        2.0,
        info.italic_angle.map_or(0.0, |f| f.get() as f32),
        info.postscript_underline_position
            .map_or_else(|| upm * -0.075, |f| f.get()) as i16,
        postscript_underline_thickness(info),
        info.postscript_is_fixed_pitch.unwrap_or(false),
        Some(names.to_vec()),
    )
}
/// Build the `cmap` table carrying the same codepoint-to-glyph mapping in
/// two format-4 subtables: Unicode (platform 0, encoding 3) and Windows
/// BMP (platform 3, encoding 1).
pub fn compile_cmap(mapping: BTreeMap<u32, u16>) -> cmap::cmap {
    // Both subtables differ only in their platform/encoding IDs.
    let subtable = |platform, encoding, map: BTreeMap<u32, u16>| cmap::CmapSubtable {
        format: 4,
        platformID: platform,
        encodingID: encoding,
        languageID: 0,
        mapping: map,
    };
    let unicode = subtable(0, 3, mapping.clone());
    let windows = subtable(3, 1, mapping);
    cmap::cmap {
        subtables: vec![unicode, windows],
    }
}
/// Build the `hhea` table from the UFO info, horizontal metrics and glyphs.
///
/// `numberOfHMetrics` is left at 0 here and patched by `fill_tables` once
/// the `hmtx` table has been serialized.
pub fn compile_hhea(
    info: &norad::FontInfo,
    metrics: &[hmtx::Metric],
    glyf: &glyf::glyf,
) -> hhea::hhea {
    hhea::hhea {
        majorVersion: 1,
        minorVersion: 0,
        ascender: hhea_ascender(info),
        descender: hhea_descender(info),
        lineGap: info.open_type_hhea_line_gap.unwrap_or(0) as i16,
        advanceWidthMax: metrics.iter().map(|x| x.advanceWidth).max().unwrap_or(0),
        minLeftSideBearing: metrics.iter().map(|x| x.lsb).min().unwrap_or(0),
        // NOTE(review): computed as advanceWidth - xMax per glyph; the
        // spec's formula is aw - (lsb + (xMax - xMin)) -- confirm these
        // coincide for this data model.
        minRightSideBearing: metrics
            .iter()
            .map(|x| x.advanceWidth as i16)
            .zip(glyf.glyphs.iter().map(|g| g.xMax))
            .map(|t| t.0 - t.1)
            .min()
            .unwrap_or(0),
        xMaxExtent: glyf.glyphs.iter().map(|g| g.xMax).max().unwrap_or(0),
        caretSlopeRise: 1, // XXX
        caretSlopeRun: 0, // XXX
        caretOffset: info.open_type_hhea_caret_offset.unwrap_or(0) as i16,
        reserved0: 0,
        reserved1: 0,
        reserved2: 0,
        reserved3: 0,
        metricDataFormat: 0,
        // Patched by the caller (see fill_tables) after hmtx serialization.
        numberOfHMetrics: 0,
    }
}
/// Build a version 4 `OS/2` table from the UFO info, metrics and cmap
/// mapping. Missing UFO values fall back to fractions of the UPM.
pub fn compile_os2(
    info: &norad::FontInfo,
    metrics: &[hmtx::Metric],
    _glyf: &glyf::glyf,
    mapping: &BTreeMap<u32, u16>,
) -> os2 {
    let upm = info.units_per_em.map_or(1000.0, |f| f.get());
    let italic_angle = info.italic_angle.map_or(0.0, |f| f.get());
    let x_height = info.x_height.map_or(upm * 0.5, |f| f.get());
    let subscript_y_offset = info
        .open_type_os2_subscript_y_offset
        .unwrap_or((upm * 0.075).round() as i32) as i16;
    let font_ascender = ascender(info);
    let font_descender = descender(info);
    let s_typo_ascender = info
        .open_type_os2_typo_ascender
        .unwrap_or_else(|| font_ascender.into()) as i16;
    let s_typo_descender = info
        .open_type_os2_typo_descender
        .unwrap_or_else(|| font_descender.into()) as i16;
    // NOTE(review): the fallback is 1.2*UPM + (ascender - descender), which
    // looks unusually large; the common recipe subtracts the ascender/
    // descender span instead -- confirm against the reference compiler.
    let s_typo_line_gap =
        info.open_type_hhea_line_gap
            .unwrap_or((upm * 1.2) as i32 + (font_ascender - font_descender) as i32) as i16;
    let superscript_y_offset = info
        .open_type_os2_superscript_y_offset
        .unwrap_or((upm * 0.35).round() as i32) as i16;
    let subscript_x_size = info
        .open_type_os2_subscript_x_size
        .unwrap_or((upm * 0.65).round() as i32) as i16;
    let mut table = os2 {
        version: 4,
        // Average width over glyphs with a non-zero advance.
        xAvgCharWidth: (metrics.iter().map(|m| m.advanceWidth as f32).sum::<f32>()
            / metrics.iter().filter(|m| m.advanceWidth != 0).count() as f32)
            .round() as i16,
        usWeightClass: info.open_type_os2_weight_class.unwrap_or(400) as u16,
        usWidthClass: info.open_type_os2_width_class.map_or(5, |f| f as u16),
        fsType: int_list_to_num(&info.open_type_os2_type.as_ref().unwrap_or(&vec![2])) as u16,
        ySubscriptXSize: subscript_x_size,
        ySubscriptYSize: info
            .open_type_os2_subscript_y_size
            .unwrap_or((upm * 0.6).round() as i32) as i16,
        ySubscriptYOffset: subscript_y_offset,
        // X offsets are slanted along the italic angle when not given.
        ySubscriptXOffset: info
            .open_type_os2_subscript_x_offset
            .unwrap_or_else(|| adjust_offset(-subscript_y_offset, italic_angle))
            as i16,
        ySuperscriptXSize: info
            .open_type_os2_superscript_x_size
            .unwrap_or((upm * 0.65).round() as i32) as i16,
        ySuperscriptYSize: info
            .open_type_os2_superscript_y_size
            .unwrap_or((upm * 0.6).round() as i32) as i16,
        ySuperscriptYOffset: superscript_y_offset,
        ySuperscriptXOffset: info
            .open_type_os2_superscript_x_offset
            .unwrap_or_else(|| adjust_offset(-superscript_y_offset, italic_angle))
            as i16,
        yStrikeoutSize: info
            .open_type_os2_strikeout_size
            .unwrap_or_else(|| postscript_underline_thickness(info).into())
            as i16,
        yStrikeoutPosition: info
            .open_type_os2_strikeout_position
            .unwrap_or((x_height * 0.22) as i32) as i16,
        sxHeight: Some(x_height as i16),
        achVendID: info
            .open_type_os2_vendor_id
            .as_ref()
            .map_or(*b"NONE", |x| x.as_bytes().try_into().unwrap()),
        sCapHeight: Some(info.cap_height.map_or(upm * 0.7, |f| f.get()) as i16),
        sTypoAscender: s_typo_ascender,
        sTypoDescender: s_typo_descender,
        sTypoLineGap: s_typo_line_gap,
        usWinAscent: info
            .open_type_os2_win_ascent
            .unwrap_or_else(|| (font_ascender + s_typo_line_gap).try_into().unwrap())
            as u16,
        usWinDescent: info
            .open_type_os2_win_descent
            .unwrap_or(font_descender.abs() as u32) as u16,
        usBreakChar: Some(32),
        usMaxContext: Some(0),
        usDefaultChar: Some(0),
        // sFamilyClass: info.open_type_os2_family_class... (not public)
        sFamilyClass: 0,
        panose: get_panose(info),
        // Code page ranges are filled in below.
        ulCodePageRange1: Some(0),
        ulCodePageRange2: Some(0),
        ulUnicodeRange1: 0b10100001000000000000000011111111, // XXX
        ulUnicodeRange2: 0, // XXX
        ulUnicodeRange3: 0, // XXX
        ulUnicodeRange4: 0, // XXX
        usFirstCharIndex: *mapping.keys().min().unwrap_or(&0xFFFF) as u16,
        usLastCharIndex: *mapping.keys().max().unwrap_or(&0xFFFF) as u16,
        usLowerOpticalPointSize: None,
        usUpperOpticalPointSize: None,
        fsSelection: get_selection(info),
    };
    // Prefer explicit code page ranges from the UFO; otherwise derive them
    // from the characters actually mapped.
    if let Some(page_ranges) = info.open_type_os2_code_page_ranges.as_ref() {
        table.int_list_to_code_page_ranges(page_ranges);
    } else {
        table.calc_code_page_ranges(&mapping);
    }
    table
}
/// Build the `name` table from the UFO font info, emitting Windows/Unicode
/// records for the standard name IDs that the UFO provides.
pub fn compile_name(info: &norad::FontInfo) -> name {
    let mut name = name { records: vec![] };
    /* Ideally...
    if let Some(records) = &info.open_type_name_records {
        for record in records {
            name.records.push(NameRecord {
                nameID: record.name_id as u16,
                platformID: record.platform_id as u16,
                encodingID: record.encoding_id as u16,
                languageID: record.language_id as u16,
                string: record.string,
            })
        }
    }
    */
    let mut records: Vec<(NameRecordID, String)> = vec![];
    if let Some(copyright) = &info.copyright {
        records.push((NameRecordID::Copyright, copyright.to_string()));
    }
    let family_name = style_map_family_name(info);
    let style_name = style_map_style_name(info);
    let pfn = preferred_family_name(info);
    let psfn = preferred_subfamily_name(info);
    // Mandatory records (IDs 1, 2, 3, 4, 5, 6) are always emitted.
    records.extend(vec![
        (NameRecordID::FontFamilyName, family_name.clone()),
        (NameRecordID::FontSubfamilyName, style_name.clone()),
        (NameRecordID::UniqueID, unique_id(info)),
        (NameRecordID::FullFontName, format!("{0} {1}", pfn, psfn)),
        (NameRecordID::Version, name_version(info)),
        (NameRecordID::PostscriptName, postscript_font_name(info)),
    ]);
    // Optional records: only emitted when present in the UFO.
    for (id, field) in &[
        (NameRecordID::Trademark, &info.trademark),
        (
            NameRecordID::Manufacturer,
            &info.open_type_name_manufacturer,
        ),
        (NameRecordID::Designer, &info.open_type_name_designer),
        (NameRecordID::Description, &info.open_type_name_description),
        (
            NameRecordID::ManufacturerURL,
            &info.open_type_name_manufacturer_url,
        ),
        (NameRecordID::DesignerURL, &info.open_type_name_designer_url),
        (NameRecordID::License, &info.open_type_name_license),
        (NameRecordID::LicenseURL, &info.open_type_name_license_url),
    ] {
        if let Some(value) = field {
            records.push((*id, value.to_string()));
        }
    }
    // Preferred (typographic) names are only written when they differ from
    // the style-mapped names, to avoid redundant records.
    if pfn != family_name {
        records.push((NameRecordID::PreferredFamilyName, pfn));
    }
    if psfn != style_name {
        records.push((NameRecordID::PreferredSubfamilyName, psfn));
    }
    for (id, field) in &[
        (
            NameRecordID::CompatibleFullName,
            &info.open_type_name_compatible_full_name,
        ),
        (NameRecordID::SampleText, &info.open_type_name_sample_text),
        (
            NameRecordID::WWSFamilyName,
            &info.open_type_name_wws_family_name,
        ),
        (
            NameRecordID::WWSSubfamilyName,
            &info.open_type_name_wws_subfamily_name,
        ),
    ] {
        if let Some(value) = field {
            records.push((*id, value.to_string()));
        }
    }
    // All collected strings become Windows/Unicode records.
    for (id, string) in records {
        name.records.push(NameRecord::windows_unicode(id, string));
    }
    name
}
/// Assemble a TrueType `Font` from the compiled glyphs, metrics, glyph
/// names and cmap mapping, building all remaining required tables.
pub fn fill_tables(
    info: &norad::FontInfo,
    glyf_table: glyf::glyf,
    metrics: Vec<hmtx::Metric>,
    names: Vec<String>,
    mapping: BTreeMap<u32, u16>,
) -> Font {
    let mut font = Font::new(font::SfntVersion::TrueType);
    let head_table = compile_head(info, &glyf_table);
    let post_table = compile_post(info, &names);
    // maxp statistics are derived from the already-compiled glyf table.
    let (
        num_glyphs,
        max_points,
        max_contours,
        max_composite_points,
        max_composite_contours,
        max_component_elements,
        max_component_depth,
    ) = glyf_table.maxp_statistics();
    let maxp_table = maxp::new10(
        num_glyphs,
        max_points,
        max_contours,
        max_composite_points,
        max_composite_contours,
        max_component_elements,
        max_component_depth,
    );
    let os2_table = compile_os2(info, &metrics, &glyf_table, &mapping);
    let cmap_table = compile_cmap(mapping);
    let name_table = compile_name(info);
    let mut hhea_table = compile_hhea(info, &metrics, &glyf_table);
    // hmtx must be serialized first so hhea's numberOfHMetrics can be set
    // to the actual count the serializer produced.
    let hmtx_table = hmtx::hmtx { metrics };
    let (hmtx_bytes, num_h_metrics) = hmtx_table.to_bytes();
    hhea_table.numberOfHMetrics = num_h_metrics;
    font.tables.insert(*b"head", Table::Head(head_table));
    font.tables.insert(*b"hhea", Table::Hhea(hhea_table));
    font.tables.insert(*b"maxp", Table::Maxp(maxp_table));
    font.tables.insert(*b"OS/2", Table::Os2(os2_table));
    font.tables.insert(*b"hmtx", Table::Unknown(hmtx_bytes));
    font.tables.insert(*b"cmap", Table::Cmap(cmap_table));
    // loca is inserted as a one-byte placeholder; presumably recomputed
    // from glyf on save -- TODO confirm.
    font.tables.insert(*b"loca", Table::Unknown(vec![0]));
    font.tables.insert(*b"glyf", Table::Glyf(glyf_table));
    font.tables.insert(*b"name", Table::Name(name_table));
    font.tables.insert(*b"post", Table::Post(post_table));
    font
}
|
use quick_xml::{XmlReader, XmlWriter, Element, Event};
use quick_xml::error::Error as XmlError;
use fromxml::FromXml;
use toxml::{ToXml, XmlWriterExt};
use error::Error;
/// A representation of the `<image>` element.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct Image {
    /// The URL of the channel image.
    pub url: String,
    /// A description of the image. This is used in the HTML `alt` attribute.
    pub title: String,
    /// The URL that the image links to.
    pub link: String,
    /// The width of the image.
    // Kept as a raw string rather than a number, exactly as read from XML.
    pub width: Option<String>,
    /// The height of the image.
    pub height: Option<String>,
    /// The text for the HTML `title` attribute.
    pub description: Option<String>,
}
impl FromXml for Image {
    /// Parse an `<image>` element from `reader`, consuming events up to
    /// and including the element's end tag.
    ///
    /// Missing `url`/`title`/`link` children default to empty strings;
    /// returns `Error::EOF` if the stream ends before the end tag.
    fn from_xml<R: ::std::io::BufRead>(mut reader: XmlReader<R>,
                                       _: Element)
                                       -> Result<(Self, XmlReader<R>), Error> {
        let mut url = None;
        let mut title = None;
        let mut link = None;
        let mut width = None;
        let mut height = None;
        let mut description = None;
        while let Some(e) = reader.next() {
            match e {
                Ok(Event::Start(element)) => {
                    // Recognised children capture their text content;
                    // anything else is skipped wholesale.
                    match element.name() {
                        b"url" => url = element_text!(reader),
                        b"title" => title = element_text!(reader),
                        b"link" => link = element_text!(reader),
                        b"width" => width = element_text!(reader),
                        b"height" => height = element_text!(reader),
                        b"description" => description = element_text!(reader),
                        _ => skip_element!(reader),
                    }
                }
                // The first End event at this level is `</image>` itself.
                Ok(Event::End(_)) => {
                    let url = url.unwrap_or_default();
                    let title = title.unwrap_or_default();
                    let link = link.unwrap_or_default();
                    return Ok((Image {
                        url: url,
                        title: title,
                        link: link,
                        width: width,
                        height: height,
                        description: description,
                    }, reader))
                }
                Err(err) => return Err(err.into()),
                _ => {}
            }
        }
        Err(Error::EOF)
    }
}
impl ToXml for Image {
    /// Serialize this image as an `<image>` element, emitting the optional
    /// children (`width`, `height`, `description`) only when present.
    fn to_xml<W: ::std::io::Write>(&self, writer: &mut XmlWriter<W>) -> Result<(), XmlError> {
        let element = Element::new(b"image");
        try!(writer.write(Event::Start(element.clone())));
        // Required children are always written, even when empty.
        try!(writer.write_text_element(b"url", &self.url));
        try!(writer.write_text_element(b"title", &self.title));
        try!(writer.write_text_element(b"link", &self.link));
        if let Some(width) = self.width.as_ref() {
            try!(writer.write_text_element(b"width", width));
        }
        if let Some(height) = self.height.as_ref() {
            try!(writer.write_text_element(b"height", height));
        }
        if let Some(description) = self.description.as_ref() {
            try!(writer.write_text_element(b"description", description));
        }
        writer.write(Event::End(element))
    }
}
|
#![no_std]
use num_traits::int::PrimInt;
/// Returns `true` when every bit of `flag` is set in `source`.
#[inline(always)]
pub fn is_bitflag_set<T: PrimInt>(source: T, flag: T) -> bool {
    source & flag == flag
}
/// Sets every bit of `flag` in `source`.
#[inline(always)]
pub fn set_bitflag<T: PrimInt>(source: &mut T, flag: T) {
    *source = *source | flag
}
#[inline(always)]
pub fn unset_bitflag(source: &mut u8, flag: u8) {
*source &= !flag
}
/// Sets the bit at position `index` (0 = least significant) in `source`.
#[inline(always)]
pub fn set_bit(source: &mut u8, index: u8) {
    let mask = 1u8 << index;
    *source = *source | mask
}
/// Clears the bit at position `index` (0 = least significant) in `source`.
#[inline(always)]
pub fn unset_bit(source: &mut u8, index: u8) {
    let mask = !(1u8 << index);
    *source = *source & mask;
}
#[cfg(test)]
mod tests {
    use super::*;
    // Walks a permissions byte through set/unset transitions, checking all
    // three flags after every step.
    #[test]
    fn basic() {
        const READ: u8 = 0b00000001;
        const WRITE: u8 = 0b00000010;
        // Written with 7 digits (siblings use 8); same value 0b00000100.
        const EXEC: u8 = 0b0000100;
        let mut permissions: u8 = 0b00000000;
        assert!(!is_bitflag_set(permissions, READ) && !is_bitflag_set(permissions, WRITE) && !is_bitflag_set(permissions, EXEC));
        set_bitflag(&mut permissions, READ);
        assert!(is_bitflag_set(permissions, READ) && !is_bitflag_set(permissions, WRITE) && !is_bitflag_set(permissions, EXEC));
        set_bitflag(&mut permissions, WRITE);
        assert!(is_bitflag_set(permissions, READ) && is_bitflag_set(permissions, WRITE) && !is_bitflag_set(permissions, EXEC));
        set_bitflag(&mut permissions, EXEC);
        assert!(is_bitflag_set(permissions, READ) && is_bitflag_set(permissions, WRITE) && is_bitflag_set(permissions, EXEC));
        // NOTE(review): unset_bitflag is only exercised for READ; the
        // set_bit/unset_bit helpers are not covered here at all.
        unset_bitflag(&mut permissions, READ);
        assert!(!is_bitflag_set(permissions, READ) && is_bitflag_set(permissions, WRITE) && is_bitflag_set(permissions, EXEC));
    }
}
/// Team affiliation.
#[derive(Debug, PartialEq)]
pub enum Faction {
    Blue,
    Red,
}
impl Default for Faction {
    /// New entities start on the blue side.
    fn default() -> Self {
        Self::Blue
    }
}
|
//! Indexed collection of values.
//!
//! # Remarks
//!
//! With the ``prelude`` module, we can easily convert a tuple of ``IntoIterator``s
//! into ``Heatmap`` for ease of use. The same can be achieved with the
//! ``new`` method.
//!
//! # Examples
//!
//! Quick plot.
//! ```no_run
//! # use itertools::iproduct;
//! use preexplorer::prelude::*;
//! let values = iproduct!(0..10, 0..5).map(|(x, y)| x + y);
//! pre::Heatmap::new(0..10, 0..5, values).plot("my_identifier").unwrap();
//! ```
//!
//! Compare ``Heatmap``s.
//! ```no_run
//! # use itertools::iproduct;
//! use preexplorer::prelude::*;
//! pre::Heatmaps::new(vec![
//! pre::Heatmap::new(0..10, 0..5, iproduct!(0..10, 0..5).map(|(x, y)| x + y)),
//! pre::Heatmap::new(0..10, 0..5, iproduct!(0..10, 0..5).map(|(x, y)| x * y)),
//! ])
//! .plot("my_identifier").unwrap();
//! ```
// Traits
pub use crate::traits::{Configurable, Plotable, Saveable};
use core::fmt::Display;
use core::ops::Add;
/// Compare various ``Heatmap``s.
pub mod comparison;
pub use comparison::Heatmaps;
/// A grid of values indexed by ``xs`` and ``ys`` coordinates.
#[derive(Debug, PartialEq, Clone)]
pub struct Heatmap<T, S, U>
where
    T: Display + Clone,
    S: Display + Clone,
    U: Display + Clone,
{
    // Grid coordinates along the first axis.
    xs: Vec<T>,
    // Grid coordinates along the second axis.
    ys: Vec<S>,
    // Cell values stored row-major: the value for (xs[i], ys[j]) lives at
    // index i * ys.len() + j (see the Saveable impl).
    values: Vec<U>,
    // Plot configuration shared by all preexplorer structs.
    config: crate::configuration::Configuration,
}
impl<T, S, U> Heatmap<T, S, U>
where
    T: Display + Clone,
    S: Display + Clone,
    U: Display + Clone,
{
    /// Create a new ``Heatmap``.
    ///
    /// # Panics
    ///
    /// The number of values must be equal to the dimension of the grid.
    ///
    /// # Examples
    ///
    /// From a complicated computation.
    /// ```
    /// # use itertools::iproduct;
    /// use preexplorer::prelude::*;
    /// let values = iproduct!(0..10, 0..5).map(|(x, y)| x + y);
    /// let heatmap = pre::Heatmap::new(0..10, 0..5, values);
    /// ```
    pub fn new<I, J, K>(xs: I, ys: J, values: K) -> Heatmap<T, S, U>
    where
        I: IntoIterator<Item = T>,
        J: IntoIterator<Item = S>,
        K: IntoIterator<Item = U>,
    {
        let xs: Vec<T> = xs.into_iter().collect();
        let ys: Vec<S> = ys.into_iter().collect();
        let values: Vec<U> = values.into_iter().collect();
        // Enforce the documented "# Panics" contract in release builds too:
        // this was `debug_assert!`, which would let a mismatched grid slip
        // through in release and fail later (out-of-bounds) during
        // serialization. Message grammar also fixed.
        assert!(
            xs.len() * ys.len() == values.len(),
            "The number of values ({}) does not match the grid ({}x{})",
            values.len(),
            xs.len(),
            ys.len()
        );
        let config = crate::configuration::Configuration::default();
        Heatmap {
            xs,
            ys,
            values,
            config,
        }
    }
}
// `+` groups two heatmaps into a `Heatmaps` comparison; it does not sum
// their values.
impl<T, S, U> Add for Heatmap<T, S, U>
where
    T: Display + Clone,
    S: Display + Clone,
    U: Display + Clone,
{
    type Output = crate::Heatmaps<T, S, U>;
    fn add(self, other: crate::Heatmap<T, S, U>) -> crate::Heatmaps<T, S, U> {
        // Convert self into a single-element comparison, then append other.
        let mut cmp = self.into();
        cmp += other;
        cmp
    }
}
// Expose the embedded plot configuration to the Configurable trait.
impl<T, S, U> Configurable for Heatmap<T, S, U>
where
    T: Display + Clone,
    S: Display + Clone,
    U: Display + Clone,
{
    fn configuration_mut(&mut self) -> &mut crate::configuration::Configuration {
        &mut self.config
    }
    fn configuration(&self) -> &crate::configuration::Configuration {
        &self.config
    }
}
impl<T, S, U> Saveable for Heatmap<T, S, U>
where
    T: Display + Clone,
    S: Display + Clone,
    U: Display + Clone,
{
    /// Render the grid as gnuplot-friendly text: one `x<TAB>y<TAB>value`
    /// line per cell, with the y coordinate varying fastest.
    fn plotable_data(&self) -> String {
        let row_len = self.ys.len();
        let mut rows = String::new();
        for (i, x) in self.xs.iter().enumerate() {
            for (j, y) in self.ys.iter().enumerate() {
                // Values are stored row-major, so (i, j) maps to i*row_len + j.
                let value = &self.values[i * row_len + j];
                rows.push_str(&format!("{}\t{}\t{}\n", x, y, value));
            }
        }
        rows
    }
}
impl<T, S, U> Plotable for Heatmap<T, S, U>
where
    T: Display + Clone,
    S: Display + Clone,
    U: Display + Clone,
{
    // Wraps the shared opening/closing script fragments around a single
    // gnuplot `plot ... with image` command over the saved data file.
    fn plot_script(&self) -> String {
        let mut gnuplot_script = self.opening_plot_script();
        gnuplot_script += &format!("plot {:?} using 1:2:3 with image\n", self.data_path(),);
        gnuplot_script += &self.ending_plot_script();
        gnuplot_script
    }
}
impl<T> From<ndarray::Array2<T>> for Heatmap<usize, usize, T>
where
    T: Display + Clone,
{
    /// Build a heatmap from a 2-D array, using row/column indices as the
    /// grid coordinates.
    ///
    /// The y coordinates are reversed and the array transposed --
    /// presumably so the plotted orientation matches the array's visual
    /// layout; TODO confirm against the gnuplot output.
    fn from(array: ndarray::Array2<T>) -> Self {
        let shape = array.shape();
        let xs: Vec<usize> = (0..shape[0]).collect();
        let ys: Vec<usize> = (0..shape[1]).rev().collect();
        let values: Vec<T> = array.t().iter().cloned().collect();
        Heatmap::new(xs, ys, values)
    }
}
|
use ::opcodes::{AddressingMode, OpCode};
/// Numeric base of an immediate operand literal.
#[derive(Eq, PartialEq, Debug, Clone, Copy)]
pub enum ImmediateBase {
    // Decimal literal.
    Base10,
    // Hexadecimal literal.
    Base16,
}
#[derive(Clone, Debug, PartialEq )]
pub enum LexerToken {
Ident(String),
Assignment,
Address(String),
OpenParenthesis,
CloseParenthesis,
Comma,
Period,
Immediate(String, ImmediateBase),
Colon,
}
/// Tokens produced by the parser stage, consumed by code generation.
#[derive(Eq, PartialEq, Debug, Clone)]
pub enum ParserToken {
    // A label definition.
    Label(String),
    // A reference to a label used as an operand.
    LabelArg(String),
    // A decoded opcode (mnemonic plus addressing mode).
    OpCode(OpCode),
    // An absolute address operand, kept as raw text.
    Absolute(String),
    RawByte(u8),
    // `.org`-style directive setting the assembly origin.
    OrgDirective(u16),
    RawBytes(Vec<u8>),
}
|
use crate::error::ApiError;
use crate::todos::{ Todo, TodoInit };
use actix_web::{ delete, get, post, put, web, HttpResponse, Responder };
use tera::{ Tera, Context };
use serde_json::json;
// JSON API handlers for the todos resource.
#[get("/")]
async fn find_all() -> Result<HttpResponse, ApiError> {
    // Returns every todo as `{ "todos": [...] }`.
    let todos = Todo::find_all()?;
    Ok(HttpResponse::Ok().json(json!( { "todos": todos } )))
}
// Numeric-id lookup lives under `/id/{id}/` so it cannot collide with the
// free-text `/{val}/` search route below.
#[get("/id/{id}/")]
async fn find(id: web::Path<i32>) -> Result<HttpResponse, ApiError> {
    let todo = Todo::find(id.into_inner())?;
    Ok(HttpResponse::Ok().json(json!( { "todo": todo } )))
}
#[get("/{val}/")]
async fn find_by(val: web::Path<String>) -> Result<HttpResponse, ApiError> {
    // Free-text search; match semantics are defined by Todo::find_by.
    let todos = Todo::find_by(&val.into_inner())?;
    Ok(HttpResponse::Ok().json(json!( { "todos": todos } )))
}
#[post("/")]
async fn create(todo: web::Json<TodoInit>) -> Result<HttpResponse, ApiError> {
    // Creates a todo from the JSON body and echoes the stored record back.
    let todo = Todo::create(todo.into_inner())?;
    Ok(HttpResponse::Ok().json(json!( { "message": "Created", "todo": todo } )))
}
#[delete("/{id}/")]
async fn delete(id: web::Path<i32>) -> Result<HttpResponse, ApiError> {
    let v = id.into_inner();
    Todo::delete(v)?;
    let message = format!("{} Deleted", v);
    Ok(HttpResponse::Ok().json(json!( { "message": message } )))
}
#[put("/{id}/")]
async fn update(id: web::Path<i32>, todo: web::Json<TodoInit>) -> Result<HttpResponse, ApiError> {
    let id = id.into_inner();
    Todo::update(id, todo.into_inner())?;
    let message = format!("Todo with id {} updated", id);
    Ok(HttpResponse::Ok().json(json!({ "message": message })))
}
// Server-rendered (Tera) HTML views for the todos pages.
#[get("/")]
async fn view_list(tera: web::Data<Tera>) -> Result<impl Responder, ApiError> {
    let todos = Todo::find_all()?;
    let mut data = Context::new();
    data.insert("title", "My todos");
    data.insert("todos", &todos);
    // NOTE(review): template-render failures panic via unwrap here (and in
    // the other views) -- consider mapping them into ApiError instead.
    let rendered = tera.render("todos/todos.html", &data).unwrap();
    Ok(HttpResponse::Ok().body(rendered))
}
#[get("/{id}/")]
async fn view_edit(id: web::Path<i32>, tera: web::Data<Tera>) -> Result<impl Responder, ApiError> {
    let id = id.into_inner();
    let todo = Todo::find(id)?;
    let mut data = Context::new();
    data.insert("title", &format!("Edit todos {}", id));
    data.insert("todo", &todo);
    let rendered = tera.render("todos/edit.html", &data).unwrap();
    Ok(HttpResponse::Ok().body(rendered))
}
#[get("/add/")]
async fn view_add(tera: web::Data<Tera>) -> Result<impl Responder, ApiError> {
    let mut data = Context::new();
    data.insert("title", "Add new todo");
    let rendered = tera.render("todos/add.html", &data).unwrap();
    Ok(HttpResponse::Ok().body(rendered))
}
/// Register the JSON API handlers on `cfg`.
pub fn api_routes(cfg: &mut web::ServiceConfig) {
    cfg.service(find_all);
    cfg.service(find);
    cfg.service(find_by);
    cfg.service(create);
    cfg.service(delete);
    cfg.service(update);
}
/// Register the HTML view handlers on `cfg`.
///
/// `view_add` is registered before `view_edit` -- presumably so `/add/`
/// wins over the `/{id}/` pattern; confirm actix-web's matching order
/// before reordering these.
pub fn view_routes(cfg: &mut web::ServiceConfig) {
    cfg.service(view_add);
    cfg.service(view_list);
    cfg.service(view_edit);
}
|
use crate::iconv::Iconv;
use crate::iconv::IconvError;
use crate::locale_ffi::{__locale_struct, freelocale, newlocale, uselocale, LC_ALL_MASK, };
use std::ffi::CString;
use std::ptr;
/// Transliterates text in the same thread where it is called.
/// Be aware this calls the unsafe ffi function uselocale from C.
/// If you don't know if that is safe, preffer to use TextTransliterateOffThread instead
#[derive(Debug, Default)]
pub struct TextTransliterate;
impl TextTransliterate {
    /// Creates a new `TextTransliterate`.
    pub fn new() -> TextTransliterate {
        TextTransliterate {}
    }
    /// Switches the calling thread's locale to `locale` via the C `uselocale` API,
    /// freeing the previously installed thread locale when it is neither null nor
    /// the all-ones sentinel some systems return.
    ///
    /// NOTE(review): the locale handle returned by `newlocale` is never checked
    /// for null and is never freed by this type — presumably it is meant to stay
    /// installed for the thread's lifetime; confirm the leak is intentional.
    fn set_thread_locale<S: Into<String>>(&self, locale: S) -> Result<(), &'static str> {
        let locale = locale.into();
        if let Ok(locale) = CString::new(locale) {
            // The `CString` bound by the if-let stays alive for this whole
            // block (shadowing does not drop it), so this raw pointer is valid
            // for the `newlocale` call below.
            let locale = locale.as_ptr();
            let null: *mut __locale_struct = ptr::null_mut();
            unsafe {
                let locale = newlocale(LC_ALL_MASK, locale, null);
                let old_locale = uselocale(locale);
                //uselocale returns in some systems 0xffff_ffff_ffff_ffff instead of locale_t 0.
                if !old_locale.is_null()
                    && old_locale != 0xffff_ffff_ffff_ffff as *mut __locale_struct
                {
                    freelocale(old_locale)
                }
            };
            Ok(())
        } else {
            // The locale string contained an interior NUL byte.
            Err("Not able to decode locale text")
        }
    }
    /// Converts `text` to ASCII using iconv's `//TRANSLIT//IGNORE` conversion,
    /// after switching the current thread to `locale` (transliteration tables
    /// are locale dependent).
    ///
    /// Returns the transliterated string, or a `TransliterationError` describing
    /// which stage failed (locale switch, iconv open, conversion, UTF-8 decode).
    pub fn transliterate<S: Into<String>>(
        &self,
        text: S,
        locale: S,
    ) -> Result<String, TransliterationError> {
        let text = text.into();
        let locale = locale.into();
        if self.set_thread_locale(locale).is_ok() {
            let iconv = Iconv::new("ascii//TRANSLIT//IGNORE", "utf-8");
            if let Ok(mut iconv) = iconv {
                let mut buf = Vec::new();
                let result = iconv.convert(&text.as_bytes(), &mut buf, 0);
                if let Err(error) = result {
                    return Err(TransliterationError::IconvError(error));
                }
                let output_utf8 = String::from_utf8(buf);
                match output_utf8 {
                    Ok(output) => Ok(output),
                    Err(error) => Err(TransliterationError::IconvOutputNotUtf8(error)),
                }
            } else {
                Err(TransliterationError::IconvStartFailed)
            }
        } else {
            Err(TransliterationError::ErrorChangingThreadLocale)
        }
    }
}
/// Errors that [`TextTransliterate::transliterate`] can produce.
#[derive(Debug)]
pub enum TransliterationError {
    /// iconv failed while converting the input bytes.
    IconvError(IconvError),
    /// The iconv conversion descriptor could not be opened.
    IconvStartFailed,
    /// The thread locale could not be changed.
    ErrorChangingThreadLocale,
    /// iconv produced bytes that are not valid UTF-8.
    IconvOutputNotUtf8(std::string::FromUtf8Error),
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn it_works() {
        let tt = TextTransliterate::new();
        // NOTE(review): this test depends on the de_DE.UTF-8 locale being
        // installed on the host and on the libc's transliteration tables;
        // the expected output may differ on non-glibc systems — confirm in CI.
        let result = tt.transliterate("ü ä ö ß Ü Ä Ö ç ñ 的 😒", "de_DE.UTF-8");
        if let Ok(result) = result {
            // Characters with no ASCII equivalent come back as '?' because of
            // the //TRANSLIT//IGNORE conversion target.
            assert_eq!("ue ae oe ss UE AE OE c n ? ?", result);
        } else {
            unreachable!()
        }
    }
}
|
#[macro_use] extern crate nickel;
use nickel::{Nickel, HttpRouter, StaticFilesHandler, FaviconHandler};
use std::collections::HashMap;
// Needed For Post
extern crate hyper;
use std::io::Read;
use hyper::Client;
use hyper::header::Connection;
// Needed For Post
/// Entry point: configures a nickel server with the site routes and static
/// file handler, then listens on port 3000.
fn main() {
    //Implement A Server
    let mut server = Nickel::new();
    //Route Using Mustache Templateing Agent
    // Landing page.
    server.get("/", middleware! { |req, mut res|
        let mut data = HashMap::<&str, &str>::new();
        data.insert("tab-title","Verge | Web Development, Design, and SEO Company In Tucson");
        data.insert("title","<span style=\"margin-left: 28.9px; position: absolute; margin-top: 17px;vertical-align: middle;\">V</span><span style=\"font-size: 145%; color: #C36159;vertical-align: middle;\">◎</span><span style=\"margin-left: -7px; vertical-align: middle;\">erge</span></h1>");
        data.insert("sub-title", "Web Development");
        data.insert("hint", "<div id=\"hint\"><span id=\"message\">Use Arrow Keys</span><div>❯</div></div>");
        return res.render("views/index.hbs", &data);
    });
    // Contact form page.
    server.get("/contact", middleware! { |req, mut res|
        let mut data = HashMap::new();
        data.insert("tab-title","Contact Us | Web Development, Design, and SEO Company In Tucson");
        data.insert("title","Contact");
        // let mut s = String::new();
        // req.origin.read_to_string(&mut s);
        return res.render("views/contact.hbs", &data)
    });
    // NOTE(review): this handler reads the posted form body and logs it but
    // everything that would produce a response is commented out, so
    // POST /confirmation returns no rendered page — confirm intended behavior.
    server.post("/confirmation", middleware!{ |req, res|
        let mut form_data = String::new();
        // NOTE(review): the io::Result returned here is ignored.
        req.origin.read_to_string(&mut form_data);
        println!("{}", form_data);
        // let mut data = HashMap::new();
        // data.insert("tab-title","Submit | Verge Web Development, Design, and SEO Company In Tucson");
        // data.insert("title","Submit");
        // data.insert("confirmation", &the);
        // return res.render("views/confirmation.hbs", &data)
    });
    // About page.
    server.get("/about", middleware!{ |req, res|
        let mut data = HashMap::new();
        data.insert("tab-title","About");
        data.insert("title","About");
        return res.render("views/about.hbs", &data);
    });
    //Serve A Static Directory
    server.utilize(StaticFilesHandler::new("public/"));
    //server.utilize(FaviconHandler::new("/images/file"));
    server.listen("0.0.0.0:3000");
}
|
//! Texture resource handling.
use std::error::Error;
use std::fmt::{self, Display, Formatter};
use futures::{Async, Future, Poll};
use gfx::format::SurfaceType;
use imagefmt::ColFmt;
use rayon::ThreadPool;
use renderer::{Error as RendererError, Texture, TextureBuilder};
use assets::{Asset, AssetFuture, AssetPtr, AssetSpec, Cache, Context};
use assets::formats::textures::ImageData;
use ecs::{Component, VecStorage};
use ecs::rendering::resources::{Factory, FactoryFuture};
/// Error that can occur during texture creation
#[derive(Debug)]
pub enum TextureError {
    /// Error occurred in renderer
    Renderer(RendererError),
    /// Color format unsupported
    UnsupportedColorFormat(ColFmt),
    /// Texture is oversized
    UnsupportedSize {
        /// Maximum size of texture (width, height)
        max: (usize, usize),
        /// Image size (width, height)
        got: (usize, usize),
    },
}
impl Error for TextureError {
    fn description(&self) -> &str {
        match *self {
            TextureError::Renderer(ref err) => err.description(),
            TextureError::UnsupportedColorFormat(_) => "Unsupported color format",
            TextureError::UnsupportedSize { .. } => "Unsupported size",
        }
    }
    fn cause(&self) -> Option<&Error> {
        // Only renderer errors wrap an underlying cause.
        match *self {
            TextureError::Renderer(ref err) => Some(err),
            _ => None,
        }
    }
}
impl Display for TextureError {
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        match *self {
            TextureError::Renderer(ref err) => write!(fmt, "Render error: {}", err),
            TextureError::UnsupportedColorFormat(col_fmt) => {
                write!(fmt, "Unsupported color format: {:?}", col_fmt)
            }
            TextureError::UnsupportedSize { max, got } => {
                write!(fmt, "Unsupported size. max: {:?}, got: {:?}", max, got)
            }
        }
    }
}
/// Internal state of a `TextureFuture`: either a pending factory future or a
/// one-shot error to report on the first poll.
enum Inner {
    Factory(FactoryFuture<Texture, RendererError>),
    // `Option` lets the error be moved out by value when polled.
    Err(Option<TextureError>),
}
/// Will be `TextureComponent` result type of `TextureContext::create_asset`
pub struct TextureFuture(Inner);
impl TextureFuture {
    /// Wraps a pending factory texture-creation future.
    fn factory(future: FactoryFuture<Texture, RendererError>) -> Self {
        TextureFuture(Inner::Factory(future))
    }
    /// Creates a future that fails immediately with `error`.
    fn from_error(error: TextureError) -> Self {
        TextureFuture(Inner::Err(Some(error)))
    }
    /// Immediate failure: the image color format is not supported.
    fn unsupported_color_format(fmt: ColFmt) -> Self {
        Self::from_error(TextureError::UnsupportedColorFormat(fmt))
    }
    /// Immediate failure: the image exceeds the maximum `u16` dimensions.
    fn unsupported_size(width: usize, height: usize) -> Self {
        Self::from_error(TextureError::UnsupportedSize {
            max: (u16::max_value() as usize, u16::max_value() as usize),
            got: (width, height),
        })
    }
}
impl Future for TextureFuture {
    type Item = TextureComponent;
    type Error = TextureError;
    /// Resolves to a `TextureComponent` once the factory finishes, or fails
    /// with the stored/renderer error.
    fn poll(&mut self) -> Poll<TextureComponent, TextureError> {
        match self.0 {
            Inner::Factory(ref mut future) => match future.poll() {
                Ok(Async::NotReady) => Ok(Async::NotReady),
                Ok(Async::Ready(texture)) => Ok(Async::Ready(TextureComponent::new(texture))),
                Err(err) => Err(TextureError::Renderer(err)),
            },
            // The error is taken by value, so polling again after completion
            // violates the futures contract and panics deliberately.
            Inner::Err(ref mut err) => Err(err.take().expect("polling completed future")),
        }
    }
}
/// Wraps `Texture` into component
#[derive(Clone, Debug)]
pub struct TextureComponent(pub AssetPtr<Texture, TextureComponent>);
impl AsRef<Texture> for TextureComponent {
    // Borrow the wrapped texture.
    fn as_ref(&self) -> &Texture {
        self.0.inner_ref()
    }
}
impl AsMut<Texture> for TextureComponent {
    // Mutably borrow the wrapped texture.
    fn as_mut(&mut self) -> &mut Texture {
        self.0.inner_mut()
    }
}
impl TextureComponent {
    /// Create new `TextureComponent` from `Texture`
    pub fn new(texture: Texture) -> Self {
        TextureComponent(AssetPtr::new(texture))
    }
}
impl Component for TextureComponent {
    type Storage = VecStorage<Self>;
}
impl Asset for TextureComponent {
    type Context = TextureContext;
}
/// Context to create textures from images
pub struct TextureContext {
    /// Cache of pending/loaded texture futures, keyed by asset spec.
    cache: Cache<AssetFuture<TextureComponent>>,
    /// Factory used to create GPU textures.
    factory: Factory,
}
impl TextureContext {
    /// Creates a new `TextureContext` with an empty cache.
    pub(crate) fn new(factory: Factory) -> Self {
        TextureContext {
            cache: Cache::new(),
            // Field-init shorthand instead of the redundant `factory: factory`.
            factory,
        }
    }
}
impl Context for TextureContext {
    type Asset = TextureComponent;
    type Data = ImageData;
    type Error = TextureError;
    type Result = TextureFuture;
    fn category(&self) -> &'static str {
        "texture"
    }
    /// Builds a texture-creation future from decoded image data, failing fast
    /// for unsupported color formats or oversized images.
    fn create_asset(&self, image: ImageData, _: &ThreadPool) -> TextureFuture {
        // Map the image color format to a renderer surface type; anything
        // other than RGBA/BGRA is rejected.
        fn convert_color_format(fmt: ColFmt) -> Option<SurfaceType> {
            match fmt {
                ColFmt::Auto => unreachable!(),
                ColFmt::RGBA => Some(SurfaceType::R8_G8_B8_A8),
                ColFmt::BGRA => Some(SurfaceType::B8_G8_R8_A8),
                _ => None,
            }
        }
        let image = image.raw;
        let fmt = match convert_color_format(image.fmt) {
            Some(fmt) => fmt,
            None => return TextureFuture::unsupported_color_format(image.fmt),
        };
        // Dimensions are handed to the builder as u16, so each axis must fit.
        if image.w > u16::max_value() as usize || image.h > u16::max_value() as usize {
            return TextureFuture::unsupported_size(image.w, image.h);
        }
        let tb = TextureBuilder::new(image.buf)
            .with_format(fmt)
            .with_size(image.w as u16, image.h as u16);
        TextureFuture::factory(self.factory.create_texture(tb))
    }
    fn cache(&self, spec: AssetSpec, asset: AssetFuture<TextureComponent>) {
        self.cache.insert(spec, asset);
    }
    fn retrieve(&self, spec: &AssetSpec) -> Option<AssetFuture<TextureComponent>> {
        self.cache.get(spec)
    }
    /// Hot-reload path: if the cached future already resolved, push the new
    /// future as an update on the existing asset pointer; otherwise replace
    /// the cache entry.
    fn update(&self, spec: &AssetSpec, asset: AssetFuture<TextureComponent>) {
        if let Some(asset) = self.cache
            .access(spec, |a| match a.peek() {
                Some(Ok(a)) => {
                    a.0.push_update(asset);
                    None
                }
                _ => Some(asset),
            })
            .and_then(|a| a)
        {
            self.cache.insert(spec.clone(), asset);
        }
    }
    /// Drops resolved cache entries whose asset is no longer shared elsewhere.
    fn clear(&self) {
        self.cache.retain(|_, a| match a.peek() {
            Some(Ok(a)) => a.0.is_shared(),
            _ => true,
        });
    }
    fn clear_all(&self) {
        self.cache.clear_all();
    }
}
|
#[doc = "Reader of register PERIPH_ID_7"]
pub type R = crate::R<u32, super::PERIPH_ID_7>;
#[doc = "Reader of field `PERIPH_ID_7`"]
pub type PERIPH_ID_7_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:7 - not used"]
    #[inline(always)]
    pub fn periph_id_7(&self) -> PERIPH_ID_7_R {
        // Extract the low byte of the register value.
        PERIPH_ID_7_R::new((self.bits & 0xff) as u8)
    }
}
|
extern crate rocket;
use rocket::http::RawStr;
#[get("/msg/<msg>")]
/// Echoes the raw `msg` path segment back as an owned string.
fn handler(msg: &RawStr) -> String {
    String::from(msg.as_str())
}
// NOTE(review): this impl does not compile as written — `handlers()` declares
// no return type, so the trailing `vec![...]` expression mismatches the
// implicit `()` return. The `MsgController` trait and `Controller` type are
// not visible here; confirm the trait's signature (presumably
// `fn handlers() -> Vec<...>`) and fix accordingly.
impl MsgController for Controller {
    fn handlers() {
        vec![
            handler,
            handler
        ]
    }
}
pub mod linksdb;
|
extern crate rusty_machine as rm;
use rm::learning::optim::Optimizable;
use rm::linalg::Matrix;
use rm::learning::optim::grad_desc::GradientDesc;
use rm::learning::optim::OptimAlgorithm;
// f(x,y)=((x,y)-(cx,cy))^2
struct XSqModel2D {
    /// Center (cx, cy) of the paraboloid being minimized.
    c: (f64, f64),
}
impl Optimizable for XSqModel2D {
    // `main` passes `Matrix::zeros(1, 1)` (a Matrix<f64>) for inputs/targets
    // and `&[f64]` start parameters, so the associated types must be f64-based;
    // the old `Matrix<(f64, f64)>` could never match those call sites.
    type Inputs = Matrix<f64>;
    type Targets = Matrix<f64>;
    /// Computes f(x, y) = (x - cx)^2 + (y - cy)^2 and its gradient
    /// [2(x - cx), 2(y - cy)] for `params = [x, y]`.
    ///
    /// Fixes in the original: tuple fields are `.0`/`.1` (not `.1`/`.2`),
    /// the cost was computed as `() * (...)` (a type error), and the gradient
    /// only covered one dimension.
    fn compute_grad(&self, params: &[f64], _: &Matrix<f64>, _: &Matrix<f64>) -> (f64, Vec<f64>) {
        let x_dif = params[0] - self.c.0;
        let y_dif = params[1] - self.c.1;
        let value = x_dif * x_dif + y_dif * y_dif;
        let first_derivate = vec![2.0 * x_dif, 2.0 * y_dif];
        (value, first_derivate)
    }
}
/// Runs gradient descent toward the minimum of the 2-D paraboloid at (10, 10).
fn main() {
    let x_sq = XSqModel2D { c: (10.0, 10.0) };
    let gd = GradientDesc::default();
    // Two starting parameters — one per dimension; the original passed a
    // single value to a 2-D model.
    let test_data = vec![500f64, 500f64];
    let params = gd.optimize(&x_sq,
                             &test_data[..],
                             &Matrix::zeros(1, 1),
                             &Matrix::zeros(1, 1));
    // Fixed the stray ')' in the original format string and print both params.
    println!("params: {:?}", &params[..]);
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use super::validate;
/// A single step of a trial: the actions to apply to the data under test.
pub struct Step {
    pub actions: Vec<validate::Action>,
    //metrics: Vec<metrics::Metric>, Ignore for now - will be in a future CL
}
/// A named sequence of steps to run.
pub struct Trial {
    pub name: String,
    pub steps: Vec<Step>,
}
/// Language- or library-specific quirks needed for proper prediction
/// of the VMO contents.
pub struct Quirks {
    /// In some languages (such as C++) creating two things with the same
    /// name creates two entries in the VMO. In others (such as Dart) the
    /// second entry will replace the first.
    replace_same_name: bool,
}
/// The set of trials to run together with the quirks of the implementation
/// under test.
pub struct TrialSet {
    quirks: Quirks,
}
impl Step {
    // Intentionally empty for now: metric processing is planned for a later CL.
    /* Ignore for now - will be in a future CL
    pub fn run_metrics(
        &self,
        info: &InfoTree,
        results: &mut results::Results,
    ) -> Result<(), Error> {
        for metric in self.metrics.iter() {
            metric.process(info, results)?;
        }
        Ok(())
    }*/
}
impl TrialSet {
    /// Call this if the second create with the same name replaces the first entry.
    /// Don't call this if creating two of the same name makes two entries.
    #[allow(dead_code)]
    pub fn replace_same_name(&mut self) {
        self.quirks.replace_same_name = true;
    }
    /// Returns every trial to run; currently only the basic trial.
    pub fn trials() -> Vec<Trial> {
        vec![basic_trial()]
    }
    /// Read access to the quirks configured for this set.
    pub fn quirks(&self) -> &Quirks {
        &self.quirks
    }
}
impl Quirks {
    /// True when a second create with an existing name replaces the entry
    /// instead of adding a duplicate.
    #[allow(dead_code)]
    pub fn does_same_name_replace(&self) -> bool {
        self.replace_same_name
    }
}
/// A minimal trial: create one child node under the root, then delete it.
fn basic_trial() -> Trial {
    Trial {
        name: "Basic Trial".into(),
        steps: vec![Step {
            actions: vec![
                validate::Action::CreateNode(validate::CreateNode {
                    parent: validate::ROOT_ID,
                    id: 1,
                    name: "child".into(),
                }),
                validate::Action::DeleteNode(validate::DeleteNode { id: 1 }),
            ],
            // metrics: vec![],
        }],
    }
}
/// Builds the default trial set with no quirks enabled.
pub fn trial_set() -> TrialSet {
    let quirks = Quirks { replace_same_name: false };
    TrialSet { quirks }
}
|
pub use macro_generator::*;
pub use core::Cake;
|
// use quicli::prelude::*;
// use quicli::prelude::structopt::StructOpt;
// use super::*;
// use quicli::prelude::structopt::*;
// use structopt::*;
use std::path::{PathBuf};
/// Input/output arguments shared by the disassembler commands.
#[derive(StructOpt, Debug)]
struct IOArg {
    #[structopt(help = "input file of hex-string/binary data (or reading from stdin if not given)")]
    input_file: Option<String>,
    // #[structopt(short = "o", long = "output", help = "output (default: print to terminal)", parse(from_os_str))]
    // output: Option<PathBuf>,
}
/// Assembly-syntax and base-address options.
#[derive(StructOpt, Debug)]
struct AsmArg {
    // Fixed help text: removed the stray closing parenthesis at the end.
    #[structopt(long = "att", help = "use AT&T assembly instead of default Intel syntax")]
    att_syntax: bool,
    #[structopt(short = "b", long = "base", help = "base address (default: 0x0)")]
    base_address: Option<usize>,
}
/// Supplementary per-instruction output options.
///
/// NOTE(review): the short flag "b" here collides with `AsmArg::base_address`
/// ("-b"); both structs are flattened into `RdisOpt`, which will make the
/// argument parser reject the duplicate at startup — confirm and rename one.
#[derive(StructOpt, Debug)]
struct SuppOpt {
    #[structopt(short = "v", long = "verbose", help = "show instruction's details", parse(from_occurrences))]
    verbose: u8,
    #[structopt(short = "a", long = "address", help = "show instruction's address")]
    show_address: bool,
    #[structopt(short = "b", long = "binary", help = "show instruction's binary data")]
    show_binary: bool
}
/// Top-level CLI options, combining the flattened option groups.
#[derive(StructOpt, Debug)]
#[structopt(name = "capstone-based x86/amd64 disassembler")]
struct RdisOpt {
    #[structopt(flatten)]
    io: IOArg,
    #[structopt(flatten)]
    syntax: AsmArg,
    #[structopt(flatten)]
    supp: SuppOpt,
}
/// Resolved runtime configuration derived from the CLI options.
pub(super) struct Config<'a> {
    pub asm_file: Option<&'a str>, // read from stdin if None
    pub att_syntax: bool, // false: intel, true: att
    pub base_address: usize,
    pub show_address: bool, // true: show instruction's address
    pub show_binary: bool, // true: show instruction's binary data
    // NOTE(review): no corresponding CLI flag exists in RdisOpt/SuppOpt for
    // this field — presumably set elsewhere; confirm.
    pub show_rw_registers: bool, // true: show instruction's read/write registers
}
#![allow(non_camel_case_types)]
/// Unsigned 16-bit integer stored in little-endian byte order.
pub struct u16_l(u16);
/// Unsigned 32-bit integer stored in little-endian byte order.
pub struct u32_l(u32);
/// Unsigned 64-bit integer stored in little-endian byte order.
pub struct u64_l(u64);
/// Unsigned 16-bit integer stored in big-endian byte order.
pub struct u16_b(u16);
/// Unsigned 32-bit integer stored in big-endian byte order.
pub struct u32_b(u32);
/// Unsigned 64-bit integer stored in big-endian byte order.
pub struct u64_b(u64);
/// Signed 16-bit integer stored in little-endian byte order.
pub struct i16_l(i16);
/// Signed 32-bit integer stored in little-endian byte order.
pub struct i32_l(i32);
/// Signed 64-bit integer stored in little-endian byte order.
pub struct i64_l(i64);
/// Signed 16-bit integer stored in big-endian byte order.
pub struct i16_b(i16);
/// Signed 32-bit integer stored in big-endian byte order.
pub struct i32_b(i32);
/// Signed 64-bit integer stored in big-endian byte order.
pub struct i64_b(i64);
use std::fmt::{Debug, Display, Error as FmtError, Formatter};
use decode::{Decodable, DecodeError, ReadBuffer};
use encode::EncodableSized;
/// Implements construction, encoding, decoding, `Debug` and `Display` for an
/// endian-aware integer newtype. `$to_en`/`$from_en` convert between the
/// native representation and the stored wire-order representation.
macro_rules! impl_prim_endian {
    ($t: ty, $t_exp: expr, $inner_t: ty, $size: expr, $en_s: expr, $to_en: ident, $from_en: path) => {
        impl $t {
            /// Create a new value, converting `n` into the stored endianness.
            pub fn new(n: $inner_t) -> $t {
                $t_exp(n.$to_en())
            }
            /// Get inner value whose endian is os specific.
            pub fn value(&self) -> $inner_t {
                $from_en(self.0)
            }
        }
        impl EncodableSized for $t {
            const SIZE: usize = $size;
            type Array = [u8; $size];
            fn bytes(&self) -> [u8; $size] {
                // SAFETY: the inner integer and [u8; SIZE] have identical size,
                // and every integer bit pattern is a valid byte array. `self.0`
                // is already stored in the target endianness.
                unsafe { ::std::mem::transmute::<$inner_t, [u8; $size]>(self.0) }
            }
        }
        impl Decodable for $t {
            fn decode<R: ReadBuffer>(buf: &mut R) -> Result<$t, DecodeError> {
                let bytes = buf.read_bytes($size)?;
                // SAFETY: `bytes` refers to at least SIZE readable bytes.
                // `read_unaligned` is required because the buffer gives no
                // alignment guarantee for the integer type; the previous plain
                // dereference was undefined behavior on misaligned input.
                Ok($t_exp(unsafe {
                    ::std::ptr::read_unaligned(bytes as *const _ as *const $inner_t)
                }))
            }
        }
        impl Debug for $t {
            fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
                // ex) "Big endian 42"
                write!(f, "{} {}", $en_s, self.value())
            }
        }
        impl Display for $t {
            fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
                // ex) "42"
                write!(f, "{}", self.value())
            }
        }
    };
}
// Generate the endian-aware wrappers for every fixed-width integer type.
impl_prim_endian!(u16_l, u16_l, u16, 2, "Little endian", to_le, u16::from_le);
impl_prim_endian!(u32_l, u32_l, u32, 4, "Little endian", to_le, u32::from_le);
impl_prim_endian!(u64_l, u64_l, u64, 8, "Little endian", to_le, u64::from_le);
impl_prim_endian!(u16_b, u16_b, u16, 2, "Big endian", to_be, u16::from_be);
impl_prim_endian!(u32_b, u32_b, u32, 4, "Big endian", to_be, u32::from_be);
impl_prim_endian!(u64_b, u64_b, u64, 8, "Big endian", to_be, u64::from_be);
impl_prim_endian!(i16_l, i16_l, i16, 2, "Little endian", to_le, i16::from_le);
impl_prim_endian!(i32_l, i32_l, i32, 4, "Little endian", to_le, i32::from_le);
impl_prim_endian!(i64_l, i64_l, i64, 8, "Little endian", to_le, i64::from_le);
impl_prim_endian!(i16_b, i16_b, i16, 2, "Big endian", to_be, i16::from_be);
impl_prim_endian!(i32_b, i32_b, i32, 4, "Big endian", to_be, i32::from_be);
impl_prim_endian!(i64_b, i64_b, i64, 8, "Big endian", to_be, i64::from_be);
// `u8`/`i8` are single bytes, so endianness does not apply; encoding and
// decoding pass the byte straight through.
impl EncodableSized for u8 {
    const SIZE: usize = 1;
    type Array = [u8; 1];
    fn bytes(&self) -> [u8; 1] {
        [*self]
    }
}
impl Decodable for u8 {
    fn decode<R: ReadBuffer>(buf: &mut R) -> Result<u8, DecodeError> {
        Ok(buf.read_bytes(1)?[0])
    }
}
impl EncodableSized for i8 {
    const SIZE: usize = 1;
    type Array = [u8; 1];
    fn bytes(&self) -> [u8; 1] {
        [*self as u8]
    }
}
impl Decodable for i8 {
    fn decode<R: ReadBuffer>(buf: &mut R) -> Result<i8, DecodeError> {
        // Read one byte and reinterpret its bit pattern as signed.
        Ok(buf.read::<u8>()? as i8)
    }
}
|
use amethyst::{
assets::{PrefabData, ProgressCounter},
derive::PrefabData,
ecs::Entity,
Error,
};
use serde::{Deserialize, Serialize};
use crate::components::map::MapCoords;
use crate::components::npc::{Named, Movement};
/// Prefab data for a "stalker" NPC: its name, map position, and movement
/// behavior, loadable from a prefab file via `PrefabData`.
#[derive(Debug, Deserialize, Serialize, PrefabData)]
#[serde(deny_unknown_fields)]
pub struct Stalker {
    name: Named,
    position: MapCoords,
    movement: Movement,
}
pub mod path_fixer;
/// Returns `true` when `a` and `b` contain the same elements with the same
/// multiplicities — i.e. one is a permutation of the other — requiring only
/// `PartialEq` on the element type.
///
/// Fixes the original, which only checked that each element of `a` appears
/// somewhere in `b`: it reported `[1]` vs `[1, 2]` and `[1, 1]` vs `[1, 2]`
/// as equal.
// TODO: Find a way to do this faster. (O(n^2) is forced by having only
// `PartialEq` — no `Hash`/`Ord` — but small inputs are fine.)
pub fn is_slice_equal_permutation<T: PartialEq>(a: &[T], b: &[T]) -> bool {
    // Permutations must have identical lengths; this also handles the
    // empty-vs-nonempty cases uniformly.
    if a.len() != b.len() {
        return false;
    }
    // Every element must occur the same number of times in both slices.
    a.iter().all(|x| {
        let in_a = a.iter().filter(|y| *y == x).count();
        let in_b = b.iter().filter(|y| *y == x).count();
        in_a == in_b
    })
}
|
#[doc = "Register `RSR` reader"]
pub type R = crate::R<RSR_SPEC>;
#[doc = "Register `RSR` writer"]
pub type W = crate::W<RSR_SPEC>;
#[doc = "Field `RMVF` reader - remove reset flag Set and reset by software to reset the value of the reset flags."]
pub type RMVF_R = crate::BitReader<RMVF_A>;
#[doc = "remove reset flag Set and reset by software to reset the value of the reset flags.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RMVF_A {
    #[doc = "0: Not clearing the reset flags"]
    NotActive = 0,
    #[doc = "1: Clear the reset flags"]
    Clear = 1,
}
impl From<RMVF_A> for bool {
    #[inline(always)]
    fn from(variant: RMVF_A) -> Self {
        variant as u8 != 0
    }
}
impl RMVF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RMVF_A {
        match self.bits {
            false => RMVF_A::NotActive,
            true => RMVF_A::Clear,
        }
    }
    #[doc = "Not clearing the reset flags"]
    #[inline(always)]
    pub fn is_not_active(&self) -> bool {
        *self == RMVF_A::NotActive
    }
    #[doc = "Clear the reset flags"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == RMVF_A::Clear
    }
}
#[doc = "Field `RMVF` writer - remove reset flag Set and reset by software to reset the value of the reset flags."]
pub type RMVF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RMVF_A>;
impl<'a, REG, const O: u8> RMVF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Not clearing the reset flags"]
    #[inline(always)]
    pub fn not_active(self) -> &'a mut crate::W<REG> {
        self.variant(RMVF_A::NotActive)
    }
    #[doc = "Clear the reset flags"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(RMVF_A::Clear)
    }
}
#[doc = "Field `CDRSTF` reader - CPU domain power-switch reset flag Reset by software by writing the RMVF bit. Set by hardware when the CPU domain exits from DStop or after a power-on reset. Set also when the CPU domain exits DStop2 but only when a pad reset has occurred during DStop2 (PINRST bit also set by hardware)"]
pub type CDRSTF_R = crate::BitReader<CDRSTFR_A>;
#[doc = "CPU domain power-switch reset flag Reset by software by writing the RMVF bit. Set by hardware when the CPU domain exits from DStop or after a power-on reset. Set also when the CPU domain exits DStop2 but only when a pad reset has occurred during DStop2 (PINRST bit also set by hardware)\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CDRSTFR_A {
    #[doc = "0: No reset occurred for block"]
    NoResetOccoured = 0,
    #[doc = "1: Reset occurred for block"]
    ResetOccourred = 1,
}
impl From<CDRSTFR_A> for bool {
    #[inline(always)]
    fn from(variant: CDRSTFR_A) -> Self {
        variant as u8 != 0
    }
}
impl CDRSTF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CDRSTFR_A {
        match self.bits {
            false => CDRSTFR_A::NoResetOccoured,
            true => CDRSTFR_A::ResetOccourred,
        }
    }
    #[doc = "No reset occurred for block"]
    #[inline(always)]
    pub fn is_no_reset_occoured(&self) -> bool {
        *self == CDRSTFR_A::NoResetOccoured
    }
    #[doc = "Reset occurred for block"]
    #[inline(always)]
    pub fn is_reset_occourred(&self) -> bool {
        *self == CDRSTFR_A::ResetOccourred
    }
}
#[doc = "Field `BORRSTF` reader - BOR reset flag Reset by software by writing the RMVF bit. Set by hardware when a BOR reset occurs (pwr_bor_rst)."]
pub use CDRSTF_R as BORRSTF_R;
#[doc = "Field `PINRSTF` reader - pin reset flag (NRST) Reset by software by writing the RMVF bit. Set by hardware when a reset from pin occurs."]
pub use CDRSTF_R as PINRSTF_R;
#[doc = "Field `PORRSTF` reader - POR/PDR reset flag Reset by software by writing the RMVF bit. Set by hardware when a POR/PDR reset occurs."]
pub use CDRSTF_R as PORRSTF_R;
#[doc = "Field `SFTRSTF` reader - system reset from CPU reset flag Reset by software by writing the RMVF bit. Set by hardware when the system reset is due to CPU.The CPU can generate a system reset by writing SYSRESETREQ bit of AIRCR register of the core M7."]
pub use CDRSTF_R as SFTRSTF_R;
#[doc = "Field `IWDGRSTF` reader - independent watchdog reset flag Reset by software by writing the RMVF bit. Set by hardware when an independent watchdog reset occurs."]
pub use CDRSTF_R as IWDGRSTF_R;
#[doc = "Field `WWDGRSTF` reader - window watchdog reset flag Reset by software by writing the RMVF bit. Set by hardware when a window watchdog reset occurs."]
pub use CDRSTF_R as WWDGRSTF_R;
#[doc = "Field `LPWRRSTF` reader - reset due to illegal CD DStop or CD DStop2 or CPU CStop flag Reset by software by writing the RMVF bit. Set by hardware when the CPU domain goes erroneously in DStop or DStop2, or when the CPU goes erroneously in CStop."]
pub use CDRSTF_R as LPWRRSTF_R;
impl R {
    #[doc = "Bit 16 - remove reset flag Set and reset by software to reset the value of the reset flags."]
    #[inline(always)]
    pub fn rmvf(&self) -> RMVF_R {
        RMVF_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 19 - CPU domain power-switch reset flag Reset by software by writing the RMVF bit. Set by hardware when the CPU domain exits from DStop or after a power-on reset. Set also when the CPU domain exits DStop2 but only when a pad reset has occurred during DStop2 (PINRST bit also set by hardware)"]
    #[inline(always)]
    pub fn cdrstf(&self) -> CDRSTF_R {
        CDRSTF_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 21 - BOR reset flag Reset by software by writing the RMVF bit. Set by hardware when a BOR reset occurs (pwr_bor_rst)."]
    #[inline(always)]
    pub fn borrstf(&self) -> BORRSTF_R {
        BORRSTF_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - pin reset flag (NRST) Reset by software by writing the RMVF bit. Set by hardware when a reset from pin occurs."]
    #[inline(always)]
    pub fn pinrstf(&self) -> PINRSTF_R {
        PINRSTF_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - POR/PDR reset flag Reset by software by writing the RMVF bit. Set by hardware when a POR/PDR reset occurs."]
    #[inline(always)]
    pub fn porrstf(&self) -> PORRSTF_R {
        PORRSTF_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - system reset from CPU reset flag Reset by software by writing the RMVF bit. Set by hardware when the system reset is due to CPU.The CPU can generate a system reset by writing SYSRESETREQ bit of AIRCR register of the core M7."]
    #[inline(always)]
    pub fn sftrstf(&self) -> SFTRSTF_R {
        SFTRSTF_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 26 - independent watchdog reset flag Reset by software by writing the RMVF bit. Set by hardware when an independent watchdog reset occurs."]
    #[inline(always)]
    pub fn iwdgrstf(&self) -> IWDGRSTF_R {
        IWDGRSTF_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 28 - window watchdog reset flag Reset by software by writing the RMVF bit. Set by hardware when a window watchdog reset occurs."]
    #[inline(always)]
    pub fn wwdgrstf(&self) -> WWDGRSTF_R {
        WWDGRSTF_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 30 - reset due to illegal CD DStop or CD DStop2 or CPU CStop flag Reset by software by writing the RMVF bit. Set by hardware when the CPU domain goes erroneously in DStop or DStop2, or when the CPU goes erroneously in CStop."]
    #[inline(always)]
    pub fn lpwrrstf(&self) -> LPWRRSTF_R {
        LPWRRSTF_R::new(((self.bits >> 30) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 16 - remove reset flag Set and reset by software to reset the value of the reset flags."]
    #[inline(always)]
    #[must_use]
    pub fn rmvf(&mut self) -> RMVF_W<RSR_SPEC, 16> {
        RMVF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "RCC reset status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rsr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rsr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RSR_SPEC;
impl crate::RegisterSpec for RSR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rsr::R`](R) reader structure"]
impl crate::Readable for RSR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rsr::W`](W) writer structure"]
impl crate::Writable for RSR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RSR to value 0x00e8_0000"]
impl crate::Resettable for RSR_SPEC {
    const RESET_VALUE: Self::Ux = 0x00e8_0000;
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod servers {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// Gets information about a MySQL flexible server.
///
/// Sends `GET .../Microsoft.DBForMySql/flexibleServers/{server_name}` with the
/// configured api-version, attaching a bearer token when a credential is
/// configured. A 200 response is deserialized into `Server`; any other status
/// is deserialized into `CloudError` and surfaced as
/// `get::Error::DefaultResponse`.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    server_name: &str,
) -> std::result::Result<Server, get::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}",
        &operation_config.base_path, subscription_id, resource_group_name, server_name
    );
    let mut req_builder = client.get(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(get::BuildRequestError)?;
    let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: Server = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            get::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
/// Error types for the `get` operation (generated).
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Creates a MySQL flexible server, or replaces an existing one (PUT).
///
/// 200 and 201 responses carry the resulting `Server`; 202 means the
/// long-running operation was accepted. Any other status is deserialized into
/// `CloudError` and surfaced as `create::Error::DefaultResponse`.
pub async fn create(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    server_name: &str,
    parameters: &Server,
) -> std::result::Result<create::Response, create::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}",
        &operation_config.base_path, subscription_id, resource_group_name, server_name
    );
    let mut req_builder = client.put(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(create::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.json(parameters);
    let req = req_builder.build().context(create::BuildRequestError)?;
    let rsp = client.execute(req).await.context(create::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
            let rsp_value: Server = serde_json::from_slice(&body).context(create::DeserializeError { body })?;
            Ok(create::Response::Ok200(rsp_value))
        }
        StatusCode::CREATED => {
            let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
            let rsp_value: Server = serde_json::from_slice(&body).context(create::DeserializeError { body })?;
            Ok(create::Response::Created201(rsp_value))
        }
        StatusCode::ACCEPTED => Ok(create::Response::Accepted202),
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
            let rsp_value: CloudError = serde_json::from_slice(&body).context(create::DeserializeError { body })?;
            create::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
/// Response and error types for the `create` operation (generated).
pub mod create {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug)]
    pub enum Response {
        Ok200(Server),
        Created201(Server),
        Accepted202,
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
parameters: &ServerForUpdate,
) -> std::result::Result<update::Response, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}",
&operation_config.base_path, subscription_id, resource_group_name, server_name
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: Server = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(update::Response::Ok200(rsp_value))
}
StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod update {
        //! Response and error types for the server `update` (PATCH) operation.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses: 200 carries the updated server; 202 has no body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(Server),
            Accepted202,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}",
&operation_config.base_path, subscription_id, resource_group_name, server_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(delete::Response::Ok200),
StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
delete::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod delete {
        //! Response and error types for the server `delete` operation.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses; none of them carries a body.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
            NoContent204,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<ServerListResult, list_by_resource_group::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers",
&operation_config.base_path, subscription_id, resource_group_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_resource_group::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_resource_group::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
let rsp_value: ServerListResult =
serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
list_by_resource_group::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_resource_group {
        //! Error types for the `list_by_resource_group` operation
        //! (the success payload is returned directly as `ServerListResult`).
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<ServerListResult, list::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.DBForMySql/flexibleServers",
&operation_config.base_path, subscription_id
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list::BuildRequestError)?;
let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: ServerListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
list::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list {
        //! Error types for the subscription-wide `list` operation
        //! (the success payload is returned directly as `ServerListResult`).
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn restart(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
) -> std::result::Result<restart::Response, restart::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/restart",
&operation_config.base_path, subscription_id, resource_group_name, server_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(restart::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
let req = req_builder.build().context(restart::BuildRequestError)?;
let rsp = client.execute(req).await.context(restart::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(restart::Response::Ok200),
StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(restart::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(restart::DeserializeError { body })?;
restart::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod restart {
        //! Response and error types for the server `restart` operation.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses; neither carries a body.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn start(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
) -> std::result::Result<start::Response, start::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/start",
&operation_config.base_path, subscription_id, resource_group_name, server_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(start::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
let req = req_builder.build().context(start::BuildRequestError)?;
let rsp = client.execute(req).await.context(start::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(start::Response::Ok200),
StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(start::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(start::DeserializeError { body })?;
start::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod start {
        //! Response and error types for the server `start` operation.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses; neither carries a body.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn stop(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
) -> std::result::Result<stop::Response, stop::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/stop",
&operation_config.base_path, subscription_id, resource_group_name, server_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(stop::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
let req = req_builder.build().context(stop::BuildRequestError)?;
let rsp = client.execute(req).await.context(stop::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(stop::Response::Ok200),
StatusCode::ACCEPTED => Ok(stop::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(stop::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(stop::DeserializeError { body })?;
stop::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod stop {
        //! Response and error types for the server `stop` operation.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses; neither carries a body.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod replicas {
    //! Operations on the read replicas of a MySQL flexible server.
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists the replicas of the given server. Returns the parsed
    /// `ServerListResult` on 200; any other status is mapped into
    /// `list_by_server::Error::DefaultResponse`.
    pub async fn list_by_server(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
    ) -> std::result::Result<ServerListResult, list_by_server::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/replicas",
            &operation_config.base_path, subscription_id, resource_group_name, server_name
        );
        let mut request = http_client.get(url.as_str());
        // Attach a bearer token only when a credential is configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_server::GetTokenError)?;
            request = request.bearer_auth(token.token.secret());
        }
        request = request.query(&[("api-version", &operation_config.api_version)]);
        let built = request.build().context(list_by_server::BuildRequestError)?;
        let response = http_client.execute(built).await.context(list_by_server::ExecuteRequestError)?;
        match response.status() {
            StatusCode::OK => {
                let bytes: bytes::Bytes = response.bytes().await.context(list_by_server::ResponseBytesError)?;
                let result: ServerListResult =
                    serde_json::from_slice(&bytes).context(list_by_server::DeserializeError { body: bytes })?;
                Ok(result)
            }
            other => {
                let bytes: bytes::Bytes = response.bytes().await.context(list_by_server::ResponseBytesError)?;
                let cloud_error: CloudError =
                    serde_json::from_slice(&bytes).context(list_by_server::DeserializeError { body: bytes })?;
                list_by_server::DefaultResponse {
                    status_code: other,
                    value: cloud_error,
                }
                .fail()
            }
        }
    }
    pub mod list_by_server {
        //! Error types for `replicas::list_by_server`.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod server_keys {
    //! CRUD operations for the encryption keys of a MySQL flexible server.
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches a single server key by name. Returns the parsed `ServerKey`
    /// on 200; any other status is mapped into `get::Error::DefaultResponse`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        key_name: &str,
    ) -> std::result::Result<ServerKey, get::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/keys/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, key_name
        );
        let mut request = http_client.get(url.as_str());
        // Attach a bearer token only when a credential is configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            request = request.bearer_auth(token.token.secret());
        }
        request = request.query(&[("api-version", &operation_config.api_version)]);
        let built = request.build().context(get::BuildRequestError)?;
        let response = http_client.execute(built).await.context(get::ExecuteRequestError)?;
        match response.status() {
            StatusCode::OK => {
                let bytes: bytes::Bytes = response.bytes().await.context(get::ResponseBytesError)?;
                let key: ServerKey = serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
                Ok(key)
            }
            other => {
                let bytes: bytes::Bytes = response.bytes().await.context(get::ResponseBytesError)?;
                let cloud_error: CloudError =
                    serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
                get::DefaultResponse {
                    status_code: other,
                    value: cloud_error,
                }
                .fail()
            }
        }
    }
    pub mod get {
        //! Error types for `server_keys::get`.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or replaces a server key (PUT). 200/201 carry the stored key;
    /// 202 indicates the operation was accepted without a body.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        key_name: &str,
        parameters: &ServerKey,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/keys/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, key_name
        );
        let mut request = http_client.put(url.as_str());
        // Attach a bearer token only when a credential is configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            request = request.bearer_auth(token.token.secret());
        }
        request = request
            .query(&[("api-version", &operation_config.api_version)])
            .json(parameters);
        let built = request.build().context(create_or_update::BuildRequestError)?;
        let response = http_client
            .execute(built)
            .await
            .context(create_or_update::ExecuteRequestError)?;
        match response.status() {
            StatusCode::OK => {
                let bytes: bytes::Bytes = response.bytes().await.context(create_or_update::ResponseBytesError)?;
                let key: ServerKey =
                    serde_json::from_slice(&bytes).context(create_or_update::DeserializeError { body: bytes })?;
                Ok(create_or_update::Response::Ok200(key))
            }
            StatusCode::CREATED => {
                let bytes: bytes::Bytes = response.bytes().await.context(create_or_update::ResponseBytesError)?;
                let key: ServerKey =
                    serde_json::from_slice(&bytes).context(create_or_update::DeserializeError { body: bytes })?;
                Ok(create_or_update::Response::Created201(key))
            }
            StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            other => {
                let bytes: bytes::Bytes = response.bytes().await.context(create_or_update::ResponseBytesError)?;
                let cloud_error: CloudError =
                    serde_json::from_slice(&bytes).context(create_or_update::DeserializeError { body: bytes })?;
                create_or_update::DefaultResponse {
                    status_code: other,
                    value: cloud_error,
                }
                .fail()
            }
        }
    }
    pub mod create_or_update {
        //! Response and error types for `server_keys::create_or_update`.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses: 200/201 carry the key payload; 202 has no body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(ServerKey),
            Created201(ServerKey),
            Accepted202,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a server key. Success is reported as a typed status
    /// (200 / 202 / 204); any other status yields `delete::Error::DefaultResponse`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        key_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/keys/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, key_name
        );
        let mut request = http_client.delete(url.as_str());
        // Attach a bearer token only when a credential is configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            request = request.bearer_auth(token.token.secret());
        }
        request = request.query(&[("api-version", &operation_config.api_version)]);
        let built = request.build().context(delete::BuildRequestError)?;
        let response = http_client.execute(built).await.context(delete::ExecuteRequestError)?;
        match response.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            other => {
                let bytes: bytes::Bytes = response.bytes().await.context(delete::ResponseBytesError)?;
                let cloud_error: CloudError =
                    serde_json::from_slice(&bytes).context(delete::DeserializeError { body: bytes })?;
                delete::DefaultResponse {
                    status_code: other,
                    value: cloud_error,
                }
                .fail()
            }
        }
    }
    pub mod delete {
        //! Response and error types for `server_keys::delete`.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses; none of them carries a body.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
            NoContent204,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists all keys of the given server. Returns the parsed
    /// `ServerKeyListResult` on 200; any other status is mapped into
    /// `list_by_server::Error::DefaultResponse`.
    pub async fn list_by_server(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
    ) -> std::result::Result<ServerKeyListResult, list_by_server::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/keys",
            &operation_config.base_path, subscription_id, resource_group_name, server_name
        );
        let mut request = http_client.get(url.as_str());
        // Attach a bearer token only when a credential is configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_server::GetTokenError)?;
            request = request.bearer_auth(token.token.secret());
        }
        request = request.query(&[("api-version", &operation_config.api_version)]);
        let built = request.build().context(list_by_server::BuildRequestError)?;
        let response = http_client.execute(built).await.context(list_by_server::ExecuteRequestError)?;
        match response.status() {
            StatusCode::OK => {
                let bytes: bytes::Bytes = response.bytes().await.context(list_by_server::ResponseBytesError)?;
                let result: ServerKeyListResult =
                    serde_json::from_slice(&bytes).context(list_by_server::DeserializeError { body: bytes })?;
                Ok(result)
            }
            other => {
                let bytes: bytes::Bytes = response.bytes().await.context(list_by_server::ResponseBytesError)?;
                let cloud_error: CloudError =
                    serde_json::from_slice(&bytes).context(list_by_server::DeserializeError { body: bytes })?;
                list_by_server::DefaultResponse {
                    status_code: other,
                    value: cloud_error,
                }
                .fail()
            }
        }
    }
    pub mod list_by_server {
        //! Error types for `server_keys::list_by_server`.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod firewall_rules {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
firewall_rule_name: &str,
) -> std::result::Result<FirewallRule, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/firewallRules/{}",
&operation_config.base_path, subscription_id, resource_group_name, server_name, firewall_rule_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: FirewallRule = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
get::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod get {
        //! Error types for `firewall_rules::get`
        //! (the success payload is returned directly as `FirewallRule`).
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
firewall_rule_name: &str,
parameters: &FirewallRule,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/firewallRules/{}",
&operation_config.base_path, subscription_id, resource_group_name, server_name, firewall_rule_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(create_or_update::BuildRequestError)?;
let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: FirewallRule = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
StatusCode::CREATED => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: FirewallRule = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
create_or_update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod create_or_update {
        //! Response and error types for `firewall_rules::create_or_update`.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses: 200/201 carry the rule payload; 202 has no body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(FirewallRule),
            Created201(FirewallRule),
            Accepted202,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
server_name: &str,
firewall_rule_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/firewallRules/{}",
&operation_config.base_path, subscription_id, resource_group_name, server_name, firewall_rule_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(delete::Response::Ok200),
StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
delete::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod delete {
        //! Response and error types for `firewall_rules::delete`.
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success responses; none of them carries a body.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
            NoContent204,
        }
        /// Failure cases; snafu generates the context selectors used with `.context(...)`.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Unexpected HTTP status with the parsed `CloudError` body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            /// Building the reqwest request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving the response failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body bytes failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// JSON deserialization failed; raw bytes retained in `body`.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Acquiring an Azure AD access token failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
/// Lists all firewall rules of a MySQL flexible server.
///
/// Issues `GET .../flexibleServers/{server}/firewallRules`; a 200 body is
/// deserialized as `FirewallRuleListResult`, anything else as a `CloudError`.
pub async fn list_by_server(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    server_name: &str,
) -> std::result::Result<FirewallRuleListResult, list_by_server::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/firewallRules",
        &operation_config.base_path, subscription_id, resource_group_name, server_name
    );
    let mut req_builder = client.get(uri_str);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_server::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(list_by_server::BuildRequestError)?;
    let rsp = client.execute(req).await.context(list_by_server::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_server::ResponseBytesError)?;
            let rsp_value: FirewallRuleListResult = serde_json::from_slice(&body).context(list_by_server::DeserializeError { body })?;
            Ok(rsp_value)
        }
        // Unexpected status: surface the service's error payload.
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_server::ResponseBytesError)?;
            let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_server::DeserializeError { body })?;
            list_by_server::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
/// Error types for [`list_by_server`].
pub mod list_by_server {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // Variants cover the service's error payload plus each failure mode of
    // the request/response pipeline.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
/// Operations on the databases of a MySQL flexible server.
///
/// Generated client code: each public function issues one REST call, and its
/// sibling module of the same name holds that operation's `Response` (when
/// more than one success status exists) and `Error` types.
pub mod databases {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches a single database by name (HTTP GET).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
    ) -> std::result::Result<Database, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/databases/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, database_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Database = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or updates a database (HTTP PUT with a JSON body).
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
        parameters: &Database,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/databases/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, database_name
        );
        let mut req_builder = client.put(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: Database = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: Database = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            // 202 has no body: the operation continues asynchronously.
            StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success statuses: 200/201 carry the resulting database.
        #[derive(Debug)]
        pub enum Response {
            Ok200(Database),
            Created201(Database),
            Accepted202,
        }
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a database (HTTP DELETE); 200/202/204 are all successes.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        database_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/databases/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, database_name
        );
        let mut req_builder = client.delete(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success statuses the service may return for the delete operation.
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
            NoContent204,
        }
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists all databases of a server (HTTP GET).
    pub async fn list_by_server(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
    ) -> std::result::Result<DatabaseListResult, list_by_server::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/databases",
            &operation_config.base_path, subscription_id, resource_group_name, server_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_server::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_server::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_server::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_server::ResponseBytesError)?;
                let rsp_value: DatabaseListResult = serde_json::from_slice(&body).context(list_by_server::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_server::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_server::DeserializeError { body })?;
                list_by_server::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`list_by_server`].
    pub mod list_by_server {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on the configuration parameters of a MySQL flexible server.
///
/// Generated client code: each public function issues one REST call, and its
/// sibling module of the same name holds that operation's `Response` (when
/// more than one success status exists) and `Error` types.
pub mod configurations {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches a single configuration value by name (HTTP GET).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        configuration_name: &str,
    ) -> std::result::Result<Configuration, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/configurations/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, configuration_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Configuration = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Updates a configuration value (HTTP PATCH with a JSON body).
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
        configuration_name: &str,
        parameters: &Configuration,
    ) -> std::result::Result<update::Response, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/configurations/{}",
            &operation_config.base_path, subscription_id, resource_group_name, server_name, configuration_name
        );
        let mut req_builder = client.patch(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: Configuration = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(update::Response::Ok200(rsp_value))
            }
            // 202 has no body: the operation continues asynchronously.
            StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`update`].
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success statuses: 200 carries the updated configuration.
        #[derive(Debug)]
        pub enum Response {
            Ok200(Configuration),
            Accepted202,
        }
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists all configuration values of a server (HTTP GET).
    pub async fn list_by_server(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        server_name: &str,
    ) -> std::result::Result<ConfigurationListResult, list_by_server::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBForMySql/flexibleServers/{}/configurations",
            &operation_config.base_path, subscription_id, resource_group_name, server_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_server::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_server::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_server::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_server::ResponseBytesError)?;
                let rsp_value: ConfigurationListResult =
                    serde_json::from_slice(&body).context(list_by_server::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_server::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_server::DeserializeError { body })?;
                list_by_server::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`list_by_server`].
    pub mod list_by_server {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Lists the MySQL flexible-server capabilities available in one Azure
/// location. Generated client code following the same pattern as the other
/// operation modules.
pub mod location_based_capabilities {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches the capability list for `location_name` (HTTP GET).
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location_name: &str,
    ) -> std::result::Result<CapabilitiesListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.DBForMySql/locations/{}/capabilities",
            &operation_config.base_path, subscription_id, location_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: CapabilitiesListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Checks virtual-network subnet usage for a given Azure location. Generated
/// client code following the same pattern as the other operation modules.
pub mod check_virtual_network_subnet_usage {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Posts the usage query for `location_name` and returns the result.
    pub async fn execute(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location_name: &str,
        parameters: &VirtualNetworkSubnetUsageParameter,
    ) -> std::result::Result<VirtualNetworkSubnetUsageResult, execute::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.DBForMySql/locations/{}/checkVirtualNetworkSubnetUsage",
            &operation_config.base_path, subscription_id, location_name
        );
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(execute::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(execute::BuildRequestError)?;
        let rsp = client.execute(req).await.context(execute::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(execute::ResponseBytesError)?;
                let rsp_value: VirtualNetworkSubnetUsageResult =
                    serde_json::from_slice(&body).context(execute::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(execute::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(execute::DeserializeError { body })?;
                execute::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`execute`].
    pub mod execute {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Checks whether a resource name is available within the subscription.
/// Generated client code following the same pattern as the other operation
/// modules.
pub mod check_name_availability {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Posts the name-availability request and returns the service's answer.
    pub async fn execute(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        name_availability_request: &NameAvailabilityRequest,
    ) -> std::result::Result<NameAvailability, execute::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.DBForMySql/checkNameAvailability",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(execute::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(name_availability_request);
        let req = req_builder.build().context(execute::BuildRequestError)?;
        let rsp = client.execute(req).await.context(execute::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(execute::ResponseBytesError)?;
                let rsp_value: NameAvailability = serde_json::from_slice(&body).context(execute::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(execute::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(execute::DeserializeError { body })?;
                execute::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`execute`].
    pub mod execute {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Lists the REST operations exposed by the Microsoft.DBForMySql provider.
/// Generated client code following the same pattern as the other operation
/// modules.
pub mod operations {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches the provider's operation list (HTTP GET, no scoping beyond
    /// the base path).
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.DBForMySql/operations", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: OperationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Unexpected status: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        // Variants cover the service's error payload plus each pipeline
        // failure mode.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
|
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
// Maps a number to its (row, col) position on one board, for O(1) marking.
type BingoIndex = HashMap<usize, (usize, usize)>;
// 5x5 grid of (number, marked) cells.
type BingoBoard = [[(usize, bool); 5]; 5];
/// AoC 2021 day 4 (bingo): part 1 scores the first board to win, part 2 the
/// last. Panics if the input file is missing or no board ever wins.
fn main() {
    let filename = "input/input.txt";
    let (drawn_numbers, boards) = parse_input_file(filename);
    println!("drawn_numbers: {:?}", drawn_numbers);
    // println!("boards: {:?}", boards);
    // Debug dump: show each board's reverse index and grid.
    boards.iter().for_each(|(bidx, board)| {
        println!("{:?}", bidx);
        print_board(board)
    });
    // Part 1
    // `to_owned` clones the boards so part 2 can replay them from scratch.
    let (final_drawn_number, winning_board) =
        match find_first_winning_board(&drawn_numbers, boards.to_owned()) {
            None => {
                panic!("no winning board found");
            }
            Some(x) => x,
        };
    print_score(final_drawn_number, &winning_board);
    // Part 2
    let (final_drawn_number, winning_board) = match find_last_winning_board(&drawn_numbers, boards)
    {
        None => {
            panic!("no last winning board found");
        }
        Some(x) => x,
    };
    print_score(final_drawn_number, &winning_board);
}
/// Plays `drawn_numbers` in order and returns the first board to reach
/// bingo, paired with the number that completed it.
///
/// Takes the boards by value because marking mutates them; returns `None`
/// when no board ever wins. (Accepts `&[usize]` instead of `&Vec<usize>`;
/// existing `&Vec` call sites still work via deref coercion.)
fn find_first_winning_board(
    drawn_numbers: &[usize],
    mut boards: Vec<(BingoIndex, BingoBoard)>,
) -> Option<(usize, BingoBoard)> {
    for &n in drawn_numbers {
        // Mark the drawn number on every board before checking for a winner.
        for (bidx, board) in boards.iter_mut() {
            mark_board(bidx, board, n);
        }
        if let Some(&(_, winning_board)) = boards.iter().find(|&(_, board)| has_bingo(board)) {
            return Some((n, winning_board));
        }
    }
    None
}
/// Plays `drawn_numbers` in order and returns the LAST board to reach bingo,
/// paired with the number that completed it.
///
/// Boards are retired from play as soon as they win, and the most recently
/// retired winner is remembered. This fixes two issues with the previous
/// version: a leftover debug dump of the remaining boards, and returning
/// `None` unless literally every board won (now the last winner is reported
/// even if some boards never reach bingo). Returns `None` only when no board
/// wins at all.
fn find_last_winning_board(
    drawn_numbers: &[usize],
    mut boards: Vec<(BingoIndex, BingoBoard)>,
) -> Option<(usize, BingoBoard)> {
    let mut last_winner: Option<(usize, BingoBoard)> = None;
    for &n in drawn_numbers {
        for (bidx, board) in boards.iter_mut() {
            mark_board(bidx, board, n);
        }
        // Retire every board that just won; remember the latest winner.
        let mut i = 0;
        while i < boards.len() {
            if has_bingo(&boards[i].1) {
                let (_, winning_board) = boards.remove(i);
                last_winner = Some((n, winning_board));
            } else {
                i += 1;
            }
        }
        if boards.is_empty() {
            break;
        }
    }
    last_winner
}
/// Marks `n` on the board when the index says it is present; numbers not on
/// the board are ignored.
fn mark_board(bidx: &BingoIndex, board: &mut BingoBoard, n: usize) {
    match bidx.get(&n) {
        Some(&(row, col)) => board[row][col].1 = true,
        None => {}
    }
}
/// Reports whether the board has a fully-marked row or column.
///
/// Replaces the manual index loops and the temporary `[bool; 5]` column
/// buffer with iterator `any`/`all` chains (same result, no scratch array).
fn has_bingo(board: &BingoBoard) -> bool {
    // Any row with every cell marked...
    let any_row = board.iter().any(|row| row.iter().all(|&(_, marked)| marked));
    // ...or any column with every cell marked.
    let any_col = (0..5).any(|j| board.iter().all(|row| row[j].1));
    any_row || any_col
}
/// Sums every number on the board whose cell has not been marked.
///
/// Flattens the grid and uses `!marked` instead of the `*marked == false`
/// comparison (clippy: `bool_comparison`).
fn sum_of_unmarked(board: &BingoBoard) -> usize {
    board
        .iter()
        .flat_map(|row| row.iter())
        .filter(|&&(_, marked)| !marked)
        .map(|&(n, _)| n)
        .sum()
}
/// Parses the puzzle input: the first line is the comma-separated list of
/// drawn numbers; the remaining non-blank lines are 5x5 boards, five rows
/// per board.
///
/// For each board a reverse index (number -> (row, col)) is built so marking
/// a drawn number is a single lookup instead of a board scan.
///
/// Panics on I/O errors, malformed numbers, short rows, or a duplicate
/// number on one board.
fn parse_input_file(filename: &str) -> (Vec<usize>, Vec<(BingoIndex, BingoBoard)>) {
    let file = File::open(filename).expect("couldn't open file");
    let reader = BufReader::new(file);
    let lines: Vec<String> = reader
        .lines()
        .map(|line_res| line_res.expect("couldn't unwrap line"))
        .collect();
    let drawn_numbers: Vec<usize> = lines
        .first()
        .expect("no lines read")
        .split(',')
        .map(|n| n.parse().unwrap())
        .collect();
    let boards: Vec<_> = lines
        .into_iter()
        .skip(1)
        // `!l.is_empty()` instead of comparing against "" (clippy idiom);
        // blank lines only separate boards.
        .filter(|l| !l.is_empty())
        .map(|bl| bl.split_whitespace().map(|n| n.parse().unwrap()).collect())
        .collect::<Vec<Vec<usize>>>()
        .chunks(5)
        .map(|bls| {
            let mut board: BingoBoard = [[(0, false); 5]; 5];
            let mut bidx: BingoIndex = HashMap::new();
            for i in 0..5 {
                for j in 0..5 {
                    let &n = bls[i].get(j).unwrap();
                    // Single `insert` replaces the contains_key+insert double
                    // lookup; a number may appear only once per board because
                    // the reverse index relies on it.
                    if bidx.insert(n, (i, j)).is_some() {
                        panic!("BingoMap already contains number: {}", n);
                    }
                    board[i][j].0 = n;
                }
            }
            (bidx, board)
        })
        .collect();
    (drawn_numbers, boards)
}
/// Prints the winning board, the sum of its unmarked numbers, and the final
/// score (final drawn number * unmarked sum).
///
/// Fix: `winning_board` is already a `&BingoBoard`; the previous
/// `&winning_board` re-borrows created `&&BingoBoard` (clippy:
/// `needless_borrow`).
fn print_score(final_drawn_number: usize, winning_board: &BingoBoard) {
    println!("final drawn number: {}", final_drawn_number);
    print_board(winning_board);
    let sum_of_unmarked_on_winning_board = sum_of_unmarked(winning_board);
    println!(
        "sum_of_unmarked_on_winning_board: {}",
        sum_of_unmarked_on_winning_board
    );
    let score = final_drawn_number * sum_of_unmarked_on_winning_board;
    println!("score: {}", score);
}
/// Prints the board one row per line (Debug format), then a blank separator
/// line.
fn print_board(board: &BingoBoard) {
    for row in board.iter() {
        println!("{:?}", row);
    }
    println!();
}
|
use super::sanitizer::{should_rebase_url, Sanitizer, SlashPath};
use crate::renderer::RawMessageWriter;
use aho_corasick::AhoCorasick;
use emojis::Emoji;
use memchr::{memchr_iter, Memchr};
use pulldown_cmark::{
Alignment, CodeBlockKind, CowStr, Event, HeadingLevel, LinkType, MathDisplay, Options, Parser,
Tag,
};
use std::cmp;
use std::collections::HashMap;
use std::io::{Read, Result, Write};
use std::iter::Peekable;
use std::marker::PhantomData;
use std::path::Path;
/// Byte range into the markdown source text.
pub type Range = std::ops::Range<usize>;
/// Observer called for each text span encountered while parsing, with the
/// span's content and its byte range in the source. `()` is the no-op
/// visitor for callers that don't need the callbacks.
pub trait TextVisitor: Default {
    fn visit(&mut self, text: &str, range: &Range);
}
impl TextVisitor for () {
    fn visit(&mut self, _text: &str, _range: &Range) {}
}
/// Classification of a text token produced by a `TextTokenizer`. The
/// `Match*` variants map to tag names via `TokenKind::tag`; `Normal` text
/// carries no tag.
#[derive(Clone, Copy, Debug)]
pub enum TokenKind {
    Normal,
    MatchOther,
    MatchCurrent,
    MatchOtherStart,
    MatchCurrentStart,
}
impl TokenKind {
fn tag(self) -> &'static str {
match self {
Self::MatchOther => "match",
Self::MatchCurrent => "match-current",
Self::MatchOtherStart => "match-start",
Self::MatchCurrentStart => "match-current-start",
Self::Normal => unreachable!(),
}
}
}
/// Splits a text span into a token classification and the slice to emit.
/// `()` is the trivial tokenizer that returns the whole text as `Normal`.
pub trait TextTokenizer {
    fn tokenize<'t>(&mut self, text: &'t str, range: &Range) -> (TokenKind, &'t str);
}
impl TextTokenizer for () {
    fn tokenize<'t>(&mut self, text: &'t str, _range: &Range) -> (TokenKind, &'t str) {
        (TokenKind::Normal, text)
    }
}
// A Markdown document together with the base directory used to resolve
// relative links.
#[derive(Default)]
pub struct MarkdownContent {
    // Raw Markdown source text.
    source: String,
    // Base directory for rebasing relative link/image paths.
    base_dir: SlashPath,
}
impl MarkdownContent {
    /// Creates content from `source`; a missing `base_dir` falls back to the default path.
    pub fn new(source: String, base_dir: Option<&Path>) -> Self {
        let base_dir = base_dir.map(SlashPath::from).unwrap_or_default();
        Self { source, base_dir }
    }
    /// Returns the byte offset of the first difference between this source and
    /// `new`'s source, or `None` when both are identical. When one source is a
    /// prefix of the other, the shorter length is reported as the offset.
    pub fn modified_offset(&self, new: &Self) -> Option<usize> {
        let prev = self.source.as_bytes();
        let cur = new.source.as_bytes();
        let first_diff = prev.iter().zip(cur).position(|(a, b)| a != b);
        first_diff.or_else(|| (prev.len() != cur.len()).then_some(cmp::min(prev.len(), cur.len())))
    }
    /// True when there is neither source text nor a base directory.
    pub fn is_empty(&self) -> bool {
        self.source.is_empty() && self.base_dir.is_empty()
    }
}
// A configured pulldown-cmark parser plus everything needed to encode its
// events into the render tree.
pub struct MarkdownParser<'a, V: TextVisitor, T: TextTokenizer> {
    parser: Parser<'a, 'a>,
    // Base directory for resolving relative links in the document.
    base_dir: &'a SlashPath,
    // Byte offset of the last modification, if any; used to emit a "modified" marker.
    offset: Option<usize>,
    text_tokenizer: T,
    // `V` is only used as the output type of `write_to`; nothing is stored.
    _phantom: PhantomData<V>,
}
impl<'a, V: TextVisitor, T: TextTokenizer> MarkdownParser<'a, V, T> {
    /// Creates a parser over `content` with strikethrough, footnotes, tables,
    /// task lists and math extensions enabled.
    pub fn new(content: &'a MarkdownContent, offset: Option<usize>, text_tokenizer: T) -> Self {
        let options = Options::ENABLE_STRIKETHROUGH
            | Options::ENABLE_FOOTNOTES
            | Options::ENABLE_TABLES
            | Options::ENABLE_TASKLISTS
            | Options::ENABLE_MATH;
        Self {
            parser: Parser::new_ext(&content.source, options),
            base_dir: &content.base_dir,
            offset,
            text_tokenizer,
            _phantom: PhantomData,
        }
    }
}
// Note: Build raw JavaScript expression which is evaluated to the render tree encoded as JSON value.
// This expression will be evaluated via `receive(JSON.parse('{"kind":"render_tree",...}'))` by renderer.
impl<'a, V: TextVisitor, T: TextTokenizer> RawMessageWriter for MarkdownParser<'a, V, V> {
    type Output = V;
    /// Serializes the parsed document as the `JSON.parse('{"kind":"render_tree",...}')`
    /// JavaScript expression consumed by the renderer, returning the text
    /// visitor populated while encoding.
    fn write_to(self, writer: impl Write) -> Result<Self::Output> {
        let mut enc =
            RenderTreeEncoder::new(writer, self.base_dir, self.offset, self.text_tokenizer);
        // Envelope prefix; `push` writes the "tree" array in between.
        enc.out.write_all(br#"JSON.parse('{"kind":"render_tree","tree":"#)?;
        enc.push(self.parser)?;
        enc.out.write_all(b"}')")?;
        Ok(enc.text_visitor)
    }
}
// To know the format of JSON value, see type definitions in web/ipc.ts
// Whether table cells are currently emitted as header (`th`) or data (`td`) cells.
enum TableState {
    Head,
    Row,
}
// Note: Be careful, this function is called in the hot loop on encoding texts
#[inline]
#[allow(clippy::just_underscores_and_digits)]
fn encode_string_byte(mut out: impl Write, b: u8) -> Result<()> {
    // Single-letter JSON escapes: the byte written after the backslashes.
    const BB: u8 = b'b'; // \x08
    const TT: u8 = b't'; // \x09
    const NN: u8 = b'n'; // \x0a
    const FF: u8 = b'f'; // \x0c
    const RR: u8 = b'r'; // \x0d
    const DQ: u8 = b'"'; // \x22
    const SQ: u8 = b'\''; // \x27
    const BS: u8 = b'\\'; // \x5c
    const XX: u8 = 1; // \x00...\x1f non-printable
    const __: u8 = 0;
    #[rustfmt::skip]
    const ESCAPE_TABLE: [u8; 256] = [
    //   0   1   2   3   4   5   6   7   8   9   A   B   C   D   E   F
        XX, XX, XX, XX, XX, XX, XX, XX, BB, TT, NN, XX, FF, RR, XX, XX, // 0
        XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, // 1
        __, __, DQ, __, __, __, __, SQ, __, __, __, __, __, __, __, __, // 2
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 3
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 4
        __, __, __, __, __, __, __, __, __, __, __, __, BS, __, __, __, // 5
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 6
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, XX, // 7
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 8
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 9
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // A
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // B
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // C
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // D
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // E
        __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // F
    ];
    // The escaping targets a JSON string that itself lives inside a
    // single-quoted JavaScript string literal, hence the doubled backslashes.
    match ESCAPE_TABLE[b as usize] {
        __ => out.write_all(&[b]),            // Printable; emit as-is
        BS => out.write_all(br#"\\\\"#),      // Escape twice for JS and JSON (\\\\ → \\ → \)
        SQ => out.write_all(br#"\'"#),        // ' delimits the enclosing JS string
        XX => write!(out, r#"\\u{:04x}"#, b), // Non-printable -> \uXXXX
        esc => out.write_all(&[b'\\', b'\\', esc]), // \b \t \n \f \r \" via lookup
    }
}
// `Write` adaptor that escapes every byte via `encode_string_byte` before
// forwarding it to the inner writer.
struct StringContentEncoder<W: Write>(W);
impl<W: Write> Write for StringContentEncoder<W> {
    fn write(&mut self, buf: &[u8]) -> Result<usize> {
        buf.iter().try_for_each(|&b| encode_string_byte(&mut self.0, b))?;
        // All input bytes were consumed even though more bytes may have been written.
        Ok(buf.len())
    }
    fn flush(&mut self) -> Result<()> {
        self.0.flush()
    }
}
// Streams the bytes of a raw HTML region. Consecutive `Html`/`Text` events are
// concatenated until the tag nesting level returns to zero, so the sanitizer
// can consume the whole region as a single `Read` source.
struct RawHtmlReader<'a, I: Iterator<Item = (Event<'a>, Range)>> {
    // Text of the event currently being drained.
    current: CowStr<'a>,
    // Read position within `current`.
    index: usize,
    // Remaining parser events; handed back to the caller after reading.
    events: Peekable<I>,
    // Open-tag nesting depth; starts at 1 for the event that created the reader.
    // NOTE(review): any HTML event not starting with "</" increments this, so
    // self-closing tags deepen the stack too — confirm that is intended.
    stack: usize,
}
impl<'a, I: Iterator<Item = (Event<'a>, Range)>> RawHtmlReader<'a, I> {
    fn new(current: CowStr<'a>, events: Peekable<I>) -> Self {
        Self { current, index: 0, events, stack: 1 }
    }
    // Returns the next byte of the HTML region, or `None` when it ends.
    fn read_byte(&mut self) -> Option<u8> {
        // Current event was consumed. Fetch next event otherwise return `None`.
        while self.current.len() <= self.index {
            // Stop at the first non-HTML/non-text event, or once nesting is balanced.
            if !matches!(self.events.peek(), Some((Event::Html(_) | Event::Text(_), _)))
                || self.stack == 0
            {
                return None;
            }
            self.current = match self.events.next().unwrap().0 {
                Event::Html(html) => {
                    if html.starts_with("</") {
                        self.stack -= 1;
                    } else {
                        self.stack += 1;
                    }
                    html
                }
                Event::Text(text) => text,
                // `peek` above guarantees only Html/Text can appear here.
                _ => unreachable!(),
            };
            self.index = 0;
        }
        let b = self.current.as_bytes()[self.index];
        self.index += 1;
        Some(b)
    }
}
impl<'a, I: Iterator<Item = (Event<'a>, Range)>> Read for RawHtmlReader<'a, I> {
    // Fills `buf` byte by byte and reports how many bytes were produced;
    // a short count means the raw HTML region ended.
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        let mut filled = 0;
        while filled < buf.len() {
            let Some(b) = self.read_byte() else {
                break;
            };
            buf[filled] = b;
            filled += 1;
        }
        Ok(filled)
    }
}
// Encodes pulldown-cmark events as the JSON render tree consumed by the
// frontend (see web/ipc.ts for the schema).
struct RenderTreeEncoder<'a, W: Write, V: TextVisitor, T: TextTokenizer> {
    out: W,
    base_dir: &'a SlashPath,
    // Whether table cells currently belong to the header or a body row.
    table: TableState,
    // True right after a children array was opened; suppresses the next comma.
    is_start: bool,
    // Footnote name -> sequential id (1-based), shared by definitions and references.
    ids: HashMap<CowStr<'a>, usize>,
    // Pending modification offset; cleared once a "modified" marker is emitted.
    modified: Option<usize>,
    text_visitor: V,
    text_tokenizer: T,
    autolinker: Autolinker,
    sanitizer: Sanitizer<'a>,
    // True inside a code block; disables autolinking/emoji handling for its text.
    in_code_block: bool,
}
impl<'a, W: Write, V: TextVisitor, T: TextTokenizer> RenderTreeEncoder<'a, W, V, T> {
    fn new(w: W, base_dir: &'a SlashPath, modified: Option<usize>, text_tokenizer: T) -> Self {
        Self {
            out: w,
            base_dir,
            table: TableState::Head,
            is_start: true,
            ids: HashMap::new(),
            modified,
            text_visitor: V::default(),
            text_tokenizer,
            autolinker: Autolinker::default(),
            sanitizer: Sanitizer::new(base_dir),
            in_code_block: false,
        }
    }
    // Encodes the whole document as a JSON array of render-tree nodes.
    fn push(&mut self, parser: Parser<'a, 'a>) -> Result<()> {
        self.out.write_all(b"[")?;
        self.events(parser)?;
        // Modified offset was not consumed by any text, it would mean that some non-text parts after any text were
        // modified. As a fallback, set 'modified' marker after the last text.
        if self.modified.is_some() {
            self.tag("modified")?;
            self.out.write_all(b"}")?;
        }
        self.out.write_all(b"]")
    }
    // Writes `s` with JSON/JS escaping but without surrounding quotes.
    fn string_content(&mut self, s: &str) -> Result<()> {
        for b in s.as_bytes().iter().copied() {
            encode_string_byte(&mut self.out, b)?;
        }
        Ok(())
    }
    // Writes `s` as a double-quoted JSON string.
    fn string(&mut self, s: &str) -> Result<()> {
        self.out.write_all(b"\"")?;
        self.string_content(s)?;
        self.out.write_all(b"\"")
    }
    // Writes a table column alignment as a JSON value.
    fn alignment(&mut self, a: Alignment) -> Result<()> {
        self.out.write_all(match a {
            Alignment::None => b"null",
            Alignment::Left => br#""left""#,
            Alignment::Center => br#""center""#,
            Alignment::Right => br#""right""#,
        })
    }
    // Returns the footnote id for `name`, allocating the next sequential id on
    // first use so definitions and references agree.
    fn id(&mut self, name: CowStr<'a>) -> usize {
        let new = self.ids.len() + 1;
        *self.ids.entry(name).or_insert(new)
    }
    // Writes the separating comma between array elements, except before the first.
    fn comma(&mut self) -> Result<()> {
        if !self.is_start {
            self.out.write_all(b",")?;
        } else {
            self.is_start = false;
        }
        Ok(())
    }
    // Opens a node object: `{"t":"<name>"` — caller must eventually close it.
    fn tag(&mut self, name: &str) -> Result<()> {
        self.comma()?;
        write!(self.out, r#"{{"t":"{}""#, name)
    }
    // Emits `input`, wrapping match tokens in their highlight nodes.
    fn text_tokens(&mut self, mut input: &str, mut range: Range) -> Result<()> {
        use TokenKind::*;
        while !input.is_empty() {
            let (token, text) = self.text_tokenizer.tokenize(input, &range);
            match token {
                Normal => {
                    self.comma()?;
                    self.string(text)?;
                }
                MatchOther | MatchCurrent | MatchOtherStart | MatchCurrentStart => {
                    self.tag(token.tag())?;
                    self.children_begin()?;
                    self.string(text)?;
                    self.tag_end()?;
                }
            }
            input = &input[text.len()..];
            range.start += text.len();
        }
        Ok(())
    }
    // Emits a text run and, when the pending modified offset falls inside it,
    // splits the text around the offset and inserts a "modified" marker node.
    fn text(&mut self, text: &str, range: Range) -> Result<()> {
        self.text_visitor.visit(text, &range);
        let Some(offset) = self.modified else {
            return self.text_tokens(text, range);
        };
        let Range { start, end } = range;
        if end < offset {
            // Modification is after this text; nothing to mark yet.
            return self.text_tokens(text, range);
        }
        // Handle the last modified offset with this text token
        self.modified = None;
        log::debug!("Handling last modified offset: {:?}", offset);
        if offset <= start {
            self.tag("modified")?;
            self.out.write_all(b"}")?;
            self.text_tokens(text, range)
        } else if end == offset {
            self.text_tokens(text, range)?;
            self.tag("modified")?;
            self.out.write_all(b"}")
        } else {
            // Offset is strictly inside: split the text at the offset.
            let i = offset - start;
            self.text_tokens(&text[..i], range.start..offset)?;
            self.tag("modified")?;
            self.out.write_all(b"}")?;
            self.text_tokens(&text[i..], offset..range.end)
        }
    }
    // Emits text, replacing `:shortcode:` sequences with emoji nodes.
    fn emoji_text(&mut self, text: &str, range: Range) -> Result<()> {
        let mut start = range.start;
        for token in EmojiTokenizer::new(text) {
            match token {
                EmojiToken::Text(text) => {
                    if !text.is_empty() {
                        self.text(text, start..start + text.len())?;
                        start += text.len();
                    }
                }
                EmojiToken::Emoji(emoji, len) => {
                    self.tag("emoji")?;
                    self.out.write_all(br#","name":"#)?;
                    self.string(emoji.name())?;
                    self.children_begin()?;
                    self.string(emoji.as_str())?;
                    self.tag_end()?;
                    start += len;
                }
            }
        }
        // Note: When some escaped text is included in input like "&amp;", `start == range.end` invariant is violated here.
        // That's OK because pulldown-cmark tokenizes any escaped text as small as possible to reduce extra heap allocation.
        // For instance "foo &amp; bar" is tokenized into three events Text("foo "), Text("&amp;"), Test(" bar"). It means that
        // any escaped charactor is followed by no text within the token.
        Ok(())
    }
    // Emits text, wrapping any bare http(s) URLs in anchor nodes.
    fn autolink_text(&mut self, mut text: &str, range: Range) -> Result<()> {
        let Range { mut start, end } = range;
        while let Some((s, e)) = self.autolinker.find_autolink(text) {
            if s > 0 {
                self.emoji_text(&text[..s], start..start + s)?;
            }
            let url = &text[s..e];
            log::debug!("Auto-linking URL: {}", url);
            self.tag("a")?;
            self.out.write_all(br#","auto":true,"href":"#)?;
            self.string(url)?;
            self.children_begin()?;
            self.text(url, start + s..start + e)?;
            self.tag_end()?;
            text = &text[e..];
            start += e;
        }
        if !text.is_empty() {
            self.emoji_text(text, start..end)?;
        }
        Ok(())
    }
    // Main event loop: translates every parser event into render-tree JSON.
    fn events(&mut self, parser: Parser<'a, 'a>) -> Result<()> {
        use Event::*;
        let mut events = parser.into_offset_iter().peekable();
        while let Some((event, range)) = events.next() {
            match event {
                Start(tag) => {
                    // Peek the next event so list items can detect task-list markers.
                    let next_event = events.peek().map(|(e, _)| e);
                    self.start_tag(tag, next_event)?;
                }
                End(tag) => self.end_tag(tag)?,
                // Inside code blocks text is emitted verbatim (no autolink/emoji).
                Text(text) if self.in_code_block => self.text(&text, range)?,
                Text(text) => self.autolink_text(&text, range)?,
                Code(text) => {
                    // `range` covers the backticks; shrink it to the inner text.
                    let pad = (range.len() - text.len()) / 2;
                    let inner_range = (range.start + pad)..(range.end - pad);
                    self.tag("code")?;
                    self.children_begin()?;
                    self.text(&text, inner_range)?;
                    self.tag_end()?;
                }
                Html(html) => {
                    self.tag("html")?;
                    self.out.write_all(br#","raw":""#)?;
                    // Sanitize the raw HTML region, escaping it into the JSON string.
                    let mut dst = StringContentEncoder(&mut self.out);
                    let mut src = RawHtmlReader::new(html, events);
                    self.sanitizer.clean(&mut dst, &mut src)?;
                    // The reader consumed part of the event stream; take it back.
                    events = src.events;
                    self.out.write_all(br#""}"#)?;
                }
                SoftBreak => self.text("\n", range)?,
                HardBreak => {
                    self.tag("br")?;
                    self.out.write_all(b"}")?;
                }
                Rule => {
                    self.tag("hr")?;
                    self.out.write_all(b"}")?;
                }
                FootnoteReference(name) => {
                    self.tag("fn-ref")?;
                    let id = self.id(name);
                    write!(self.out, r#","id":{}}}"#, id)?;
                }
                TaskListMarker(checked) => {
                    self.tag("checkbox")?;
                    write!(self.out, r#","checked":{}}}"#, checked)?;
                }
                Math(display, text) => {
                    self.tag("math")?;
                    write!(self.out, r#","inline":{},"expr":"#, display == MathDisplay::Inline)?;
                    self.string(&text)?;
                    self.out.write_all(b"}")?;
                }
            }
        }
        Ok(())
    }
    // Writes a link destination, prefixing relative paths with `base_dir`.
    fn rebase_link(&mut self, dest: &str) -> Result<()> {
        if !should_rebase_url(dest) {
            return self.string(dest);
        }
        // Rebase 'foo/bar/' with '/path/to/base' as '/path/to/base/foo/bar'
        self.out.write_all(b"\"")?;
        self.string_content(self.base_dir)?;
        if !dest.starts_with('/') {
            self.out.write_all(b"/")?;
        }
        self.string_content(dest)?;
        self.out.write_all(b"\"")
    }
    // Opens the current node's children array: `,"c":[`.
    fn children_begin(&mut self) -> Result<()> {
        self.is_start = true;
        self.out.write_all(br#","c":["#)
    }
    // Closes the children array and the node object: `]}`.
    fn tag_end(&mut self) -> Result<()> {
        self.is_start = false;
        self.out.write_all(b"]}")
    }
    // Opens the render-tree node for a container tag and its attributes.
    // `next` is the event following the Start, used to detect task-list items.
    fn start_tag(&mut self, tag: Tag<'a>, next: Option<&Event>) -> Result<()> {
        use Tag::*;
        match tag {
            Paragraph => {
                self.tag("p")?;
            }
            Heading(level, id, _) => {
                self.tag("h")?;
                let level: u8 = match level {
                    HeadingLevel::H1 => 1,
                    HeadingLevel::H2 => 2,
                    HeadingLevel::H3 => 3,
                    HeadingLevel::H4 => 4,
                    HeadingLevel::H5 => 5,
                    HeadingLevel::H6 => 6,
                };
                write!(self.out, r#","level":{}"#, level)?;
                if let Some(id) = id {
                    self.out.write_all(br#","id":"#)?;
                    self.string(id)?;
                }
            }
            Table(alignments) => {
                self.tag("table")?;
                self.out.write_all(br#","align":["#)?;
                let mut alignments = alignments.into_iter();
                if let Some(a) = alignments.next() {
                    self.alignment(a)?;
                }
                for a in alignments {
                    self.out.write_all(b",")?;
                    self.alignment(a)?;
                }
                self.out.write_all(b"]")?;
            }
            TableHead => {
                self.table = TableState::Head;
                self.tag("thead")?;
                self.children_begin()?;
                self.tag("tr")?;
            }
            TableRow => {
                self.table = TableState::Row;
                self.tag("tr")?;
            }
            TableCell => {
                // Header vs body cell depends on the current table section.
                let tag = match self.table {
                    TableState::Head => "th",
                    TableState::Row => "td",
                };
                self.tag(tag)?;
            }
            BlockQuote => {
                self.tag("blockquote")?;
            }
            CodeBlock(info) => {
                self.tag("pre")?;
                self.children_begin()?;
                self.tag("code")?;
                // Fenced blocks carry an info string; its first word is the language.
                if let CodeBlockKind::Fenced(info) = info {
                    if let Some(lang) = info.split(' ').next() {
                        if !lang.is_empty() {
                            self.out.write_all(br#","lang":"#)?;
                            self.string(lang)?;
                        }
                    }
                }
                self.in_code_block = true;
            }
            // An explicit start of 1 is the default; omit the attribute.
            List(Some(1)) => self.tag("ol")?,
            List(Some(start)) => {
                self.tag("ol")?;
                write!(self.out, r#","start":{}"#, start)?;
            }
            List(None) => self.tag("ul")?,
            Item => {
                // A TaskListMarker right after the item start marks a task-list item.
                if let Some(Event::TaskListMarker(_)) = next {
                    self.tag("task-list")?;
                } else {
                    self.tag("li")?;
                }
            }
            Emphasis => self.tag("em")?,
            Strong => self.tag("strong")?,
            Strikethrough => self.tag("del")?,
            Link(LinkType::Autolink, _, _) => return Ok(()), // Ignore autolink since it is linked by `Autolinker`
            Link(link_type, dest, title) => {
                self.tag("a")?;
                self.out.write_all(br#","href":"#)?;
                match link_type {
                    LinkType::Email => {
                        self.out.write_all(b"\"mailto:")?;
                        self.string_content(&dest)?;
                        self.out.write_all(b"\"")?;
                    }
                    _ => self.rebase_link(&dest)?,
                }
                if !title.is_empty() {
                    self.out.write_all(br#","title":"#)?;
                    self.string(&title)?;
                }
            }
            Image(_link_type, dest, title) => {
                self.tag("img")?;
                if !title.is_empty() {
                    self.out.write_all(br#","title":"#)?;
                    self.string(&title)?;
                }
                self.out.write_all(br#","src":"#)?;
                self.rebase_link(&dest)?;
            }
            FootnoteDefinition(name) => {
                self.tag("fn-def")?;
                if !name.is_empty() {
                    self.out.write_all(br#","name":"#)?;
                    self.string(&name)?;
                }
                let id = self.id(name);
                write!(self.out, r#","id":{}"#, id)?;
            }
        }
        // Tag element must have its children (maybe empty)
        self.children_begin()
    }
    // Closes the render-tree node opened by `start_tag`.
    fn end_tag(&mut self, tag: Tag<'a>) -> Result<()> {
        use Tag::*;
        match tag {
            Link(LinkType::Autolink, _, _) => Ok(()), // Ignore autolink since it is linked by `Autolinker`
            Paragraph
            | Heading(_, _, _)
            | TableRow
            | TableCell
            | BlockQuote
            | List(_)
            | Item
            | Emphasis
            | Strong
            | Strikethrough
            | Link(_, _, _)
            | Image(_, _, _)
            | FootnoteDefinition(_) => self.tag_end(),
            CodeBlock(_) => {
                self.in_code_block = false;
                // Close both the "code" and the enclosing "pre" nodes.
                self.tag_end()?;
                self.tag_end()
            }
            Table(_) => {
                // Close "tbody" (opened at TableHead end) and "table".
                self.tag_end()?;
                self.tag_end()
            }
            TableHead => {
                // Close "tr" and "thead", then immediately open "tbody" for the rows.
                self.tag_end()?;
                self.tag_end()?;
                self.tag("tbody")?;
                self.children_begin()
            }
        }
    }
}
// Classification of a character with respect to autolink URL termination,
// based on the IRI character set (https://www.rfc-editor.org/rfc/rfc3987).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum UrlCharKind {
    // Cannot appear in a URL at all; ends the scan.
    Invalid,
    // May appear anywhere, including as the final character.
    Term,
    // May appear inside a URL but never terminates it (e.g. trailing "." or ",").
    NonTerm,
}
impl UrlCharKind {
    fn of(c: char) -> Self {
        // `char::is_control` covers exactly U+0000..=U+001F and U+007F..=U+009F,
        // matching the two invalid ranges of RFC 3987.
        if c.is_control()
            || matches!(c, ' ' | '|' | '"' | '<' | '>' | '`' | '(' | ')' | '[' | ']')
        {
            Self::Invalid
        } else if matches!(
            c,
            '?' | '!' | '.' | ',' | ':' | ';' | '*' | '&' | '\\' | '{' | '}' | '\''
        ) {
            Self::NonTerm
        } else {
            Self::Term
        }
    }
}
// Finds bare http(s) URLs in plain text using a multi-pattern scheme matcher.
struct Autolinker(AhoCorasick);
impl Default for Autolinker {
    fn default() -> Self {
        // Only the two web schemes are auto-linked (e.g. file:// is ignored).
        Self(AhoCorasick::new(["https://", "http://"]).unwrap())
    }
}
impl Autolinker {
    /// Returns the byte range `(start, end)` of the first bare URL in `text`,
    /// or `None` when no URL is found. Trailing punctuation that cannot end a
    /// URL (e.g. "." or ",") is excluded from the range.
    fn find_autolink(&self, text: &str) -> Option<(usize, usize)> {
        self.0.find_iter(text).find_map(|mat| {
            let (start, scheme_end) = (mat.start(), mat.end());
            // Note: "foohttp://example.com" is not URL but "123http://example.com" contains URL
            let preceded_by_alpha =
                text[..start].chars().next_back().map_or(false, |c| c.is_ascii_alphabetic());
            if preceded_by_alpha {
                return None;
            }
            // Scan forward from the scheme; remember the last position that is a
            // valid URL terminator and stop at the first invalid character.
            let mut url_len = 0;
            for (i, c) in text[scheme_end..].char_indices() {
                match UrlCharKind::of(c) {
                    UrlCharKind::Invalid => break,
                    UrlCharKind::Term => url_len = i + c.len_utf8(),
                    UrlCharKind::NonTerm => {}
                }
            }
            // A scheme with no body (e.g. "http://") is not a link.
            (url_len > 0).then_some((start, scheme_end + url_len))
        })
    }
}
// Output of `EmojiTokenizer`: either a plain text run or a resolved emoji
// together with the byte length of its original `:shortcode:` source.
#[derive(Debug)]
enum EmojiToken<'a> {
    Text(&'a str),
    Emoji(&'static Emoji, usize),
}
// Splits text into plain runs and `:shortcode:` emoji tokens.
struct EmojiTokenizer<'a> {
    text: &'a str,
    // Iterator over positions of ':' bytes in `text`.
    iter: Memchr<'a>,
    // Start of the not-yet-emitted portion of `text`.
    start: usize,
}
impl<'a> EmojiTokenizer<'a> {
    fn new(text: &'a str) -> Self {
        Self { iter: memchr_iter(b':', text.as_bytes()), text, start: 0 }
    }
    // Consumes and returns `text[start..end]`, advancing `start` past it.
    fn eat(&mut self, end: usize) -> &'a str {
        let text = &self.text[self.start..end];
        self.start = end;
        text
    }
}
impl<'a> Iterator for EmojiTokenizer<'a> {
    type Item = EmojiToken<'a>;
    // Tokenizing example:
    //   "foo :dog: bar :piyo: wow"
    //   -> ":dog: bar :piyo: wow" (text "foo ")
    //   -> " bar :piyo: wow"      (emoji "dog")
    //   -> ":piyo: wow"           (text " bar ")
    //   -> ": wow"                (text ":piyo")
    //   -> ""                     (text ": wow")
    fn next(&mut self) -> Option<Self::Item> {
        // Everything was consumed.
        if self.start == self.text.len() {
            return None;
        }
        // No more ':' ahead; the rest is plain text.
        let Some(end) = self.iter.next() else {
            return Some(EmojiToken::Text(self.eat(self.text.len()))); // Eat all of rest
        };
        if self.start == end {
            // Edge case: The initial input text starts with ':'
            return self.next();
        }
        // The pending run does not start with ':', so `end` just closes a plain
        // text run (the ':' itself stays pending for the next candidate).
        if !self.text[self.start..].starts_with(':') {
            return Some(EmojiToken::Text(self.eat(end)));
        }
        // Note:
        //   text[start..end+1] == ":dog:"
        //   text[start+1..end] == "dog"
        //   text[start..end]   == ":dog"
        let short = &self.text[self.start + 1..end];
        if let Some(emoji) = emojis::get_by_shortcode(short) {
            // Skip past the closing ':'; token length includes both colons.
            self.start = end + 1;
            Some(EmojiToken::Emoji(emoji, short.len() + 2))
        } else {
            // Unknown shortcode: emit ":dog" as text; the trailing ':' may still
            // open the next shortcode.
            Some(EmojiToken::Text(self.eat(end)))
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use std::path::PathBuf;
    // Loads the Markdown fixture at src/markdown/testdata/{name}.md.
    fn load_data(name: &str) -> String {
        let mut path = PathBuf::from("src");
        path.push("markdown");
        path.push("testdata");
        path.push(format!("{}.md", name));
        match fs::read_to_string(&path) {
            Ok(text) => text,
            Err(err) => panic!("Could not find Markdown test data at {:?}: {}", path, err),
        }
    }
    // Renders a fixture to the render-tree expression, validates that the
    // payload is well-formed JSON, and snapshot-tests it.
    macro_rules! snapshot_test {
        ($name:ident, $offset:expr, $basedir:expr) => {
            #[test]
            fn $name() {
                let source = load_data(stringify!($name));
                let target = MarkdownContent::new(source, $basedir);
                let parser = MarkdownParser::new(&target, $offset, ());
                let mut buf = Vec::new();
                let () = parser.write_to(&mut buf).unwrap();
                let buf = String::from_utf8(buf).unwrap();
                // Revert extra escape for '...' JavaScript string
                let buf = buf.replace("\\\\", "\\");
                // Remove the `JSON.parse` call to restore JSON value passed to the function
                let buf = buf.strip_prefix("JSON.parse('").unwrap();
                let buf = buf.strip_suffix("')").unwrap();
                // Check if the written output is in the valid JSON format
                let json: serde_json::Value = match serde_json::from_str(buf) {
                    Ok(value) => value,
                    Err(err) => {
                        panic!("Invalid JSON input with error \"{}\": {}", err, buf);
                    }
                };
                insta::assert_json_snapshot!(json);
            }
        };
        ($name:ident) => {
            snapshot_test!($name, None, None);
        };
        ($name:ident, $offset:expr) => {
            snapshot_test!($name, $offset, None);
        };
    }
    snapshot_test!(paragraph);
    snapshot_test!(blockquote);
    snapshot_test!(list);
    snapshot_test!(headings);
    snapshot_test!(codeblock);
    snapshot_test!(link);
    snapshot_test!(html);
    snapshot_test!(sanitized);
    snapshot_test!(inline_code);
    snapshot_test!(emphasis);
    snapshot_test!(image);
    snapshot_test!(autolink);
    snapshot_test!(emoji);
    snapshot_test!(table);
    snapshot_test!(math);
    snapshot_test!(strikethrough);
    snapshot_test!(tasklist);
    snapshot_test!(footnotes);
    snapshot_test!(highlight);
    snapshot_test!(not_link);
    // Offset
    snapshot_test!(offset_block, Some(30));
    snapshot_test!(offset_begin, Some(0));
    snapshot_test!(offset_after_end, Some(10000000));
    snapshot_test!(offset_in_emphasis, Some(10));
    // Relative link resolutions
    #[cfg(target_os = "windows")]
    const BASE_DIR: &str = r#"\a\b\c\d\e"#;
    #[cfg(not(target_os = "windows"))]
    const BASE_DIR: &str = "/a/b/c/d/e";
    snapshot_test!(relative_links, None, Some(Path::new(BASE_DIR)));
    mod visitor {
        use super::*;
        use crate::markdown::DisplayText;
        // Snapshot-tests the text visitor output and checks that every
        // sourcemap range maps back into the original source.
        macro_rules! snapshot_test {
            ($name:ident) => {
                #[test]
                fn $name() {
                    let source = load_data(stringify!($name));
                    let content = MarkdownContent::new(source, None);
                    let parser = MarkdownParser::new(&content, None, ());
                    let mut buf = Vec::new();
                    let visitor: DisplayText = parser.write_to(&mut buf).unwrap();
                    let text = &visitor.raw_text();
                    let source = &content.source;
                    let mut mapped = vec![];
                    for map in visitor.sourcemap() {
                        let slice = &source[map.clone()];
                        // Fix: report the offending slice in the failure
                        // message (it previously interpolated the unrelated
                        // whole visitor text).
                        assert!(
                            source.contains(slice),
                            "{:?} does not contain {:?}",
                            source,
                            slice,
                        );
                        mapped.push((slice, map.clone()));
                    }
                    insta::assert_debug_snapshot!((text, mapped));
                }
            };
        }
        snapshot_test!(paragraph);
        snapshot_test!(blockquote);
        snapshot_test!(list);
        snapshot_test!(headings);
        snapshot_test!(codeblock);
        snapshot_test!(link);
        snapshot_test!(html);
        snapshot_test!(sanitized);
        snapshot_test!(inline_code);
        snapshot_test!(emphasis);
        snapshot_test!(image);
        snapshot_test!(autolink);
        snapshot_test!(emoji);
        snapshot_test!(table);
        snapshot_test!(math);
        snapshot_test!(strikethrough);
        snapshot_test!(tasklist);
        snapshot_test!(footnotes);
        snapshot_test!(highlight);
        snapshot_test!(not_link);
    }
    #[test]
    fn emoji_tokenizer() {
        #[derive(PartialEq, Eq, Debug)]
        enum Tok {
            T(&'static str),
            E(&'static str, usize),
        }
        for (input, expected) in [
            (":dog:", &[Tok::E("dog face", 5)][..]),
            (":nerd_face:", &[Tok::E("nerd face", 11)][..]),
            (":+1:", &[Tok::E("thumbs up", 4)][..]),
            (":-1:", &[Tok::E("thumbs down", 4)][..]),
            (":dog::cat:", &[Tok::E("dog face", 5), Tok::E("cat face", 5)][..]),
            (":dog: :cat:", &[Tok::E("dog face", 5), Tok::T(" "), Tok::E("cat face", 5)][..]),
            (
                " :dog: :cat: ",
                &[
                    Tok::T(" "),
                    Tok::E("dog face", 5),
                    Tok::T(" "),
                    Tok::E("cat face", 5),
                    Tok::T(" "),
                ][..],
            ),
            (
                "hello :dog: world :cat: nyan",
                &[
                    Tok::T("hello "),
                    Tok::E("dog face", 5),
                    Tok::T(" world "),
                    Tok::E("cat face", 5),
                    Tok::T(" nyan"),
                ][..],
            ),
            ("hello, world", &[Tok::T("hello, world")][..]),
            ("", &[][..]),
            ("dog:", &[Tok::T("dog"), Tok::T(":")][..]),
            (":dog", &[Tok::T(":dog")][..]),
            (":this-is-not-an-emoji:", &[Tok::T(":this-is-not-an-emoji"), Tok::T(":")][..]),
            // Fix: this case was listed twice; the duplicate was removed.
            (
                ":not-emoji:not-emoji:dog:",
                &[Tok::T(":not-emoji"), Tok::T(":not-emoji"), Tok::E("dog face", 5)][..],
            ),
            ("::::", &[Tok::T(":"), Tok::T(":"), Tok::T(":"), Tok::T(":")][..]),
        ] {
            let actual = EmojiTokenizer::new(input)
                .map(|tok| match tok {
                    EmojiToken::Text(text) => Tok::T(text),
                    EmojiToken::Emoji(emoji, len) => Tok::E(emoji.name(), len),
                })
                .collect::<Vec<_>>();
            assert_eq!(expected, actual, "input={:?}", input);
        }
    }
    #[test]
    fn auto_linker() {
        for (input, url) in [
            ("http://example.com", Some("http://example.com")),
            ("https://example.com", Some("https://example.com")),
            ("http://example.com/foo", Some("http://example.com/foo")),
            ("http://example.com/foo/", Some("http://example.com/foo/")),
            ("http://example.com&foo=bar", Some("http://example.com&foo=bar")),
            ("hello http://example.com world", Some("http://example.com")),
            ("[foo](http://example.com)", Some("http://example.com")),
            ("[http://example.com]", Some("http://example.com")),
            ("Nice URL https://example.com!", Some("https://example.com")),
            ("This is URL https://example.com.", Some("https://example.com")),
            ("Is this URL https://example.com?", Some("https://example.com")),
            ("He said 'https://example.com'", Some("https://example.com")),
            ("Open https://example.com, and click button", Some("https://example.com")),
            ("https://example.com&", Some("https://example.com")),
            ("123http://aaa.com", Some("http://aaa.com")),
            ("file:///foo/bar", None),
            ("", None),
            ("hello, world", None),
            ("http:", None),
            ("http://", None),
            ("foohttp://aaa.com", None),
        ] {
            let found = Autolinker::default().find_autolink(input);
            assert_eq!(
                url.is_some(),
                found.is_some(),
                "input={input:?}, found={found:?}, expected={url:?}",
            );
            if let Some(url) = url {
                let (s, e) = found.unwrap();
                assert_eq!(url, &input[s..e]);
            }
        }
    }
}
|
use {
crate::{
client::{self, RequestType},
entities::*,
Client,
},
std::error::Error,
};
/// Get information about a specific artist
///
/// Performs a GET request against `/artists/{id}` and extracts the artist
/// from the response payload.
pub async fn get(client: &Client, id: i64) -> Result<Artist, Box<dyn Error>> {
    let response = client::perform_request::<GeneralMessage>(
        RequestType::Get,
        format!("/artists/{}", id),
        Some(client),
    )
    .await?;
    Ok(response.1.artist)
}
/// Get information about every artist known to listen.moe
pub async fn get_all(listen_moe_client: &Client) -> Result<Vec<Artist>, Box<dyn Error>> {
Ok(client::perform_request::<GeneralMessage>(
RequestType::Get,
"/artists".into(),
Some(listen_moe_client),
)
.await?
.1
.artists)
}
|
use std::sync::{RwLock, Arc};
use std::collections::HashMap;
use crate::view::View;
use crate::message::{PrePrepare, Prepare, Commit};
use libp2p::PeerId;
// Per-replica PBFT protocol state: the current view plus logs of received
// PrePrepare/Prepare/Commit messages.
pub struct State {
    // Current view number, shared behind a read-write lock.
    current_view: Arc<RwLock<View>>,
    // PrePrepare log: one message per (view, sequence number).
    pre_prepares: HashMap<PrePrepareKey, PrePrepare>,
    // Prepare log: per (view, sequence number), one message per sending peer.
    prepares: HashMap<PrepareKey, HashMap<PeerId, Prepare>>,
    // Commit log: per view, one message per sending peer.
    commits: HashMap<CommitKey, HashMap<PeerId, Commit>>,
    // The timestamp in the last reply this node sent to the client
    last_timestamp: u64,
}
// Keys of the message logs. PrePrepare and Prepare messages are identified by
// their (view, sequence number) pair; Commit messages are keyed by view only.
#[derive(PartialEq, Eq, Hash)]
struct PrePrepareKey(u64, u64); // (view, sequence_number)
#[derive(PartialEq, Debug, Eq, Hash)]
struct PrepareKey(u64, u64); // (view, sequence_number)
#[derive(PartialEq, Eq, Hash)]
struct CommitKey(u64); // view
impl State {
pub fn new() -> Self {
Self {
current_view: Arc::new(RwLock::new(View::new())),
pre_prepares: HashMap::new(),
prepares: HashMap::new(),
commits: HashMap::new(),
last_timestamp: 0,
}
}
pub fn current_view(&self) -> u64 {
self.current_view.read().unwrap().value()
}
pub fn insert_pre_prepare(&mut self, pre_prepare: PrePrepare) {
println!("[State::insert_pre_prepare] The PrePrepare message has been stored into logs: {}", pre_prepare);
self.pre_prepares.insert(
PrePrepareKey(pre_prepare.view(), pre_prepare.sequence_number()),
pre_prepare
);
}
pub fn insert_prepare(&mut self, peer_id: PeerId, prepare: Prepare) {
println!("[State::insert_prepare] The Prepare message has been stored into logs: {}", prepare);
let key = PrepareKey(prepare.view(), prepare.sequence_number());
let p = self.prepares
.entry(key)
.or_insert(HashMap::new());
p.insert(peer_id, prepare);
}
pub fn insert_commit(&mut self, peer_id: PeerId, commit: Commit) {
println!("[State::insert_commit] The Commit message has been stored into logs: {}", commit);
let key = CommitKey(commit.view());
let c = self.commits
.entry(key)
.or_insert(HashMap::new());
c.insert(peer_id, commit);
}
pub fn prepare_len(&self, view: u64, sequence_number: u64) -> usize {
self.prepares.get(&PrepareKey(view, sequence_number)).unwrap().len()
}
pub fn commit_len(&self, view: u64) -> usize {
self.commits.get(&CommitKey(view)).unwrap().len()
}
pub fn get_pre_prepare(&self, pre_prepare: &PrePrepare) -> Option<&PrePrepare> {
self.pre_prepares.get(&PrePrepareKey(pre_prepare.view(), pre_prepare.sequence_number()))
}
pub fn get_pre_prepare_by_key(&self, view: u64, sequence_number: u64) -> Option<&PrePrepare> {
self.pre_prepares.get(&PrePrepareKey(view, sequence_number))
}
pub fn last_timestamp(&self) -> u64 {
self.last_timestamp
}
pub fn update_last_timestamp(&mut self, timestamp: u64) {
println!("[State::update_last_timestamp] updated the timestamp from {:?} to {:?}", self.last_timestamp, timestamp);
self.last_timestamp = timestamp;
}
} |
// Generated from mat.rs.tera template. Edit the template, not the generated file.
use crate::{
coresimd::*, f32::math, swizzles::*, DMat4, EulerRot, Mat3, Mat3A, Quat, Vec3, Vec3A, Vec4,
};
#[cfg(not(target_arch = "spirv"))]
use core::fmt;
use core::iter::{Product, Sum};
use core::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign};
use core::simd::{Which::*, *};
/// Creates a 4x4 matrix from four column vectors.
///
/// Shorthand for [`Mat4::from_cols()`].
#[inline(always)]
pub const fn mat4(x_axis: Vec4, y_axis: Vec4, z_axis: Vec4, w_axis: Vec4) -> Mat4 {
    Mat4::from_cols(x_axis, y_axis, z_axis, w_axis)
}
/// A 4x4 column major matrix.
///
/// This 4x4 matrix type features convenience methods for creating and using affine transforms and
/// perspective projections. If you are primarily dealing with 3D affine transformations
/// considering using [`Affine3A`](crate::Affine3A) which is faster than a 4x4 matrix
/// for some affine operations.
///
/// Affine transformations including 3D translation, rotation and scale can be created
/// using methods such as [`Self::from_translation()`], [`Self::from_quat()`],
/// [`Self::from_scale()`] and [`Self::from_scale_rotation_translation()`].
///
/// Orthographic projections can be created using the methods [`Self::orthographic_lh()`] for
/// left-handed coordinate systems and [`Self::orthographic_rh()`] for right-handed
/// systems. The resulting matrix is also an affine transformation.
///
/// The [`Self::transform_point3()`] and [`Self::transform_vector3()`] convenience methods
/// are provided for performing affine transformations on 3D vectors and points. These
/// multiply 3D inputs as 4D vectors with an implicit `w` value of `1` for points and `0`
/// for vectors respectively. These methods assume that `Self` contains a valid affine
/// transform.
///
/// Perspective projections can be created using methods such as
/// [`Self::perspective_lh()`], [`Self::perspective_infinite_lh()`] and
/// [`Self::perspective_infinite_reverse_lh()`] for left-handed co-ordinate systems and
/// [`Self::perspective_rh()`], [`Self::perspective_infinite_rh()`] and
/// [`Self::perspective_infinite_reverse_rh()`] for right-handed co-ordinate systems.
///
/// The resulting perspective project can be use to transform 3D vectors as points with
/// perspective correction using the [`Self::project_point3()`] convenience method.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Mat4 {
    /// First column of the matrix.
    pub x_axis: Vec4,
    /// Second column of the matrix.
    pub y_axis: Vec4,
    /// Third column of the matrix.
    pub z_axis: Vec4,
    /// Fourth column of the matrix; holds the translation for affine transforms.
    pub w_axis: Vec4,
}
impl Mat4 {
    /// A 4x4 matrix with all elements set to `0.0`.
    pub const ZERO: Self = Self::from_cols(Vec4::ZERO, Vec4::ZERO, Vec4::ZERO, Vec4::ZERO);
    /// A 4x4 identity matrix, where all diagonal elements are `1`, and all off-diagonal elements are `0`.
    pub const IDENTITY: Self = Self::from_cols(Vec4::X, Vec4::Y, Vec4::Z, Vec4::W);
    /// A 4x4 matrix with all elements set to `NAN`.
    pub const NAN: Self = Self::from_cols(Vec4::NAN, Vec4::NAN, Vec4::NAN, Vec4::NAN);
    // Internal constructor from 16 scalars in column-major order: `mCR` is
    // column `C`, row `R`, so `m00..m03` form the first column (`x_axis`).
    #[allow(clippy::too_many_arguments)]
    #[inline(always)]
    const fn new(
        m00: f32,
        m01: f32,
        m02: f32,
        m03: f32,
        m10: f32,
        m11: f32,
        m12: f32,
        m13: f32,
        m20: f32,
        m21: f32,
        m22: f32,
        m23: f32,
        m30: f32,
        m31: f32,
        m32: f32,
        m33: f32,
    ) -> Self {
        Self {
            x_axis: Vec4::new(m00, m01, m02, m03),
            y_axis: Vec4::new(m10, m11, m12, m13),
            z_axis: Vec4::new(m20, m21, m22, m23),
            w_axis: Vec4::new(m30, m31, m32, m33),
        }
    }
/// Creates a 4x4 matrix from four column vectors.
#[inline(always)]
pub const fn from_cols(x_axis: Vec4, y_axis: Vec4, z_axis: Vec4, w_axis: Vec4) -> Self {
Self {
x_axis,
y_axis,
z_axis,
w_axis,
}
}
    /// Creates a 4x4 matrix from a `[f32; 16]` array stored in column major order.
    /// If your data is stored in row major you will need to `transpose` the returned
    /// matrix.
    #[inline]
    pub const fn from_cols_array(m: &[f32; 16]) -> Self {
        // Elements 0..4 form the first column, 4..8 the second, and so on.
        Self::new(
            m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8], m[9], m[10], m[11], m[12], m[13],
            m[14], m[15],
        )
    }
    /// Creates a `[f32; 16]` array storing data in column major order.
    /// If you require data in row major order `transpose` the matrix first.
    #[inline]
    pub const fn to_cols_array(&self) -> [f32; 16] {
        // Destructure through `to_array` because direct component access on the
        // SIMD-backed `Vec4` is not possible in a const-context (see `from_diagonal`).
        let [x_axis_x, x_axis_y, x_axis_z, x_axis_w] = self.x_axis.to_array();
        let [y_axis_x, y_axis_y, y_axis_z, y_axis_w] = self.y_axis.to_array();
        let [z_axis_x, z_axis_y, z_axis_z, z_axis_w] = self.z_axis.to_array();
        let [w_axis_x, w_axis_y, w_axis_z, w_axis_w] = self.w_axis.to_array();
        [
            x_axis_x, x_axis_y, x_axis_z, x_axis_w, y_axis_x, y_axis_y, y_axis_z, y_axis_w,
            z_axis_x, z_axis_y, z_axis_z, z_axis_w, w_axis_x, w_axis_y, w_axis_z, w_axis_w,
        ]
    }
/// Creates a 4x4 matrix from a `[[f32; 4]; 4]` 4D array stored in column major order.
/// If your data is in row major order you will need to `transpose` the returned
/// matrix.
#[inline]
pub const fn from_cols_array_2d(m: &[[f32; 4]; 4]) -> Self {
Self::from_cols(
Vec4::from_array(m[0]),
Vec4::from_array(m[1]),
Vec4::from_array(m[2]),
Vec4::from_array(m[3]),
)
}
/// Creates a `[[f32; 4]; 4]` 4D array storing data in column major order.
/// If you require data in row major order `transpose` the matrix first.
#[inline]
pub const fn to_cols_array_2d(&self) -> [[f32; 4]; 4] {
[
self.x_axis.to_array(),
self.y_axis.to_array(),
self.z_axis.to_array(),
self.w_axis.to_array(),
]
}
/// Creates a 4x4 matrix with its diagonal set to `diagonal` and all other entries set to 0.
#[doc(alias = "scale")]
#[inline]
pub const fn from_diagonal(diagonal: Vec4) -> Self {
// diagonal.x, diagonal.y etc can't be done in a const-context
let [x, y, z, w] = diagonal.to_array();
Self::new(
x, 0.0, 0.0, 0.0, 0.0, y, 0.0, 0.0, 0.0, 0.0, z, 0.0, 0.0, 0.0, 0.0, w,
)
}
    /// Converts a rotation quaternion into the three rotation basis columns
    /// (each with a `0.0` `w` component); shared by the quaternion-based
    /// matrix constructors such as [`Self::from_quat()`].
    ///
    /// Asserts that `rotation` is normalized when `glam_assert` is enabled.
    #[inline]
    fn quat_to_axes(rotation: Quat) -> (Vec4, Vec4, Vec4) {
        glam_assert!(rotation.is_normalized());
        let (x, y, z, w) = rotation.into();
        // Doubled components and their pairwise products, reused below.
        let x2 = x + x;
        let y2 = y + y;
        let z2 = z + z;
        let xx = x * x2;
        let xy = x * y2;
        let xz = x * z2;
        let yy = y * y2;
        let yz = y * z2;
        let zz = z * z2;
        let wx = w * x2;
        let wy = w * y2;
        let wz = w * z2;
        let x_axis = Vec4::new(1.0 - (yy + zz), xy + wz, xz - wy, 0.0);
        let y_axis = Vec4::new(xy - wz, 1.0 - (xx + zz), yz + wx, 0.0);
        let z_axis = Vec4::new(xz + wy, yz - wx, 1.0 - (xx + yy), 0.0);
        (x_axis, y_axis, z_axis)
    }
/// Creates an affine transformation matrix from the given 3D `scale`, `rotation` and
/// `translation`.
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
///
/// # Panics
///
/// Will panic if `rotation` is not normalized when `glam_assert` is enabled.
#[inline]
pub fn from_scale_rotation_translation(scale: Vec3, rotation: Quat, translation: Vec3) -> Self {
let (x_axis, y_axis, z_axis) = Self::quat_to_axes(rotation);
Self::from_cols(
x_axis.mul(scale.x),
y_axis.mul(scale.y),
z_axis.mul(scale.z),
Vec4::from((translation, 1.0)),
)
}
/// Creates an affine transformation matrix from the given 3D `translation`.
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
///
/// # Panics
///
/// Will panic if `rotation` is not normalized when `glam_assert` is enabled.
#[inline]
pub fn from_rotation_translation(rotation: Quat, translation: Vec3) -> Self {
let (x_axis, y_axis, z_axis) = Self::quat_to_axes(rotation);
Self::from_cols(x_axis, y_axis, z_axis, Vec4::from((translation, 1.0)))
}
    /// Extracts `scale`, `rotation` and `translation` from `self`. The input matrix is
    /// expected to be a 3D affine transformation matrix otherwise the output will be invalid.
    ///
    /// # Panics
    ///
    /// Will panic if the determinant of `self` is zero or if the resulting scale vector
    /// contains any zero elements when `glam_assert` is enabled.
    #[inline]
    pub fn to_scale_rotation_translation(&self) -> (Vec3, Quat, Vec3) {
        let det = self.determinant();
        glam_assert!(det != 0.0);
        // The determinant's sign is folded into the x scale so that a negative
        // (reflecting) transform is represented by a flipped axis rather than an
        // invalid rotation.
        let scale = Vec3::new(
            self.x_axis.length() * math::signum(det),
            self.y_axis.length(),
            self.z_axis.length(),
        );
        glam_assert!(scale.cmpne(Vec3::ZERO).all());
        // Divide the scale back out of the upper 3x3 before extracting rotation.
        let inv_scale = scale.recip();
        let rotation = Quat::from_rotation_axes(
            self.x_axis.mul(inv_scale.x).xyz(),
            self.y_axis.mul(inv_scale.y).xyz(),
            self.z_axis.mul(inv_scale.z).xyz(),
        );
        let translation = self.w_axis.xyz();
        (scale, rotation, translation)
    }
/// Creates an affine transformation matrix from the given `rotation` quaternion.
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
///
/// # Panics
///
/// Will panic if `rotation` is not normalized when `glam_assert` is enabled.
#[inline]
pub fn from_quat(rotation: Quat) -> Self {
let (x_axis, y_axis, z_axis) = Self::quat_to_axes(rotation);
Self::from_cols(x_axis, y_axis, z_axis, Vec4::W)
}
/// Creates an affine transformation matrix from the given 3x3 linear transformation
/// matrix.
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
#[inline]
pub fn from_mat3(m: Mat3) -> Self {
Self::from_cols(
Vec4::from((m.x_axis, 0.0)),
Vec4::from((m.y_axis, 0.0)),
Vec4::from((m.z_axis, 0.0)),
Vec4::W,
)
}
/// Creates an affine transformation matrix from the given 3x3 linear transformation
/// matrix.
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
#[inline]
pub fn from_mat3a(m: Mat3A) -> Self {
Self::from_cols(
Vec4::from((m.x_axis, 0.0)),
Vec4::from((m.y_axis, 0.0)),
Vec4::from((m.z_axis, 0.0)),
Vec4::W,
)
}
/// Creates an affine transformation matrix from the given 3D `translation`.
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
#[inline]
pub fn from_translation(translation: Vec3) -> Self {
Self::from_cols(
Vec4::X,
Vec4::Y,
Vec4::Z,
Vec4::new(translation.x, translation.y, translation.z, 1.0),
)
}
    /// Creates an affine transformation matrix containing a 3D rotation around a normalized
    /// rotation `axis` of `angle` (in radians).
    ///
    /// The resulting matrix can be used to transform 3D points and vectors. See
    /// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
    ///
    /// # Panics
    ///
    /// Will panic if `axis` is not normalized when `glam_assert` is enabled.
    #[inline]
    pub fn from_axis_angle(axis: Vec3, angle: f32) -> Self {
        glam_assert!(axis.is_normalized());
        let (sin, cos) = math::sin_cos(angle);
        // Shared axis-angle terms: axis scaled by sin, squared axis components,
        // (1 - cos), and the cross products of components weighted by (1 - cos).
        let axis_sin = axis.mul(sin);
        let axis_sq = axis.mul(axis);
        let omc = 1.0 - cos;
        let xyomc = axis.x * axis.y * omc;
        let xzomc = axis.x * axis.z * omc;
        let yzomc = axis.y * axis.z * omc;
        Self::from_cols(
            Vec4::new(
                axis_sq.x * omc + cos,
                xyomc + axis_sin.z,
                xzomc - axis_sin.y,
                0.0,
            ),
            Vec4::new(
                xyomc - axis_sin.z,
                axis_sq.y * omc + cos,
                yzomc + axis_sin.x,
                0.0,
            ),
            Vec4::new(
                xzomc + axis_sin.y,
                yzomc - axis_sin.x,
                axis_sq.z * omc + cos,
                0.0,
            ),
            Vec4::W,
        )
    }
#[inline]
/// Creates a affine transformation matrix containing a rotation from the given euler
/// rotation sequence and angles (in radians).
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
pub fn from_euler(order: EulerRot, a: f32, b: f32, c: f32) -> Self {
let quat = Quat::from_euler(order, a, b, c);
Self::from_quat(quat)
}
/// Creates an affine transformation matrix containing a 3D rotation around the x axis of
/// `angle` (in radians).
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
#[inline]
pub fn from_rotation_x(angle: f32) -> Self {
let (sina, cosa) = math::sin_cos(angle);
Self::from_cols(
Vec4::X,
Vec4::new(0.0, cosa, sina, 0.0),
Vec4::new(0.0, -sina, cosa, 0.0),
Vec4::W,
)
}
/// Creates an affine transformation matrix containing a 3D rotation around the y axis of
/// `angle` (in radians).
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
#[inline]
pub fn from_rotation_y(angle: f32) -> Self {
let (sina, cosa) = math::sin_cos(angle);
Self::from_cols(
Vec4::new(cosa, 0.0, -sina, 0.0),
Vec4::Y,
Vec4::new(sina, 0.0, cosa, 0.0),
Vec4::W,
)
}
/// Creates an affine transformation matrix containing a 3D rotation around the z axis of
/// `angle` (in radians).
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
#[inline]
pub fn from_rotation_z(angle: f32) -> Self {
let (sina, cosa) = math::sin_cos(angle);
Self::from_cols(
Vec4::new(cosa, sina, 0.0, 0.0),
Vec4::new(-sina, cosa, 0.0, 0.0),
Vec4::Z,
Vec4::W,
)
}
/// Creates an affine transformation matrix containing the given 3D non-uniform `scale`.
///
/// The resulting matrix can be used to transform 3D points and vectors. See
/// [`Self::transform_point3()`] and [`Self::transform_vector3()`].
///
/// # Panics
///
/// Will panic if all elements of `scale` are zero when `glam_assert` is enabled.
#[inline]
pub fn from_scale(scale: Vec3) -> Self {
// Do not panic as long as any component is non-zero
glam_assert!(scale.cmpne(Vec3::ZERO).any());
Self::from_cols(
Vec4::new(scale.x, 0.0, 0.0, 0.0),
Vec4::new(0.0, scale.y, 0.0, 0.0),
Vec4::new(0.0, 0.0, scale.z, 0.0),
Vec4::W,
)
}
/// Creates a 4x4 matrix from the first 16 values in `slice`.
///
/// # Panics
///
/// Panics if `slice` is less than 16 elements long.
#[inline]
pub const fn from_cols_slice(slice: &[f32]) -> Self {
Self::new(
slice[0], slice[1], slice[2], slice[3], slice[4], slice[5], slice[6], slice[7],
slice[8], slice[9], slice[10], slice[11], slice[12], slice[13], slice[14], slice[15],
)
}
/// Writes the columns of `self` to the first 16 elements in `slice`.
///
/// # Panics
///
/// Panics if `slice` is less than 16 elements long.
#[inline]
pub fn write_cols_to_slice(self, slice: &mut [f32]) {
slice[0] = self.x_axis.x;
slice[1] = self.x_axis.y;
slice[2] = self.x_axis.z;
slice[3] = self.x_axis.w;
slice[4] = self.y_axis.x;
slice[5] = self.y_axis.y;
slice[6] = self.y_axis.z;
slice[7] = self.y_axis.w;
slice[8] = self.z_axis.x;
slice[9] = self.z_axis.y;
slice[10] = self.z_axis.z;
slice[11] = self.z_axis.w;
slice[12] = self.w_axis.x;
slice[13] = self.w_axis.y;
slice[14] = self.w_axis.z;
slice[15] = self.w_axis.w;
}
    /// Returns the matrix column for the given `index`.
    ///
    /// `0` is `x_axis`, `1` is `y_axis`, `2` is `z_axis` and `3` is `w_axis`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is greater than 3.
    #[inline]
    pub fn col(&self, index: usize) -> Vec4 {
        match index {
            0 => self.x_axis,
            1 => self.y_axis,
            2 => self.z_axis,
            3 => self.w_axis,
            _ => panic!("index out of bounds"),
        }
    }
    /// Returns a mutable reference to the matrix column for the given `index`.
    ///
    /// `0` is `x_axis`, `1` is `y_axis`, `2` is `z_axis` and `3` is `w_axis`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is greater than 3.
    #[inline]
    pub fn col_mut(&mut self, index: usize) -> &mut Vec4 {
        match index {
            0 => &mut self.x_axis,
            1 => &mut self.y_axis,
            2 => &mut self.z_axis,
            3 => &mut self.w_axis,
            _ => panic!("index out of bounds"),
        }
    }
    /// Returns the matrix row for the given `index`.
    ///
    /// Storage is column-major, so a row is gathered element-wise from each column.
    ///
    /// # Panics
    ///
    /// Panics if `index` is greater than 3.
    #[inline]
    pub fn row(&self, index: usize) -> Vec4 {
        match index {
            0 => Vec4::new(self.x_axis.x, self.y_axis.x, self.z_axis.x, self.w_axis.x),
            1 => Vec4::new(self.x_axis.y, self.y_axis.y, self.z_axis.y, self.w_axis.y),
            2 => Vec4::new(self.x_axis.z, self.y_axis.z, self.z_axis.z, self.w_axis.z),
            3 => Vec4::new(self.x_axis.w, self.y_axis.w, self.z_axis.w, self.w_axis.w),
            _ => panic!("index out of bounds"),
        }
    }
/// Returns `true` if, and only if, all elements are finite.
/// If any element is either `NaN`, positive or negative infinity, this will return `false`.
#[inline]
pub fn is_finite(&self) -> bool {
self.x_axis.is_finite()
&& self.y_axis.is_finite()
&& self.z_axis.is_finite()
&& self.w_axis.is_finite()
}
/// Returns `true` if any elements are `NaN`.
#[inline]
pub fn is_nan(&self) -> bool {
self.x_axis.is_nan() || self.y_axis.is_nan() || self.z_axis.is_nan() || self.w_axis.is_nan()
}
    /// Returns the transpose of `self`.
    #[must_use]
    #[inline]
    pub fn transpose(&self) -> Self {
        // Based on https://github.com/microsoft/DirectXMath `XMMatrixTranspose`
        // Step 1: interleave the low and high element pairs of (x, y) and (z, w).
        let tmp0 = simd_swizzle!(
            self.x_axis.0,
            self.y_axis.0,
            [First(0), First(1), Second(0), Second(1)]
        );
        let tmp1 = simd_swizzle!(
            self.x_axis.0,
            self.y_axis.0,
            [First(2), First(3), Second(2), Second(3)]
        );
        let tmp2 = simd_swizzle!(
            self.z_axis.0,
            self.w_axis.0,
            [First(0), First(1), Second(0), Second(1)]
        );
        let tmp3 = simd_swizzle!(
            self.z_axis.0,
            self.w_axis.0,
            [First(2), First(3), Second(2), Second(3)]
        );
        // Step 2: shuffle the interleaved vectors so each output column holds
        // one original row.
        Self {
            x_axis: Vec4(simd_swizzle!(
                tmp0,
                tmp2,
                [First(0), First(2), Second(0), Second(2)]
            )),
            y_axis: Vec4(simd_swizzle!(
                tmp0,
                tmp2,
                [First(1), First(3), Second(1), Second(3)]
            )),
            z_axis: Vec4(simd_swizzle!(
                tmp1,
                tmp3,
                [First(0), First(2), Second(0), Second(2)]
            )),
            w_axis: Vec4(simd_swizzle!(
                tmp1,
                tmp3,
                [First(1), First(3), Second(1), Second(3)]
            )),
        }
    }
/// Returns the determinant of `self`.
pub fn determinant(&self) -> f32 {
// Based on https://github.com/g-truc/glm `glm_mat4_determinant`
let swp2a = simd_swizzle!(self.z_axis.0, [2, 1, 1, 0]);
let swp3a = simd_swizzle!(self.w_axis.0, [3, 3, 2, 3]);
let swp2b = simd_swizzle!(self.z_axis.0, [3, 3, 2, 3]);
let swp3b = simd_swizzle!(self.w_axis.0, [2, 1, 1, 0]);
let swp2c = simd_swizzle!(self.z_axis.0, [2, 1, 0, 0]);
let swp3c = simd_swizzle!(self.w_axis.0, [0, 0, 2, 1]);
let mula = swp2a * swp3a;
let mulb = swp2b * swp3b;
let mulc = swp2c * swp3c;
let sube = mula - mulb;
let subf = simd_swizzle!(mulc, [2, 3, 2, 3]) - mulc;
let subfaca = simd_swizzle!(sube, [0, 0, 1, 2]);
let swpfaca = simd_swizzle!(self.y_axis.0, [1, 0, 0, 0]);
let mulfaca = swpfaca * subfaca;
let subtmpb = simd_swizzle!(sube, subf, [First(1), First(3), Second(0), Second(0)]);
let subfacb = simd_swizzle!(subtmpb, [0, 1, 1, 3]);
let swpfacb = simd_swizzle!(self.y_axis.0, [2, 2, 1, 1]);
let mulfacb = swpfacb * subfacb;
let subres = mulfaca - mulfacb;
let subtmpc = simd_swizzle!(sube, subf, [First(2), First(2), Second(0), Second(1)]);
let subfacc = simd_swizzle!(subtmpc, [0, 2, 3, 3]);
let swpfacc = simd_swizzle!(self.y_axis.0, [3, 3, 3, 2]);
let mulfacc = swpfacc * subfacc;
let addres = subres + mulfacc;
let detcof = addres * f32x4::from_array([1.0, -1.0, 1.0, -1.0]);
dot4(self.x_axis.0, detcof)
}
    /// Returns the inverse of `self`.
    ///
    /// If the matrix is not invertible the returned matrix will be invalid.
    ///
    /// # Panics
    ///
    /// Will panic if the determinant of `self` is zero when `glam_assert` is enabled.
    #[must_use]
    pub fn inverse(&self) -> Self {
        // Based on https://github.com/g-truc/glm `glm_mat4_inverse`
        // fac0..fac5 are lane-wise 2x2 sub-determinants built from the last
        // three rows; they feed the cofactor expansion below.
        let fac0 = {
            let swp0a = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(3), First(3), Second(3), Second(3)]
            );
            let swp0b = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(2), First(2), Second(2), Second(2)]
            );
            let swp00 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(2), First(2), Second(2), Second(2)]
            );
            let swp01 = simd_swizzle!(swp0a, [0, 0, 0, 2]);
            let swp02 = simd_swizzle!(swp0b, [0, 0, 0, 2]);
            let swp03 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(3), First(3), Second(3), Second(3)]
            );
            let mul00 = swp00 * swp01;
            let mul01 = swp02 * swp03;
            mul00 - mul01
        };
        let fac1 = {
            let swp0a = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(3), First(3), Second(3), Second(3)]
            );
            let swp0b = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(1), First(1), Second(1), Second(1)]
            );
            let swp00 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(1), First(1), Second(1), Second(1)]
            );
            let swp01 = simd_swizzle!(swp0a, [0, 0, 0, 2]);
            let swp02 = simd_swizzle!(swp0b, [0, 0, 0, 2]);
            let swp03 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(3), First(3), Second(3), Second(3)]
            );
            let mul00 = swp00 * swp01;
            let mul01 = swp02 * swp03;
            mul00 - mul01
        };
        let fac2 = {
            let swp0a = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(2), First(2), Second(2), Second(2)]
            );
            let swp0b = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(1), First(1), Second(1), Second(1)]
            );
            let swp00 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(1), First(1), Second(1), Second(1)]
            );
            let swp01 = simd_swizzle!(swp0a, [0, 0, 0, 2]);
            let swp02 = simd_swizzle!(swp0b, [0, 0, 0, 2]);
            let swp03 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(2), First(2), Second(2), Second(2)]
            );
            let mul00 = swp00 * swp01;
            let mul01 = swp02 * swp03;
            mul00 - mul01
        };
        let fac3 = {
            let swp0a = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(3), First(3), Second(3), Second(3)]
            );
            let swp0b = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(0), First(0), Second(0), Second(0)]
            );
            let swp00 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(0), First(0), Second(0), Second(0)]
            );
            let swp01 = simd_swizzle!(swp0a, [0, 0, 0, 2]);
            let swp02 = simd_swizzle!(swp0b, [0, 0, 0, 2]);
            let swp03 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(3), First(3), Second(3), Second(3)]
            );
            let mul00 = swp00 * swp01;
            let mul01 = swp02 * swp03;
            mul00 - mul01
        };
        let fac4 = {
            let swp0a = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(2), First(2), Second(2), Second(2)]
            );
            let swp0b = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(0), First(0), Second(0), Second(0)]
            );
            let swp00 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(0), First(0), Second(0), Second(0)]
            );
            let swp01 = simd_swizzle!(swp0a, [0, 0, 0, 2]);
            let swp02 = simd_swizzle!(swp0b, [0, 0, 0, 2]);
            let swp03 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(2), First(2), Second(2), Second(2)]
            );
            let mul00 = swp00 * swp01;
            let mul01 = swp02 * swp03;
            mul00 - mul01
        };
        let fac5 = {
            let swp0a = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(1), First(1), Second(1), Second(1)]
            );
            let swp0b = simd_swizzle!(
                self.w_axis.0,
                self.z_axis.0,
                [First(0), First(0), Second(0), Second(0)]
            );
            let swp00 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(0), First(0), Second(0), Second(0)]
            );
            let swp01 = simd_swizzle!(swp0a, [0, 0, 0, 2]);
            let swp02 = simd_swizzle!(swp0b, [0, 0, 0, 2]);
            let swp03 = simd_swizzle!(
                self.z_axis.0,
                self.y_axis.0,
                [First(1), First(1), Second(1), Second(1)]
            );
            let mul00 = swp00 * swp01;
            let mul01 = swp02 * swp03;
            mul00 - mul01
        };
        // Alternating cofactor signs (checkerboard pattern).
        let sign_a = f32x4::from_array([-1.0, 1.0, -1.0, 1.0]);
        let sign_b = f32x4::from_array([1.0, -1.0, 1.0, -1.0]);
        // vecN broadcasts element N of the first two rows per GLM's layout.
        let temp0 = simd_swizzle!(
            self.y_axis.0,
            self.x_axis.0,
            [First(0), First(0), Second(0), Second(0)]
        );
        let vec0 = simd_swizzle!(temp0, [0, 2, 2, 2]);
        let temp1 = simd_swizzle!(
            self.y_axis.0,
            self.x_axis.0,
            [First(1), First(1), Second(1), Second(1)]
        );
        let vec1 = simd_swizzle!(temp1, [0, 2, 2, 2]);
        let temp2 = simd_swizzle!(
            self.y_axis.0,
            self.x_axis.0,
            [First(2), First(2), Second(2), Second(2)]
        );
        let vec2 = simd_swizzle!(temp2, [0, 2, 2, 2]);
        let temp3 = simd_swizzle!(
            self.y_axis.0,
            self.x_axis.0,
            [First(3), First(3), Second(3), Second(3)]
        );
        let vec3 = simd_swizzle!(temp3, [0, 2, 2, 2]);
        // inv0..inv3 are the signed cofactor columns (the adjugate's columns).
        let mul00 = vec1 * fac0;
        let mul01 = vec2 * fac1;
        let mul02 = vec3 * fac2;
        let sub00 = mul00 - mul01;
        let add00 = sub00 + mul02;
        let inv0 = sign_b * add00;
        let mul03 = vec0 * fac0;
        let mul04 = vec2 * fac3;
        let mul05 = vec3 * fac4;
        let sub01 = mul03 - mul04;
        let add01 = sub01 + mul05;
        let inv1 = sign_a * add01;
        let mul06 = vec0 * fac1;
        let mul07 = vec1 * fac3;
        let mul08 = vec3 * fac5;
        let sub02 = mul06 - mul07;
        let add02 = sub02 + mul08;
        let inv2 = sign_b * add02;
        let mul09 = vec0 * fac2;
        let mul10 = vec1 * fac4;
        let mul11 = vec2 * fac5;
        let sub03 = mul09 - mul10;
        let add03 = sub03 + mul11;
        let inv3 = sign_a * add03;
        // Gather one row of the adjugate to obtain the determinant via a dot
        // product with the first row, then scale by its reciprocal.
        let row0 = simd_swizzle!(inv0, inv1, [First(0), First(0), Second(0), Second(0)]);
        let row1 = simd_swizzle!(inv2, inv3, [First(0), First(0), Second(0), Second(0)]);
        let row2 = simd_swizzle!(row0, row1, [First(0), First(2), Second(0), Second(2)]);
        let dot0 = dot4(self.x_axis.0, row2);
        glam_assert!(dot0 != 0.0);
        let rcp0 = f32x4::splat(dot0.recip());
        Self {
            x_axis: Vec4(inv0 * rcp0),
            y_axis: Vec4(inv1 * rcp0),
            z_axis: Vec4(inv2 * rcp0),
            w_axis: Vec4(inv3 * rcp0),
        }
    }
    /// Creates a left-handed view matrix using a camera position, an up direction, and a facing
    /// direction.
    ///
    /// For a view coordinate system with `+X=right`, `+Y=up` and `+Z=forward`.
    #[inline]
    pub fn look_to_lh(eye: Vec3, dir: Vec3, up: Vec3) -> Self {
        // A left-handed view is the right-handed view looking down the negated direction.
        Self::look_to_rh(eye, -dir, up)
    }
/// Creates a right-handed view matrix using a camera position, an up direction, and a facing
/// direction.
///
/// For a view coordinate system with `+X=right`, `+Y=up` and `+Z=back`.
#[inline]
pub fn look_to_rh(eye: Vec3, dir: Vec3, up: Vec3) -> Self {
let f = dir.normalize();
let s = f.cross(up).normalize();
let u = s.cross(f);
Self::from_cols(
Vec4::new(s.x, u.x, -f.x, 0.0),
Vec4::new(s.y, u.y, -f.y, 0.0),
Vec4::new(s.z, u.z, -f.z, 0.0),
Vec4::new(-eye.dot(s), -eye.dot(u), eye.dot(f), 1.0),
)
}
/// Creates a left-handed view matrix using a camera position, an up direction, and a focal
/// point.
/// For a view coordinate system with `+X=right`, `+Y=up` and `+Z=forward`.
///
/// # Panics
///
/// Will panic if `up` is not normalized when `glam_assert` is enabled.
#[inline]
pub fn look_at_lh(eye: Vec3, center: Vec3, up: Vec3) -> Self {
glam_assert!(up.is_normalized());
Self::look_to_lh(eye, center.sub(eye), up)
}
/// Creates a right-handed view matrix using a camera position, an up direction, and a focal
/// point.
/// For a view coordinate system with `+X=right`, `+Y=up` and `+Z=back`.
///
/// # Panics
///
/// Will panic if `up` is not normalized when `glam_assert` is enabled.
#[inline]
pub fn look_at_rh(eye: Vec3, center: Vec3, up: Vec3) -> Self {
glam_assert!(up.is_normalized());
Self::look_to_rh(eye, center.sub(eye), up)
}
    /// Creates a right-handed perspective projection matrix with [-1,1] depth range.
    /// This is the same as the OpenGL `gluPerspective` function.
    /// See <https://www.khronos.org/registry/OpenGL-Refpages/gl2.1/xhtml/gluPerspective.xml>
    #[inline]
    pub fn perspective_rh_gl(
        fov_y_radians: f32,
        aspect_ratio: f32,
        z_near: f32,
        z_far: f32,
    ) -> Self {
        let inv_length = 1.0 / (z_near - z_far);
        // f = cot(fov_y / 2), the focal scale along y.
        let f = 1.0 / math::tan(0.5 * fov_y_radians);
        let a = f / aspect_ratio;
        let b = (z_near + z_far) * inv_length;
        let c = (2.0 * z_near * z_far) * inv_length;
        Self::from_cols(
            Vec4::new(a, 0.0, 0.0, 0.0),
            Vec4::new(0.0, f, 0.0, 0.0),
            Vec4::new(0.0, 0.0, b, -1.0),
            Vec4::new(0.0, 0.0, c, 0.0),
        )
    }
    /// Creates a left-handed perspective projection matrix with `[0,1]` depth range.
    ///
    /// # Panics
    ///
    /// Will panic if `z_near` or `z_far` are less than or equal to zero when `glam_assert` is
    /// enabled.
    #[inline]
    pub fn perspective_lh(fov_y_radians: f32, aspect_ratio: f32, z_near: f32, z_far: f32) -> Self {
        glam_assert!(z_near > 0.0 && z_far > 0.0);
        // h = cot(fov_y / 2); w scales it by the inverse aspect ratio.
        let (sin_fov, cos_fov) = math::sin_cos(0.5 * fov_y_radians);
        let h = cos_fov / sin_fov;
        let w = h / aspect_ratio;
        let r = z_far / (z_far - z_near);
        Self::from_cols(
            Vec4::new(w, 0.0, 0.0, 0.0),
            Vec4::new(0.0, h, 0.0, 0.0),
            Vec4::new(0.0, 0.0, r, 1.0),
            Vec4::new(0.0, 0.0, -r * z_near, 0.0),
        )
    }
    /// Creates a right-handed perspective projection matrix with `[0,1]` depth range.
    ///
    /// # Panics
    ///
    /// Will panic if `z_near` or `z_far` are less than or equal to zero when `glam_assert` is
    /// enabled.
    #[inline]
    pub fn perspective_rh(fov_y_radians: f32, aspect_ratio: f32, z_near: f32, z_far: f32) -> Self {
        glam_assert!(z_near > 0.0 && z_far > 0.0);
        let (sin_fov, cos_fov) = math::sin_cos(0.5 * fov_y_radians);
        let h = cos_fov / sin_fov;
        let w = h / aspect_ratio;
        let r = z_far / (z_near - z_far);
        Self::from_cols(
            Vec4::new(w, 0.0, 0.0, 0.0),
            Vec4::new(0.0, h, 0.0, 0.0),
            Vec4::new(0.0, 0.0, r, -1.0),
            Vec4::new(0.0, 0.0, r * z_near, 0.0),
        )
    }
    /// Creates an infinite left-handed perspective projection matrix with `[0,1]` depth range.
    ///
    /// # Panics
    ///
    /// Will panic if `z_near` is less than or equal to zero when `glam_assert` is enabled.
    #[inline]
    pub fn perspective_infinite_lh(fov_y_radians: f32, aspect_ratio: f32, z_near: f32) -> Self {
        glam_assert!(z_near > 0.0);
        let (sin_fov, cos_fov) = math::sin_cos(0.5 * fov_y_radians);
        let h = cos_fov / sin_fov;
        let w = h / aspect_ratio;
        Self::from_cols(
            Vec4::new(w, 0.0, 0.0, 0.0),
            Vec4::new(0.0, h, 0.0, 0.0),
            Vec4::new(0.0, 0.0, 1.0, 1.0),
            Vec4::new(0.0, 0.0, -z_near, 0.0),
        )
    }
    /// Creates an infinite reverse left-handed perspective projection matrix with `[0,1]`
    /// depth range.
    ///
    /// # Panics
    ///
    /// Will panic if `z_near` is less than or equal to zero when `glam_assert` is enabled.
    #[inline]
    pub fn perspective_infinite_reverse_lh(
        fov_y_radians: f32,
        aspect_ratio: f32,
        z_near: f32,
    ) -> Self {
        glam_assert!(z_near > 0.0);
        let (sin_fov, cos_fov) = math::sin_cos(0.5 * fov_y_radians);
        let h = cos_fov / sin_fov;
        let w = h / aspect_ratio;
        Self::from_cols(
            Vec4::new(w, 0.0, 0.0, 0.0),
            Vec4::new(0.0, h, 0.0, 0.0),
            Vec4::new(0.0, 0.0, 0.0, 1.0),
            Vec4::new(0.0, 0.0, z_near, 0.0),
        )
    }
    /// Creates an infinite right-handed perspective projection matrix with
    /// `[0,1]` depth range.
    ///
    /// # Panics
    ///
    /// Will panic if `z_near` is less than or equal to zero when `glam_assert` is enabled.
    #[inline]
    pub fn perspective_infinite_rh(fov_y_radians: f32, aspect_ratio: f32, z_near: f32) -> Self {
        glam_assert!(z_near > 0.0);
        let f = 1.0 / math::tan(0.5 * fov_y_radians);
        Self::from_cols(
            Vec4::new(f / aspect_ratio, 0.0, 0.0, 0.0),
            Vec4::new(0.0, f, 0.0, 0.0),
            Vec4::new(0.0, 0.0, -1.0, -1.0),
            Vec4::new(0.0, 0.0, -z_near, 0.0),
        )
    }
    /// Creates an infinite reverse right-handed perspective projection matrix
    /// with `[0,1]` depth range.
    ///
    /// # Panics
    ///
    /// Will panic if `z_near` is less than or equal to zero when `glam_assert` is enabled.
    #[inline]
    pub fn perspective_infinite_reverse_rh(
        fov_y_radians: f32,
        aspect_ratio: f32,
        z_near: f32,
    ) -> Self {
        glam_assert!(z_near > 0.0);
        let f = 1.0 / math::tan(0.5 * fov_y_radians);
        Self::from_cols(
            Vec4::new(f / aspect_ratio, 0.0, 0.0, 0.0),
            Vec4::new(0.0, f, 0.0, 0.0),
            Vec4::new(0.0, 0.0, 0.0, -1.0),
            Vec4::new(0.0, 0.0, z_near, 0.0),
        )
    }
    /// Creates a right-handed orthographic projection matrix with `[-1,1]` depth
    /// range. This is the same as the OpenGL `glOrtho` function in OpenGL.
    /// See
    /// <https://www.khronos.org/registry/OpenGL-Refpages/gl2.1/xhtml/glOrtho.xml>
    #[inline]
    pub fn orthographic_rh_gl(
        left: f32,
        right: f32,
        bottom: f32,
        top: f32,
        near: f32,
        far: f32,
    ) -> Self {
        let a = 2.0 / (right - left);
        let b = 2.0 / (top - bottom);
        let c = -2.0 / (far - near);
        let tx = -(right + left) / (right - left);
        let ty = -(top + bottom) / (top - bottom);
        let tz = -(far + near) / (far - near);
        Self::from_cols(
            Vec4::new(a, 0.0, 0.0, 0.0),
            Vec4::new(0.0, b, 0.0, 0.0),
            Vec4::new(0.0, 0.0, c, 0.0),
            Vec4::new(tx, ty, tz, 1.0),
        )
    }
    /// Creates a left-handed orthographic projection matrix with `[0,1]` depth range.
    #[inline]
    pub fn orthographic_lh(
        left: f32,
        right: f32,
        bottom: f32,
        top: f32,
        near: f32,
        far: f32,
    ) -> Self {
        let rcp_width = 1.0 / (right - left);
        let rcp_height = 1.0 / (top - bottom);
        let r = 1.0 / (far - near);
        Self::from_cols(
            // rcp_width + rcp_width == 2 / (right - left); likewise for height.
            Vec4::new(rcp_width + rcp_width, 0.0, 0.0, 0.0),
            Vec4::new(0.0, rcp_height + rcp_height, 0.0, 0.0),
            Vec4::new(0.0, 0.0, r, 0.0),
            Vec4::new(
                -(left + right) * rcp_width,
                -(top + bottom) * rcp_height,
                -r * near,
                1.0,
            ),
        )
    }
    /// Creates a right-handed orthographic projection matrix with `[0,1]` depth range.
    #[inline]
    pub fn orthographic_rh(
        left: f32,
        right: f32,
        bottom: f32,
        top: f32,
        near: f32,
        far: f32,
    ) -> Self {
        let rcp_width = 1.0 / (right - left);
        let rcp_height = 1.0 / (top - bottom);
        let r = 1.0 / (near - far);
        Self::from_cols(
            // rcp_width + rcp_width == 2 / (right - left); likewise for height.
            Vec4::new(rcp_width + rcp_width, 0.0, 0.0, 0.0),
            Vec4::new(0.0, rcp_height + rcp_height, 0.0, 0.0),
            Vec4::new(0.0, 0.0, r, 0.0),
            Vec4::new(
                -(left + right) * rcp_width,
                -(top + bottom) * rcp_height,
                r * near,
                1.0,
            ),
        )
    }
/// Transforms the given 3D vector as a point, applying perspective correction.
///
/// This is the equivalent of multiplying the 3D vector as a 4D vector where `w` is `1.0`.
/// The perspective divide is performed meaning the resulting 3D vector is divided by `w`.
///
/// This method assumes that `self` contains a projective transform.
#[inline]
pub fn project_point3(&self, rhs: Vec3) -> Vec3 {
let mut res = self.x_axis.mul(rhs.x);
res = self.y_axis.mul(rhs.y).add(res);
res = self.z_axis.mul(rhs.z).add(res);
res = self.w_axis.add(res);
res = res.mul(res.wwww().recip());
res.xyz()
}
    /// Transforms the given 3D vector as a point.
    ///
    /// This is the equivalent of multiplying the 3D vector as a 4D vector where `w` is
    /// `1.0`.
    ///
    /// This method assumes that `self` contains a valid affine transform. It does not perform
    /// a perspective divide, if `self` contains a perspective transform, or if you are unsure,
    /// the [`Self::project_point3()`] method should be used instead.
    ///
    /// # Panics
    ///
    /// Will panic if row `3` (the final row) of `self` is not `(0, 0, 0, 1)` when
    /// `glam_assert` is enabled.
    #[inline]
    pub fn transform_point3(&self, rhs: Vec3) -> Vec3 {
        glam_assert!(self.row(3).abs_diff_eq(Vec4::W, 1e-6));
        let mut res = self.x_axis.mul(rhs.x);
        res = self.y_axis.mul(rhs.y).add(res);
        res = self.z_axis.mul(rhs.z).add(res);
        res = self.w_axis.add(res);
        res.xyz()
    }
    /// Transforms the given 3D vector as a direction.
    ///
    /// This is the equivalent of multiplying the 3D vector as a 4D vector where `w` is
    /// `0.0`, so translation (the `w_axis` column) is not applied.
    ///
    /// This method assumes that `self` contains a valid affine transform.
    ///
    /// # Panics
    ///
    /// Will panic if row `3` (the final row) of `self` is not `(0, 0, 0, 1)` when
    /// `glam_assert` is enabled.
    #[inline]
    pub fn transform_vector3(&self, rhs: Vec3) -> Vec3 {
        glam_assert!(self.row(3).abs_diff_eq(Vec4::W, 1e-6));
        let mut res = self.x_axis.mul(rhs.x);
        res = self.y_axis.mul(rhs.y).add(res);
        res = self.z_axis.mul(rhs.z).add(res);
        res.xyz()
    }
    /// Transforms the given [`Vec3A`] as 3D point.
    ///
    /// This is the equivalent of multiplying the [`Vec3A`] as a 4D vector where `w` is `1.0`.
    ///
    /// # Panics
    ///
    /// Will panic if row `3` (the final row) of `self` is not `(0, 0, 0, 1)` when
    /// `glam_assert` is enabled.
    #[inline]
    pub fn transform_point3a(&self, rhs: Vec3A) -> Vec3A {
        glam_assert!(self.row(3).abs_diff_eq(Vec4::W, 1e-6));
        let mut res = self.x_axis.mul(rhs.xxxx());
        res = self.y_axis.mul(rhs.yyyy()).add(res);
        res = self.z_axis.mul(rhs.zzzz()).add(res);
        res = self.w_axis.add(res);
        res.into()
    }
    /// Transforms the given [`Vec3A`] as 3D vector.
    ///
    /// This is the equivalent of multiplying the [`Vec3A`] as a 4D vector where `w` is `0.0`.
    ///
    /// # Panics
    ///
    /// Will panic if row `3` (the final row) of `self` is not `(0, 0, 0, 1)` when
    /// `glam_assert` is enabled.
    #[inline]
    pub fn transform_vector3a(&self, rhs: Vec3A) -> Vec3A {
        glam_assert!(self.row(3).abs_diff_eq(Vec4::W, 1e-6));
        let mut res = self.x_axis.mul(rhs.xxxx());
        res = self.y_axis.mul(rhs.yyyy()).add(res);
        res = self.z_axis.mul(rhs.zzzz()).add(res);
        res.into()
    }
/// Transforms a 4D vector.
#[inline]
pub fn mul_vec4(&self, rhs: Vec4) -> Vec4 {
let mut res = self.x_axis.mul(rhs.xxxx());
res = res.add(self.y_axis.mul(rhs.yyyy()));
res = res.add(self.z_axis.mul(rhs.zzzz()));
res = res.add(self.w_axis.mul(rhs.wwww()));
res
}
/// Multiplies two 4x4 matrices.
#[inline]
pub fn mul_mat4(&self, rhs: &Self) -> Self {
Self::from_cols(
self.mul(rhs.x_axis),
self.mul(rhs.y_axis),
self.mul(rhs.z_axis),
self.mul(rhs.w_axis),
)
}
/// Adds two 4x4 matrices.
#[inline]
pub fn add_mat4(&self, rhs: &Self) -> Self {
Self::from_cols(
self.x_axis.add(rhs.x_axis),
self.y_axis.add(rhs.y_axis),
self.z_axis.add(rhs.z_axis),
self.w_axis.add(rhs.w_axis),
)
}
/// Subtracts two 4x4 matrices.
#[inline]
pub fn sub_mat4(&self, rhs: &Self) -> Self {
Self::from_cols(
self.x_axis.sub(rhs.x_axis),
self.y_axis.sub(rhs.y_axis),
self.z_axis.sub(rhs.z_axis),
self.w_axis.sub(rhs.w_axis),
)
}
/// Multiplies a 4x4 matrix by a scalar.
#[inline]
pub fn mul_scalar(&self, rhs: f32) -> Self {
Self::from_cols(
self.x_axis.mul(rhs),
self.y_axis.mul(rhs),
self.z_axis.mul(rhs),
self.w_axis.mul(rhs),
)
}
    /// Returns true if the absolute difference of all elements between `self` and `rhs`
    /// is less than or equal to `max_abs_diff`.
    ///
    /// This can be used to compare if two matrices contain similar elements. It works best
    /// when comparing with a known value. The `max_abs_diff` that should be used
    /// depends on the values being compared against.
    ///
    /// For more see
    /// [comparing floating point numbers](https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/).
    #[inline]
    pub fn abs_diff_eq(&self, rhs: Self, max_abs_diff: f32) -> bool {
        self.x_axis.abs_diff_eq(rhs.x_axis, max_abs_diff)
            && self.y_axis.abs_diff_eq(rhs.y_axis, max_abs_diff)
            && self.z_axis.abs_diff_eq(rhs.z_axis, max_abs_diff)
            && self.w_axis.abs_diff_eq(rhs.w_axis, max_abs_diff)
    }
/// Converts each column to `f64` precision, returning the matrix as a [`DMat4`].
#[inline]
pub fn as_dmat4(&self) -> DMat4 {
DMat4::from_cols(
self.x_axis.as_dvec4(),
self.y_axis.as_dvec4(),
self.z_axis.as_dvec4(),
self.w_axis.as_dvec4(),
)
}
}
impl Default for Mat4 {
#[inline]
fn default() -> Self {
// The identity matrix is the conventional default for transforms.
Self::IDENTITY
}
}
// Operator sugar: every arithmetic operator below delegates to the named
// `add_mat4` / `sub_mat4` / `mul_mat4` / `mul_scalar` / `mul_vec4` method.
impl Add<Mat4> for Mat4 {
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self::Output {
self.add_mat4(&rhs)
}
}
impl AddAssign<Mat4> for Mat4 {
#[inline]
fn add_assign(&mut self, rhs: Self) {
*self = self.add_mat4(&rhs);
}
}
impl Sub<Mat4> for Mat4 {
type Output = Self;
#[inline]
fn sub(self, rhs: Self) -> Self::Output {
self.sub_mat4(&rhs)
}
}
impl SubAssign<Mat4> for Mat4 {
#[inline]
fn sub_assign(&mut self, rhs: Self) {
*self = self.sub_mat4(&rhs);
}
}
impl Neg for Mat4 {
type Output = Self;
#[inline]
fn neg(self) -> Self::Output {
// Negates every column element-wise.
Self::from_cols(
self.x_axis.neg(),
self.y_axis.neg(),
self.z_axis.neg(),
self.w_axis.neg(),
)
}
}
impl Mul<Mat4> for Mat4 {
type Output = Self;
#[inline]
fn mul(self, rhs: Self) -> Self::Output {
self.mul_mat4(&rhs)
}
}
impl MulAssign<Mat4> for Mat4 {
#[inline]
fn mul_assign(&mut self, rhs: Self) {
*self = self.mul_mat4(&rhs);
}
}
impl Mul<Vec4> for Mat4 {
type Output = Vec4;
#[inline]
fn mul(self, rhs: Vec4) -> Self::Output {
self.mul_vec4(rhs)
}
}
// Scalar multiplication commutes, so `f32 * Mat4` reuses `mul_scalar` too.
impl Mul<Mat4> for f32 {
type Output = Mat4;
#[inline]
fn mul(self, rhs: Mat4) -> Self::Output {
rhs.mul_scalar(self)
}
}
impl Mul<f32> for Mat4 {
type Output = Self;
#[inline]
fn mul(self, rhs: f32) -> Self::Output {
self.mul_scalar(rhs)
}
}
impl MulAssign<f32> for Mat4 {
#[inline]
fn mul_assign(&mut self, rhs: f32) {
*self = self.mul_scalar(rhs);
}
}
// Summing an empty iterator yields the additive identity (`ZERO`).
impl Sum<Self> for Mat4 {
fn sum<I>(iter: I) -> Self
where
I: Iterator<Item = Self>,
{
iter.fold(Self::ZERO, Self::add)
}
}
// Borrowing variant: copies each referenced matrix before folding.
impl<'a> Sum<&'a Self> for Mat4 {
fn sum<I>(iter: I) -> Self
where
I: Iterator<Item = &'a Self>,
{
iter.fold(Self::ZERO, |a, &b| Self::add(a, b))
}
}
// An empty product yields the multiplicative identity (`IDENTITY`).
impl Product for Mat4 {
fn product<I>(iter: I) -> Self
where
I: Iterator<Item = Self>,
{
iter.fold(Self::IDENTITY, Self::mul)
}
}
impl<'a> Product<&'a Self> for Mat4 {
fn product<I>(iter: I) -> Self
where
I: Iterator<Item = &'a Self>,
{
iter.fold(Self::IDENTITY, |a, &b| Self::mul(a, b))
}
}
impl PartialEq for Mat4 {
#[inline]
fn eq(&self, rhs: &Self) -> bool {
// Exact element-wise equality of all four columns (no epsilon; use
// `abs_diff_eq` for approximate comparison).
self.x_axis.eq(&rhs.x_axis)
&& self.y_axis.eq(&rhs.y_axis)
&& self.z_axis.eq(&rhs.z_axis)
&& self.w_axis.eq(&rhs.w_axis)
}
}
#[cfg(not(target_arch = "spirv"))]
impl AsRef<[f32; 16]> for Mat4 {
#[inline]
fn as_ref(&self) -> &[f32; 16] {
// SAFETY: assumes `Mat4` is exactly four contiguous `Vec4` columns with no
// padding, i.e. 16 `f32`s -- TODO confirm this holds for every SIMD feature
// configuration of this build.
unsafe { &*(self as *const Self as *const [f32; 16]) }
}
}
#[cfg(not(target_arch = "spirv"))]
impl AsMut<[f32; 16]> for Mat4 {
#[inline]
fn as_mut(&mut self) -> &mut [f32; 16] {
// SAFETY: same layout assumption as the `AsRef` impl directly above.
unsafe { &mut *(self as *mut Self as *mut [f32; 16]) }
}
}
#[cfg(not(target_arch = "spirv"))]
impl fmt::Debug for Mat4 {
// Diagnostic form: named columns via the derive-style struct formatter.
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.debug_struct(stringify!(Mat4))
.field("x_axis", &self.x_axis)
.field("y_axis", &self.y_axis)
.field("z_axis", &self.z_axis)
.field("w_axis", &self.w_axis)
.finish()
}
}
#[cfg(not(target_arch = "spirv"))]
impl fmt::Display for Mat4 {
// User-facing form: the four columns in a bracketed list.
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"[{}, {}, {}, {}]",
self.x_axis, self.y_axis, self.z_axis, self.w_axis
)
}
}
|
/*
Copyright 2019-2023 Didier Plaindoux
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std::marker::PhantomData;
use crate::parser::parser::Combine;
use crate::parser::parser::Parse;
use crate::parser::response::Response::Reject;
use crate::parser::response::Response::Success;
use crate::stream::position::Position;
use crate::stream::stream::Stream;
/// A stream assembled from a list of sub-streams, intended to be consumed in order.
///
/// NOTE(review): only the `Vec<S>` field carries data; the other two fields are
/// zero-sized `PhantomData` markers binding the `L` and `E` type parameters.
pub struct ChainedStream<E, S, L>(Vec<S>, PhantomData<L>, PhantomData<E>)
where
S: Stream<Pos = L>,
L: Position;
impl<E, S, L> ChainedStream<E, S, L>
where
S: Stream<Pos = L>,
L: Position,
{
/// Wraps the given list of sub-streams in a `ChainedStream`.
#[inline]
pub fn new(v: Vec<S>) -> Self {
ChainedStream(v, PhantomData, PhantomData)
}
}
impl<E, S, L> Clone for ChainedStream<E, S, L>
where
    S: Stream<Pos = L> + Clone,
    L: Position,
{
    /// Clones the stream by cloning its list of sub-streams.
    ///
    /// The previous body built `vec!(self.0.clone(), self.1.clone())`, mixing the
    /// `Vec<S>` and a zero-sized `PhantomData` marker into a single vector, which
    /// cannot type-check. `new` re-creates the markers itself, so cloning the
    /// `Vec<S>` is all that is needed. The added `S: Clone` bound is required to
    /// clone the sub-streams and matches the bound the `Stream` impl already uses.
    fn clone(&self) -> Self {
        ChainedStream::new(self.0.clone())
    }
}
// NOTE(review): this impl cannot type-check as written: `self.1` is
// `PhantomData<L>` (no `position`/`next`/stream-like `clone`), and `self.0` is a
// `Vec<S>` (no `next`). The logic reads as if the struct were meant to hold two
// streams (a head and a tail) rather than `Vec<S>` plus markers -- confirm the
// intended design against the crate history before refactoring.
impl<E, S, L> Stream for ChainedStream<E, S, L>
where
S: Stream<Pos = L> + Clone,
L: Position + Clone,
{
type Item = E;
type Pos = L;
// Position is taken from the "tail" component (see NOTE above).
fn position(&self) -> Self::Pos {
self.1.position()
}
// Pulls from the head stream first, falling back to the tail once the head
// is exhausted; an empty stream list yields `None` and an unchanged clone.
fn next(&self) -> (Option<Self::Item>, Self) {
if self.0.is_empty() {
return (None, self.clone())
}
match self.0.next() {
(Some(a), s) => (Some(a), ChainedStream::new(vec!(s, self.1.clone()))),
(None, _) =>
match self.1.next() {
(Some(a), s) => (Some(a), ChainedStream::new(vec!(s))),
(None, s) => (None, ChainedStream::new(vec!(s)))
}
}
}
}
|
use std::fmt;
/// Anything with a computable integer area.
trait HasArea {
fn get_area(&self) -> i32;
}
/// An axis-aligned rectangle.
struct Rectangle {
width: i32,
height: i32,
}
impl HasArea for Rectangle {
// Rectangle area: width x height.
fn get_area(&self) -> i32 {
self.width * self.height
}
}
impl fmt::Display for Rectangle {
// User-facing form: "width * height".
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} * {}", self.width, self.height)
}
}
impl fmt::Debug for Rectangle {
// Diagnostic form spelling out both fields.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "width = {} and height = {}", self.width, self.height)
}
}
/// A triangle described by its base and height.
///
/// NOTE(review): `attitude` looks like a typo for `altitude`; renaming the field
/// would touch every construction site, so it is only flagged here.
struct Triangle {
base: i32,
attitude: i32,
}
impl HasArea for Triangle {
// Triangle area: base x height / 2 (integer division truncates).
fn get_area(&self) -> i32 {
self.base * self.attitude / 2
}
}
/// Prints a number in several radixes.
trait Number {
fn display_all(&self);
}
impl Number for i32 {
// Decimal, binary, octal and hexadecimal renderings of the same value.
fn display_all(&self) {
println!("{}, {:b}, {:o}, {:x}", &self, &self, &self, &self);
}
}
/// A student identified by name.
struct Student {
name: String,
}
/// A self-introduction message.
trait Message {
fn to_message(&self);
}
impl Message for Student {
// Prints "My name is <name>" in Japanese.
fn to_message(&self) {
println!("私の名前は{}です", self.name);
}
}
/// Supertrait example: the English introduction requires `Message` as well.
trait EnglishMessage: Message {
fn to_english_message(&self);
}
impl EnglishMessage for Student {
fn to_english_message(&self) {
println!("My name is {}.", self.name);
}
}
/// Polygons with an area and a squareness test.
trait Polygon {
fn get_area2(&self) -> i32;
// Default Method: implementors that are square by construction
// (e.g. `Square` below) can simply inherit this.
fn is_square(&self) -> bool {
true
}
}
impl Polygon for Rectangle {
    /// Rectangle area: width x height.
    fn get_area2(&self) -> i32 {
        self.width * self.height
    }
    /// A rectangle is a square exactly when its sides are equal.
    /// (The comparison itself is the boolean; the former
    /// `if cond { true } else { false }` wrapper was redundant.)
    fn is_square(&self) -> bool {
        self.width == self.height
    }
}
/// A square, defined by a single edge length.
struct Square {
edge: i32,
}
impl Polygon for Square {
// Inherits the default `is_square` (always true), which is correct here.
fn get_area2(&self) -> i32 {
self.edge * self.edge
}
}
/// Demo driver exercising every trait defined above.
fn main() {
let rect = Rectangle {
width: 10,
height: 20,
};
let tri = Triangle {
base: 10,
attitude: 30,
};
// Trait methods on two different implementors of `HasArea`.
println!("長方形の面積 = {}", rect.get_area());
println!("三角形の面積 = {}", tri.get_area());
// Display vs Debug formatting of the same value.
println!("rect: {}", rect);
println!("rect: {:?}", rect);
// `Number` is implemented for i32, so both a variable and a literal work.
let x: i32 = 25;
x.display_all();
100.display_all();
let student = Student {
name: "Alice".to_string(),
};
// Trait and supertrait methods.
student.to_message();
student.to_english_message();
// `Square` uses the default `is_square`; `Rectangle` overrides it.
let sq = Square { edge: 15 };
println!(
"rect: 面積 = {}, 正方形か否か = {}",
rect.get_area2(),
rect.is_square()
);
println!(
"sq: 面積 = {}, 正方形か否か = {}",
sq.get_area2(),
sq.is_square()
);
}
|
// https://adventofcode.com/2017/day/8
use std::io::{BufRead, BufReader};
use std::fs::File;
use std::collections::HashMap;
/// Evaluates the condition part (`... if <reg> <op> <value>`) of one
/// whitespace-split instruction line.
///
/// Indices 4..=6 of `split` hold the condition register, operator, and literal
/// operand. The condition register is created with value 0 if it has not been
/// seen yet -- callers rely on that side effect.
///
/// Takes `&[&str]` instead of `&Vec<&str>`; existing `&split` call sites still
/// work via deref coercion.
///
/// # Panics
/// Panics on an unknown operator or a non-integer operand.
fn check_cnd(split: &[&str], registers: &mut HashMap<String, i32>) -> bool {
    let lhs = *registers.entry(split[4].to_string()).or_insert(0);
    let rhs = split[6]
        .parse::<i32>()
        .expect("condition operand must be an integer");
    match split[5] {
        "<" => lhs < rhs,
        "<=" => lhs <= rhs,
        "==" => lhs == rhs,
        "!=" => lhs != rhs,
        ">=" => lhs >= rhs,
        ">" => lhs > rhs,
        _ => panic!("incorrect operator \"{}\"", split[5]),
    }
}
/// Runs the register program from `input.txt` and reports both the largest
/// register value after execution and the largest seen at any point during it.
fn main() {
    // Each line is "<reg> inc|dec <n> if <reg> <op> <n>".
    let reader = BufReader::new(File::open("input.txt").expect("Opening input.txt failed"));
    let mut registers = HashMap::new();
    let mut largest_during = 0;
    for line in reader.lines() {
        let text = line.expect("Reading line failed");
        let tokens: Vec<&str> = text.split(' ').collect();
        // Guard clause: skip lines whose trailing condition does not hold.
        if !check_cnd(&tokens, &mut registers) {
            continue;
        }
        let target = registers.entry(tokens[0].to_string()).or_insert(0);
        let amount = tokens[2].parse::<i32>().unwrap();
        match tokens[1] {
            "inc" => *target += amount,
            "dec" => *target -= amount,
            other => panic!("incorrect instruction \"{}\"", other),
        }
        // Track the running maximum observed mid-execution.
        largest_during = largest_during.max(*target);
    }
    // Highest final register value, floored at 0 exactly like the original scan.
    let largest_after = registers.values().copied().fold(0, i32::max);
    // Assert to facilitate further tweaks
    assert_eq!(4902, largest_after);
    assert_eq!(7037, largest_during);
    println!("Largest value after execution is {}", largest_after);
    println!("Largest value during execution was {}", largest_during);
}
|
use std::io;
use std::fs;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::map;
use nom::sequence::preceded;
use nom::IResult;
/// A deck of cards, top of the deck first; each card is its face value.
type Deck = Vec<u32>;
/// One shuffle technique from the puzzle input.
#[derive(Debug)]
enum Instruction {
    /// "deal into new stack": reverse the whole deck.
    DealIntoNew,
    /// "cut N": move N cards from the top to the bottom (negative N counts from the bottom).
    Cut(isize),
    /// "deal with increment N": place consecutive cards N positions apart, wrapping.
    DealWithIncr(usize),
}
/// Reverses the deck ("deal into new stack").
fn deal_into_new(mut deck: Deck) -> Deck {
    deck.reverse();
    deck
}
/// Rotates `index` cards from the top of the deck to the bottom ("cut N").
///
/// A negative `index` is normalised by adding the deck length, i.e. it cuts
/// from the bottom. Uses `slice::rotate_left` in place instead of the previous
/// split-off/append pair, avoiding a second allocation.
///
/// # Panics
/// Panics if the normalised `index` exceeds the deck length.
fn cut(mut deck: Deck, mut index: isize) -> Deck {
    if index < 0 {
        index += deck.len() as isize;
    }
    deck.rotate_left(index as usize);
    deck
}
/// Deals the deck by writing consecutive cards `increment` slots apart, wrapping.
fn deal_with_incr(deck: Deck, increment: usize) -> Deck {
    let len = deck.len();
    // `vec![0; len]` replaces the former with_capacity + resize pair.
    let mut new_deck: Deck = vec![0; len];
    for (pos, card) in deck.into_iter().enumerate() {
        new_deck[(pos * increment) % len] = card;
    }
    new_deck
}
/// Builds a factory-ordered deck containing the cards `0..size`.
fn make_deck(size: usize) -> Deck {
    (0..size).map(|c| c as u32).collect()
}
/// Parses a possibly-negative decimal integer.
///
/// Either a bare digit run, or a '-' followed by digits whose parsed value is
/// negated (unary `-` instead of the former `-1 *`, clippy `neg_multiply`).
fn parse_num(input: &str) -> IResult<&str, isize> {
    alt((
        map(digit1, |digit_str: &str| digit_str.parse::<isize>().unwrap()),
        map(preceded(tag("-"), digit1), |digit_str: &str| {
            -digit_str.parse::<isize>().unwrap()
        }),
    ))(input)
}
/// Parses a "deal into new stack" line into its instruction.
fn parse_new(input: &str) -> IResult<&str, Instruction> {
let (input, _) = tag("deal into new stack")(input)?;
Ok((input, Instruction::DealIntoNew))
}
/// Parses a "cut N" line; N may be negative.
fn parse_cut(input: &str) -> IResult<&str, Instruction> {
let (input, _) = tag("cut ")(input)?;
let (input, index) = parse_num(input)?;
Ok((input, Instruction::Cut(index)))
}
/// Parses a "deal with increment N" line.
///
/// NOTE(review): `incr as usize` would wrap if the input ever contained a
/// negative increment; real puzzle inputs appear to use only positive values --
/// confirm before reusing this parser elsewhere.
fn parse_deal(input: &str) -> IResult<&str, Instruction> {
let (input, _) = tag("deal with increment ")(input)?;
let (input, incr) = parse_num(input)?;
Ok((input, Instruction::DealWithIncr(incr as usize)))
}
/// Applies every shuffle instruction in `instructions` (one per line) to `deck`.
///
/// # Panics
/// Panics if a line fails to parse as any known instruction.
fn apply_instructions(mut deck: Deck, instructions: &str) -> Deck {
for line in instructions.lines() {
let (_, instr) = alt((parse_new, parse_cut, parse_deal))(line).unwrap();
match instr {
Instruction::DealIntoNew => deck = deal_into_new(deck),
Instruction::Cut(index) => deck = cut(deck, index),
Instruction::DealWithIncr(incr) => deck = deal_with_incr(deck, incr),
}
}
deck
}
/// Part 1: shuffle a 10007-card deck per `input.txt` and report where card 2019 ends up.
fn main() -> io::Result<()> {
let deck = make_deck(10007);
let instructions = fs::read_to_string("input.txt")?;
let deck = apply_instructions(deck, &instructions);
println!("index of 2019: {}", deck.iter().position(|&card| card == 2019).unwrap());
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use test_case::test_case;
// The examples below are the worked examples from the puzzle statement.
#[test]
fn example_1() {
let deck = make_deck(10);
assert_eq!(deal_into_new(deck), vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
}
#[test_case(10, 3 => vec![3, 4, 5, 6, 7, 8, 9, 0, 1, 2]; "example 2")]
#[test_case(10, -4 => vec![6, 7, 8, 9, 0, 1, 2, 3, 4, 5]; "example 3")]
fn test_cut(size: usize, index: isize) -> Deck {
let deck = make_deck(size);
cut(deck, index)
}
#[test]
fn example_4() {
assert_eq!(deal_with_incr(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 3), vec![0, 7, 4, 1, 8, 5, 2, 9, 6, 3]);
}
// Multi-line instruction script exercising the full parse-and-apply path.
#[test_case(10, "deal with increment 7
deal into new stack
deal into new stack" => vec![0, 3, 6, 9, 2, 5, 8, 1, 4, 7]; "example 5")]
fn test_instructions(size: usize, instructions: &str) -> Deck {
let deck = make_deck(size);
apply_instructions(deck, instructions)
}
}
|
use bevy::{
prelude::*, //default bevy
input::{keyboard::KeyCode, Input},
};
use crate::{
systems::life::LIFE_FORM_SIZE,
DEFAULT_UNIVERSE_SIZE,
};
/// Spawns the 3D camera: centred over the universe's X/Y midpoint, pulled two
/// universe-lengths back on Z, looking at the universe centre.
pub fn setup(mut commands: Commands) {
commands.spawn_bundle(Camera3dBundle {
projection: PerspectiveProjection {
near: 0.1,
far: 10000.0,
aspect_ratio: 16.0/9.0,
fov: std::f32::consts::FRAC_PI_3,
}
.into(),
// World-space extent of the universe is DEFAULT_UNIVERSE_SIZE * LIFE_FORM_SIZE.
transform: Transform::from_xyz(
DEFAULT_UNIVERSE_SIZE as f32*LIFE_FORM_SIZE/2.0,
DEFAULT_UNIVERSE_SIZE as f32*LIFE_FORM_SIZE/2.0,
DEFAULT_UNIVERSE_SIZE as f32*LIFE_FORM_SIZE*2.0
)
.looking_at(Vec3::new(DEFAULT_UNIVERSE_SIZE as f32*LIFE_FORM_SIZE/2.0,DEFAULT_UNIVERSE_SIZE as f32*LIFE_FORM_SIZE/2.0,0.0), Vec3::Y),
..default()
});
}
/// Radians of yaw applied per frame while A/D is held.
///
/// NOTE(review): unlike the translations below, rotation is not scaled by the
/// frame delta, so turn rate varies with frame rate -- confirm whether that is
/// intentional (the commented-out `rotation_factor` suggests it may not be).
const ROTATE_SPEED: f32 = std::f32::consts::FRAC_1_PI/5.0;
/// Keyboard fly-camera: A/D yaw the view, W/S move along the view axis,
/// left/right arrows strafe, up/down arrows change altitude.
pub fn move_camera_on_keyboard_input(
mut camera: Query<&mut Transform, With<Camera>>,
keys: Res<Input<KeyCode>>,
timer: Res<Time>,
) {
// Translation speed is frame-rate independent (units per second * delta).
let move_factor = 1000.0 * timer.delta_seconds();
//let rotation_factor = 500.0 * timer.delta_seconds();
for mut transform in camera.iter_mut() {
//rotation
if keys.pressed(KeyCode::A) {
// look left
transform.rotate_local_y(ROTATE_SPEED);
} else if keys.pressed(KeyCode::D) {
// look right
transform.rotate_local_y(-ROTATE_SPEED);
}
// forward / backward
if keys.pressed(KeyCode::W) {
// forward
let move_cam = transform.forward() * move_factor;
transform.translation += move_cam;
} else if keys.pressed(KeyCode::S) {
// backward
let move_cam = transform.forward() * move_factor;
transform.translation -= move_cam;
}
//movement
if keys.pressed(KeyCode::Left) {
// moving left
let move_cam = transform.left() * move_factor;
transform.translation += move_cam;
} else if keys.pressed(KeyCode::Right) {
// moving right
let move_cam = transform.right() * move_factor;
transform.translation += move_cam;
}
if keys.pressed(KeyCode::Up) {
// moving up
transform.translation.y += move_factor;
} else if keys.pressed(KeyCode::Down) {
// moving down
transform.translation.y -= move_factor;
}
}
}
|
// Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
extern crate gcc;
use std::env;
/// Build script: on Android targets, compile the bundled `ifaddrs` C shim
/// (Android's libc did not ship `getifaddrs` -- hence the `native/` sources).
fn main() {
    // `TARGET` is always provided by cargo for build scripts; the message makes
    // a misconfigured direct invocation diagnosable instead of a bare unwrap.
    let target = env::var("TARGET").expect("cargo sets TARGET for build scripts");
    if target.contains("android") {
        // Construct the builder only on the path that actually uses it.
        gcc::Build::new()
            .include("native")
            .file("native/ifaddrs.c")
            .compile("libifaddrs.a");
    }
}
|
/*!
```rudra-poc
[target]
crate = "rocket"
version = "0.4.4"
[[target.peer]]
crate = "rocket_codegen"
version = "0.4.4"
[[target.peer]]
crate = "rocket_http"
version = "0.4.4"
[test]
cargo_toolchain = "nightly"
[report]
issue_url = "https://github.com/SergioBenitez/Rocket/issues/1312"
issue_date = 2020-05-27
rustsec_url = "https://github.com/RustSec/advisory-db/pull/320"
rustsec_id = "RUSTSEC-2020-0028"
[[bugs]]
analyzer = "Manual"
bug_class = "Other"
rudra_report_locations = []
```
!*/
#![forbid(unsafe_code)]
use rocket::http::Header;
use rocket::local::Client;
use rocket::Request;
/// Proof-of-concept for Rocket issue #1312: `LocalRequest::clone` shares the
/// inner `Request` pointer, so a header slice obtained from one clone dangles
/// after the other clone mutates (and reallocates) the header map.
/// The exact sequence below is deliberate -- do not "clean it up".
fn main() {
let client = Client::new(rocket::ignite()).unwrap();
// creates two LocalRequest instances that share the same Request pointer
let request1 = client.get("/").header(Header::new("key", "val1"));
let request2 = request1.clone();
// sanity check
assert_eq!(
request1.inner() as *const Request<'_>,
request2.inner() as *const Request<'_>
);
// save the iterator, which internally holds a slice
let mut iter = request1.inner().headers().get("key");
// insert headers to reallocate the header map
request2
.header(Header::new("1", "v1"))
.header(Header::new("2", "v2"))
.header(Header::new("3", "v3"))
.header(Header::new("key", "val2"));
// heap massage
let arr: [usize; 4] = [0, 0xcafebabe, 31337, 0]; // fake Cow
let addr = &arr as *const _ as usize;
let _v: Vec<usize> = vec![
0, 0, 0, 0, 0, addr, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
// iter is dangling now!
let s = iter.next().unwrap();
// address and length controlled
dbg!(s.as_ptr());
dbg!(s.len());
// segfaults
println!("{}", s);
}
|
use std::error::Error;
use std::fmt::{Display, Formatter};
/// Errors reported by the beanstalkd client.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum BeanstalkdError {
    /// The connection to the server failed.
    ConnectionError,
    /// The server answered with a status string this client does not recognise.
    UnknownStatusError(String),
    /// A request could not be completed.
    RequestError,
}
impl Error for BeanstalkdError {
    // `description` is deprecated in favour of `Display`, but it is kept so any
    // existing callers still receive the same strings.
    fn description(&self) -> &str {
        match self {
            BeanstalkdError::ConnectionError => "Connection error occurred",
            BeanstalkdError::RequestError => "Request error occurred",
            BeanstalkdError::UnknownStatusError(_) => "Unknown status",
        }
    }
}
impl Display for BeanstalkdError {
    /// Writes the human-readable message for this error.
    fn fmt(&self, formatter: &mut Formatter) -> ::std::fmt::Result {
        // Write straight into the formatter instead of building an intermediate
        // String on every call; output is byte-identical to the old version.
        match self {
            BeanstalkdError::ConnectionError => formatter.write_str("Connection error occurred"),
            BeanstalkdError::RequestError => formatter.write_str("Request error occurred"),
            BeanstalkdError::UnknownStatusError(status) => {
                write!(formatter, "Unknown status: {}", status)
            }
        }
    }
}
/// Convenience alias for results whose error type is [`BeanstalkdError`].
pub type BeanstalkdResult<T> = Result<T, BeanstalkdError>;
|
//! Loadable shader settings.
use crate::Shader;
use arctk::{
err::Error,
img::Gradient,
math::Pos3,
ord::{Link, Set, X, Y, Z},
};
use arctk_attr::input;
/// Colouring settings builder.
///
/// Deserialised from input files; `link` resolves the gradient name fields
/// against a loaded gradient set to produce the final `Shader`.
#[input]
pub struct ShaderLinker {
/// Sun position used for lighting calculations [m].
sun_pos: [f64; 3],
/// Relative ambient, diffuse, and occlusion lighting powers.
light: [f64; 3],
/// Relative ambient and direct shadowing powers.
shadow: [f64; 2],
/// Ambient lighting fraction.
/// NOTE(review): the name suggests a specular power exponent rather than an
/// ambient fraction -- confirm which description is correct.
spec_pow: i32,
/// Lighting and shadowing occlusion testing distances.
occ_dist: [f64; 2],
/// Effect fall-off rate.
fall_off: f64,
/// Optional number of soft shadowing samples, and angular radius [deg].
soft_shadow_samples: Option<(i32, f64)>,
/// Optional number of ambient shadowing samples and the scaling power.
ambient_shadow_samples: Option<(i32, i32)>,
/// Sky gradient.
sky_grad: String,
/// Data plotting gradient.
data_grad: String,
}
impl<'a> Link<'a, Gradient> for ShaderLinker {
    type Inst = Shader<'a>;
    /// Names of the gradients that must be resolved before linking.
    #[inline]
    fn requires(&self) -> Vec<String> {
        vec![self.sky_grad.clone(), self.data_grad.clone()]
    }
    /// Resolves the gradient keys against `grads` and builds the final [`Shader`].
    ///
    /// # Panics
    /// Panics if either gradient key is missing from `grads`.
    #[inline]
    fn link(self, grads: &'a Set<Gradient>) -> Result<Self::Inst, Error> {
        // `Option::map` replaces the former if-let-Some/None ladder; the angular
        // radius is converted from degrees to radians here.
        let soft_shadow_samples = self
            .soft_shadow_samples
            .map(|(samples, rad)| (samples, rad.to_radians()));
        Ok(Self::Inst::new(
            Pos3::new(self.sun_pos[X], self.sun_pos[Y], self.sun_pos[Z]),
            self.light,
            self.shadow,
            self.spec_pow,
            self.occ_dist,
            self.fall_off,
            soft_shadow_samples,
            self.ambient_shadow_samples,
            grads
                .get(&self.sky_grad)
                .unwrap_or_else(|| panic!("Failed to link shader-gradient key: {}", self.sky_grad)),
            grads.get(&self.data_grad).unwrap_or_else(|| {
                panic!("Failed to link shader-gradient key: {}", self.data_grad)
            }),
        ))
    }
}
|
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the MIT License, <LICENSE or http://opensource.org/licenses/MIT>.
// This file may not be copied, modified, or distributed except according to those terms.
use bincode::SizeLimit;
use bincode::serde as bincode;
use byteorder::{BigEndian, WriteBytesExt};
use bytes::Buf;
use serde::Serialize;
use std::collections::VecDeque;
use std::mem;
use std::io::{self, Cursor};
/// Non-blocking write support: `Ok(None)` signals that the write would block.
mod try_write {
use bytes::Buf;
use protocol::MapNonBlock;
use std::io::{self, Write};
pub trait TryWrite {
/// Writes from `buf`, advancing it past however many bytes were accepted.
fn try_write_buf<B: Buf>(&mut self, buf: &mut B) -> io::Result<Option<usize>>
where Self: Sized
{
let res = self.try_write(buf.bytes());
// Only consume the bytes the sink actually took.
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
/// Writes `buf`; `Ok(None)` means the operation would block.
fn try_write(&mut self, buf: &[u8]) -> io::Result<Option<usize>>;
}
// Blanket impl: any `Write` becomes a `TryWrite` by mapping a would-block
// error to `Ok(None)` via `map_non_block`.
impl<T: Write> TryWrite for T {
fn try_write(&mut self, src: &[u8]) -> io::Result<Option<usize>> {
self.write(src).map_non_block()
}
}
}
/// The means of communication between client and server.
#[derive(Clone, Debug)]
pub struct Packet {
/// (payload_len: u64, payload)
///
/// The payload is typically a serialized message.
pub buf: Cursor<Vec<u8>>,
}
impl Packet {
/// Creates a new packet, (len, payload).
///
/// Wire layout: an 8-byte big-endian payload length followed by the
/// bincode-serialized message.
pub fn serialize<S>(message: &S) -> Result<Packet, bincode::SerializeError>
where S: Serialize
{
let payload_len = bincode::serialized_size(message);
// (len, message)
// Reserve exactly header + payload so the writes below never reallocate.
let mut buf = Vec::with_capacity(mem::size_of::<u64>() + payload_len as usize);
buf.write_u64::<BigEndian>(payload_len).unwrap();
bincode::serialize_into(&mut buf, message, SizeLimit::Infinite)?;
Ok(Packet { buf: Cursor::new(buf) })
}
}
/// Outcome of one write pass over a buffer.
#[derive(Debug)]
enum NextWriteAction {
// The buffer has been fully flushed.
Stop,
// More data remains; the sink stopped accepting bytes for now.
Continue,
}
trait BufExt: Buf + Sized {
/// Writes data to stream. Returns `Ok(Stop)` once all data has been written
/// or `Ok(Continue)` if there's still data to write (the sink would block).
fn try_write<W: try_write::TryWrite>(&mut self, stream: &mut W) -> io::Result<NextWriteAction> {
while let Some(bytes_written) = stream.try_write_buf(self)? {
debug!("Writer: wrote {} bytes; {} remaining.",
bytes_written,
self.remaining());
// NOTE(review): a zero-byte write is treated as "would block", mirroring
// the Ok(None) exit of the loop -- confirm sinks cannot legitimately
// return 0 for a non-empty buffer.
if bytes_written == 0 {
trace!("Writer: would block.");
return Ok(NextWriteAction::Continue);
}
if !self.has_remaining() {
return Ok(NextWriteAction::Stop);
}
}
Ok(NextWriteAction::Continue)
}
}
// Every `Buf` gets the write-loop helper.
impl<B: Buf> BufExt for B {}
/// Writer-side state machine: either idle or mid-flush of one packet.
#[derive(Debug)]
pub enum NextWriteState {
Nothing,
Next(Packet),
}
impl NextWriteState {
/// Pumps queued packets into `socket` until the sink stops accepting data.
///
/// Returns `Ok(Some(()))` once `outbound` (and any in-flight packet) has been
/// drained, or `Ok(None)` if the socket would block mid-packet; the partially
/// written packet is kept in `state` so the next call resumes where it left off.
pub fn next<W: try_write::TryWrite>(state: &mut Option<Packet>,
socket: &mut W,
outbound: &mut VecDeque<Packet>)
-> io::Result<Option<()>> {
loop {
let update = match *state {
None => {
match outbound.pop_front() {
Some(packet) => {
let size = packet.buf.remaining() as u64;
// Every packet must contain at least its 8-byte length header.
debug_assert!(size >= mem::size_of::<u64>() as u64);
NextWriteState::Next(packet)
}
None => return Ok(Some(())),
}
}
Some(ref mut packet) => {
match packet.buf.try_write(socket)? {
// Finished this packet; loop around to fetch the next one.
NextWriteAction::Stop => NextWriteState::Nothing,
// Would block; resume from `state` on the next writable event.
NextWriteAction::Continue => return Ok(None),
}
}
};
match update {
NextWriteState::Next(next) => *state = Some(next),
NextWriteState::Nothing => {
*state = None;
}
}
}
}
}
|
use crate::{alphabet::Alphabet, dfa::DFA};
use core::marker::PhantomData;
use valis_ds::set::Set;
/// Graphviz (`dot`) rendering adapter for a [`DFA`] over alphabet `A`.
pub struct DFADisplay<'d, D, A>(pub &'d D, PhantomData<A>);
impl<'d, D, A> From<&'d D> for DFADisplay<'d, D, A>
where
D: DFA<A>,
A: Alphabet,
{
// Borrow the DFA; the marker pins the alphabet type parameter.
fn from(src: &'d D) -> Self {
DFADisplay(src, PhantomData)
}
}
impl<'d, 'a, A, D> dot::GraphWalk<'a, D::ID, (D::ID, A, D::ID)> for DFADisplay<'d, D, A>
where
D: DFA<A>,
A: Alphabet,
{
// Every DFA state becomes a graph node.
fn nodes(&self) -> dot::Nodes<'a, D::ID> {
self.0.all_states().into_iter().collect::<Vec<_>>().into()
}
// One edge per (state, symbol) pair: the complete transition relation.
fn edges(&'a self) -> dot::Edges<'a, (D::ID, A, D::ID)> {
let mut edges = Vec::with_capacity(self.0.all_states().size() * A::num_symbols());
for node in self.0.all_states().into_iter() {
for sym in A::value_iter() {
// SAFETY assumption: every (state, symbol) pair produced here is a
// valid input to `next_state_unchecked` -- TODO confirm that contract
// on the `DFA` trait.
edges.push((node, sym, unsafe { self.0.next_state_unchecked(node, sym) }))
}
}
edges.into()
}
fn source(&self, e: &(D::ID, A, D::ID)) -> D::ID {
e.0
}
fn target(&self, e: &(D::ID, A, D::ID)) -> D::ID {
e.2
}
}
impl<'d, 'a, A, D> dot::Labeller<'a, D::ID, (D::ID, A, D::ID)> for DFADisplay<'d, D, A>
where
    D: DFA<A>,
    A: Alphabet,
{
    /// Fixed identifier for the emitted graph.
    fn graph_id(&'a self) -> dot::Id<'a> {
        dot::Id::new("dfa_graph").unwrap()
    }
    fn node_id(&'a self, n: &D::ID) -> dot::Id<'a> {
        dot::Id::new(format!("N{:?}", *n)).unwrap()
    }
    /// Start/accept states are filled with a colour (see `node_style`), so their
    /// labels are rendered in white for contrast; plain states get a plain label.
    /// (The unused `_is_dead` computation was removed.)
    fn node_label(&'a self, node: &D::ID) -> dot::LabelText<'a> {
        let is_start = self.0.start_state() == *node;
        let is_accept = self.0.accept_states().contains(node);
        if is_start || is_accept {
            dot::LabelText::HtmlStr(format!("<font color=\"white\">{:?}</font>", node).into())
        } else {
            dot::LabelText::LabelStr(format!("{:?}", node).into())
        }
    }
    fn edge_label<'b>(&'b self, e: &(D::ID, A, D::ID)) -> dot::LabelText<'b> {
        dot::LabelText::LabelStr(format!("{:?}", e.1).into())
    }
    fn edge_style(&'a self, _e: &(D::ID, A, D::ID)) -> dot::Style {
        dot::Style::Bold
    }
    /// Colours edges by input symbol: a Brewer "accent" palette for alphabets of
    /// 3..=8 symbols, a two-colour "piyg" palette for binary alphabets, and the
    /// renderer default otherwise.
    fn edge_color(&'a self, e: &(D::ID, A, D::ID)) -> Option<dot::LabelText<'a>> {
        let num_symbols = A::num_symbols();
        if 2 < num_symbols && num_symbols <= 8 {
            Some(dot::LabelText::LabelStr(
                format!("/accent{}/{}", num_symbols, e.1.to_usize() + 1).into(),
            ))
        } else if num_symbols == 2 {
            let color = if e.1.to_usize() == 0 { "/piyg3/1" } else { "/piyg3/3" };
            Some(dot::LabelText::LabelStr(color.into()))
        } else {
            None
        }
    }
    /// Start and accept states are filled; everything else is drawn solid.
    /// (Collapses the original four-arm tuple match, three arms of which all
    /// returned `Filled` and none of which depended on the dead state.)
    fn node_style(&'a self, node: &D::ID) -> dot::Style {
        let is_start = self.0.start_state() == *node;
        let is_accept = self.0.accept_states().contains(node);
        if is_start || is_accept {
            dot::Style::Filled
        } else {
            dot::Style::Solid
        }
    }
    /// Node colour encodes role: start+accept purple, accept blue, start red,
    /// anything else black. The dead state does not affect colour.
    fn node_color(&'a self, node: &D::ID) -> Option<dot::LabelText<'a>> {
        let is_start = self.0.start_state() == *node;
        let is_accept = self.0.accept_states().contains(node);
        let color = if is_start && is_accept {
            "purple"
        } else if is_accept {
            "blue"
        } else if is_start {
            "red"
        } else {
            "black"
        };
        Some(dot::LabelText::LabelStr(color.into()))
    }
    /// The dead state (if any) is drawn as a double circle; all other states as
    /// a plain circle.
    fn node_shape(&'a self, node: &D::ID) -> Option<dot::LabelText<'a>> {
        let is_dead = self
            .0
            .dead_state()
            .map(|dead_state| dead_state == *node)
            .unwrap_or(false);
        let shape = if is_dead { "doublecircle" } else { "circle" };
        Some(dot::LabelText::LabelStr(shape.into()))
    }
}
|
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
type Key = String;
type Val = Vec<u8>;
type Map = HashMap<Key, Val>;
/// Shared, thread-safe handle to the underlying name -> bytes map.
///
/// NOTE(review): the inner `Box` adds an indirection with no benefit, but
/// removing it would change this public type alias, so it is kept.
pub type Cache = Arc<RwLock<Box<Map>>>;
/// An in-memory cache of file contents keyed by name.
#[derive(Debug)]
pub struct FileCache {
    /// The shared map; clone the `Arc` to share the cache across threads.
    pub cache: Cache,
}
impl FileCache {
    /// Creates an empty cache.
    pub fn new() -> FileCache {
        FileCache {
            cache: Arc::new(RwLock::new(Box::new(HashMap::new()))),
        }
    }
}
impl Default for FileCache {
    /// Equivalent to [`FileCache::new`]; provided because a `new()` with no
    /// arguments conventionally pairs with a `Default` impl.
    fn default() -> Self {
        FileCache::new()
    }
}
/// An enum to represent all characters in the Kannada block.
///
/// Variants span U+0C80 through U+0CF2; code points in that range with no
/// assigned character have no variant. Each variant's doc comment records its
/// code point and glyph.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Kannada {
    /// \u{c80}: 'ಀ'
    SignSpacingCandrabindu,
    /// \u{c81}: 'ಁ'
    SignCandrabindu,
    /// \u{c82}: 'ಂ'
    SignAnusvara,
    /// \u{c83}: 'ಃ'
    SignVisarga,
    /// \u{c84}: '಄'
    SignSiddham,
    /// \u{c85}: 'ಅ'
    LetterA,
    /// \u{c86}: 'ಆ'
    LetterAa,
    /// \u{c87}: 'ಇ'
    LetterI,
    /// \u{c88}: 'ಈ'
    LetterIi,
    /// \u{c89}: 'ಉ'
    LetterU,
    /// \u{c8a}: 'ಊ'
    LetterUu,
    /// \u{c8b}: 'ಋ'
    LetterVocalicR,
    /// \u{c8c}: 'ಌ'
    LetterVocalicL,
    /// \u{c8e}: 'ಎ'
    LetterE,
    /// \u{c8f}: 'ಏ'
    LetterEe,
    /// \u{c90}: 'ಐ'
    LetterAi,
    /// \u{c92}: 'ಒ'
    LetterO,
    /// \u{c93}: 'ಓ'
    LetterOo,
    /// \u{c94}: 'ಔ'
    LetterAu,
    /// \u{c95}: 'ಕ'
    LetterKa,
    /// \u{c96}: 'ಖ'
    LetterKha,
    /// \u{c97}: 'ಗ'
    LetterGa,
    /// \u{c98}: 'ಘ'
    LetterGha,
    /// \u{c99}: 'ಙ'
    LetterNga,
    /// \u{c9a}: 'ಚ'
    LetterCa,
    /// \u{c9b}: 'ಛ'
    LetterCha,
    /// \u{c9c}: 'ಜ'
    LetterJa,
    /// \u{c9d}: 'ಝ'
    LetterJha,
    /// \u{c9e}: 'ಞ'
    LetterNya,
    /// \u{c9f}: 'ಟ'
    LetterTta,
    /// \u{ca0}: 'ಠ'
    LetterTtha,
    /// \u{ca1}: 'ಡ'
    LetterDda,
    /// \u{ca2}: 'ಢ'
    LetterDdha,
    /// \u{ca3}: 'ಣ'
    LetterNna,
    /// \u{ca4}: 'ತ'
    LetterTa,
    /// \u{ca5}: 'ಥ'
    LetterTha,
    /// \u{ca6}: 'ದ'
    LetterDa,
    /// \u{ca7}: 'ಧ'
    LetterDha,
    /// \u{ca8}: 'ನ'
    LetterNa,
    /// \u{caa}: 'ಪ'
    LetterPa,
    /// \u{cab}: 'ಫ'
    LetterPha,
    /// \u{cac}: 'ಬ'
    LetterBa,
    /// \u{cad}: 'ಭ'
    LetterBha,
    /// \u{cae}: 'ಮ'
    LetterMa,
    /// \u{caf}: 'ಯ'
    LetterYa,
    /// \u{cb0}: 'ರ'
    LetterRa,
    /// \u{cb1}: 'ಱ'
    LetterRra,
    /// \u{cb2}: 'ಲ'
    LetterLa,
    /// \u{cb3}: 'ಳ'
    LetterLla,
    /// \u{cb5}: 'ವ'
    LetterVa,
    /// \u{cb6}: 'ಶ'
    LetterSha,
    /// \u{cb7}: 'ಷ'
    LetterSsa,
    /// \u{cb8}: 'ಸ'
    LetterSa,
    /// \u{cb9}: 'ಹ'
    LetterHa,
    /// \u{cbc}: '಼'
    SignNukta,
    /// \u{cbd}: 'ಽ'
    SignAvagraha,
    /// \u{cbe}: 'ಾ'
    VowelSignAa,
    /// \u{cbf}: 'ಿ'
    VowelSignI,
    /// \u{cc0}: 'ೀ'
    VowelSignIi,
    /// \u{cc1}: 'ು'
    VowelSignU,
    /// \u{cc2}: 'ೂ'
    VowelSignUu,
    /// \u{cc3}: 'ೃ'
    VowelSignVocalicR,
    /// \u{cc4}: 'ೄ'
    VowelSignVocalicRr,
    /// \u{cc6}: 'ೆ'
    VowelSignE,
    /// \u{cc7}: 'ೇ'
    VowelSignEe,
    /// \u{cc8}: 'ೈ'
    VowelSignAi,
    /// \u{cca}: 'ೊ'
    VowelSignO,
    /// \u{ccb}: 'ೋ'
    VowelSignOo,
    /// \u{ccc}: 'ೌ'
    VowelSignAu,
    /// \u{ccd}: '್'
    SignVirama,
    /// \u{cd5}: 'ೕ'
    LengthMark,
    /// \u{cd6}: 'ೖ'
    AiLengthMark,
    /// \u{cde}: 'ೞ'
    LetterFa,
    /// \u{ce0}: 'ೠ'
    LetterVocalicRr,
    /// \u{ce1}: 'ೡ'
    LetterVocalicLl,
    /// \u{ce2}: 'ೢ'
    VowelSignVocalicL,
    /// \u{ce3}: 'ೣ'
    VowelSignVocalicLl,
    /// \u{ce6}: '೦'
    DigitZero,
    /// \u{ce7}: '೧'
    DigitOne,
    /// \u{ce8}: '೨'
    DigitTwo,
    /// \u{ce9}: '೩'
    DigitThree,
    /// \u{cea}: '೪'
    DigitFour,
    /// \u{ceb}: '೫'
    DigitFive,
    /// \u{cec}: '೬'
    DigitSix,
    /// \u{ced}: '೭'
    DigitSeven,
    /// \u{cee}: '೮'
    DigitEight,
    /// \u{cef}: '೯'
    DigitNine,
    /// \u{cf1}: 'ೱ'
    SignJihvamuliya,
    /// \u{cf2}: 'ೲ'
    SignUpadhmaniya,
}
/// Converts a [`Kannada`] variant to its Unicode scalar value.
///
/// Implemented as `From<Kannada> for char` rather than `Into<char> for Kannada`
/// (clippy `from_over_into`): the standard library's blanket impl still derives
/// `Into<char>` from this, so existing `.into()` call sites keep working.
impl From<Kannada> for char {
    fn from(k: Kannada) -> char {
        match k {
            Kannada::SignSpacingCandrabindu => 'ಀ',
            Kannada::SignCandrabindu => 'ಁ',
            Kannada::SignAnusvara => 'ಂ',
            Kannada::SignVisarga => 'ಃ',
            Kannada::SignSiddham => '಄',
            Kannada::LetterA => 'ಅ',
            Kannada::LetterAa => 'ಆ',
            Kannada::LetterI => 'ಇ',
            Kannada::LetterIi => 'ಈ',
            Kannada::LetterU => 'ಉ',
            Kannada::LetterUu => 'ಊ',
            Kannada::LetterVocalicR => 'ಋ',
            Kannada::LetterVocalicL => 'ಌ',
            Kannada::LetterE => 'ಎ',
            Kannada::LetterEe => 'ಏ',
            Kannada::LetterAi => 'ಐ',
            Kannada::LetterO => 'ಒ',
            Kannada::LetterOo => 'ಓ',
            Kannada::LetterAu => 'ಔ',
            Kannada::LetterKa => 'ಕ',
            Kannada::LetterKha => 'ಖ',
            Kannada::LetterGa => 'ಗ',
            Kannada::LetterGha => 'ಘ',
            Kannada::LetterNga => 'ಙ',
            Kannada::LetterCa => 'ಚ',
            Kannada::LetterCha => 'ಛ',
            Kannada::LetterJa => 'ಜ',
            Kannada::LetterJha => 'ಝ',
            Kannada::LetterNya => 'ಞ',
            Kannada::LetterTta => 'ಟ',
            Kannada::LetterTtha => 'ಠ',
            Kannada::LetterDda => 'ಡ',
            Kannada::LetterDdha => 'ಢ',
            Kannada::LetterNna => 'ಣ',
            Kannada::LetterTa => 'ತ',
            Kannada::LetterTha => 'ಥ',
            Kannada::LetterDa => 'ದ',
            Kannada::LetterDha => 'ಧ',
            Kannada::LetterNa => 'ನ',
            Kannada::LetterPa => 'ಪ',
            Kannada::LetterPha => 'ಫ',
            Kannada::LetterBa => 'ಬ',
            Kannada::LetterBha => 'ಭ',
            Kannada::LetterMa => 'ಮ',
            Kannada::LetterYa => 'ಯ',
            Kannada::LetterRa => 'ರ',
            Kannada::LetterRra => 'ಱ',
            Kannada::LetterLa => 'ಲ',
            Kannada::LetterLla => 'ಳ',
            Kannada::LetterVa => 'ವ',
            Kannada::LetterSha => 'ಶ',
            Kannada::LetterSsa => 'ಷ',
            Kannada::LetterSa => 'ಸ',
            Kannada::LetterHa => 'ಹ',
            Kannada::SignNukta => '಼',
            Kannada::SignAvagraha => 'ಽ',
            Kannada::VowelSignAa => 'ಾ',
            Kannada::VowelSignI => 'ಿ',
            Kannada::VowelSignIi => 'ೀ',
            Kannada::VowelSignU => 'ು',
            Kannada::VowelSignUu => 'ೂ',
            Kannada::VowelSignVocalicR => 'ೃ',
            Kannada::VowelSignVocalicRr => 'ೄ',
            Kannada::VowelSignE => 'ೆ',
            Kannada::VowelSignEe => 'ೇ',
            Kannada::VowelSignAi => 'ೈ',
            Kannada::VowelSignO => 'ೊ',
            Kannada::VowelSignOo => 'ೋ',
            Kannada::VowelSignAu => 'ೌ',
            Kannada::SignVirama => '್',
            Kannada::LengthMark => 'ೕ',
            Kannada::AiLengthMark => 'ೖ',
            Kannada::LetterFa => 'ೞ',
            Kannada::LetterVocalicRr => 'ೠ',
            Kannada::LetterVocalicLl => 'ೡ',
            Kannada::VowelSignVocalicL => 'ೢ',
            Kannada::VowelSignVocalicLl => 'ೣ',
            Kannada::DigitZero => '೦',
            Kannada::DigitOne => '೧',
            Kannada::DigitTwo => '೨',
            Kannada::DigitThree => '೩',
            Kannada::DigitFour => '೪',
            Kannada::DigitFive => '೫',
            Kannada::DigitSix => '೬',
            Kannada::DigitSeven => '೭',
            Kannada::DigitEight => '೮',
            Kannada::DigitNine => '೯',
            Kannada::SignJihvamuliya => 'ೱ',
            Kannada::SignUpadhmaniya => 'ೲ',
        }
    }
}
/// Fallible conversion from an arbitrary `char` into the Kannada block.
impl std::convert::TryFrom<char> for Kannada {
    // No extra information is carried on failure.
    type Error = ();
    /// Returns the variant for `c`, or `Err(())` when `c` is outside the
    /// Kannada Unicode block or an unassigned code point within it.
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            'ಀ' => Ok(Kannada::SignSpacingCandrabindu),
            'ಁ' => Ok(Kannada::SignCandrabindu),
            'ಂ' => Ok(Kannada::SignAnusvara),
            'ಃ' => Ok(Kannada::SignVisarga),
            '಄' => Ok(Kannada::SignSiddham),
            'ಅ' => Ok(Kannada::LetterA),
            'ಆ' => Ok(Kannada::LetterAa),
            'ಇ' => Ok(Kannada::LetterI),
            'ಈ' => Ok(Kannada::LetterIi),
            'ಉ' => Ok(Kannada::LetterU),
            'ಊ' => Ok(Kannada::LetterUu),
            'ಋ' => Ok(Kannada::LetterVocalicR),
            'ಌ' => Ok(Kannada::LetterVocalicL),
            'ಎ' => Ok(Kannada::LetterE),
            'ಏ' => Ok(Kannada::LetterEe),
            'ಐ' => Ok(Kannada::LetterAi),
            'ಒ' => Ok(Kannada::LetterO),
            'ಓ' => Ok(Kannada::LetterOo),
            'ಔ' => Ok(Kannada::LetterAu),
            'ಕ' => Ok(Kannada::LetterKa),
            'ಖ' => Ok(Kannada::LetterKha),
            'ಗ' => Ok(Kannada::LetterGa),
            'ಘ' => Ok(Kannada::LetterGha),
            'ಙ' => Ok(Kannada::LetterNga),
            'ಚ' => Ok(Kannada::LetterCa),
            'ಛ' => Ok(Kannada::LetterCha),
            'ಜ' => Ok(Kannada::LetterJa),
            'ಝ' => Ok(Kannada::LetterJha),
            'ಞ' => Ok(Kannada::LetterNya),
            'ಟ' => Ok(Kannada::LetterTta),
            'ಠ' => Ok(Kannada::LetterTtha),
            'ಡ' => Ok(Kannada::LetterDda),
            'ಢ' => Ok(Kannada::LetterDdha),
            'ಣ' => Ok(Kannada::LetterNna),
            'ತ' => Ok(Kannada::LetterTa),
            'ಥ' => Ok(Kannada::LetterTha),
            'ದ' => Ok(Kannada::LetterDa),
            'ಧ' => Ok(Kannada::LetterDha),
            'ನ' => Ok(Kannada::LetterNa),
            'ಪ' => Ok(Kannada::LetterPa),
            'ಫ' => Ok(Kannada::LetterPha),
            'ಬ' => Ok(Kannada::LetterBa),
            'ಭ' => Ok(Kannada::LetterBha),
            'ಮ' => Ok(Kannada::LetterMa),
            'ಯ' => Ok(Kannada::LetterYa),
            'ರ' => Ok(Kannada::LetterRa),
            'ಱ' => Ok(Kannada::LetterRra),
            'ಲ' => Ok(Kannada::LetterLa),
            'ಳ' => Ok(Kannada::LetterLla),
            'ವ' => Ok(Kannada::LetterVa),
            'ಶ' => Ok(Kannada::LetterSha),
            'ಷ' => Ok(Kannada::LetterSsa),
            'ಸ' => Ok(Kannada::LetterSa),
            'ಹ' => Ok(Kannada::LetterHa),
            '಼' => Ok(Kannada::SignNukta),
            'ಽ' => Ok(Kannada::SignAvagraha),
            'ಾ' => Ok(Kannada::VowelSignAa),
            'ಿ' => Ok(Kannada::VowelSignI),
            'ೀ' => Ok(Kannada::VowelSignIi),
            'ು' => Ok(Kannada::VowelSignU),
            'ೂ' => Ok(Kannada::VowelSignUu),
            'ೃ' => Ok(Kannada::VowelSignVocalicR),
            'ೄ' => Ok(Kannada::VowelSignVocalicRr),
            'ೆ' => Ok(Kannada::VowelSignE),
            'ೇ' => Ok(Kannada::VowelSignEe),
            'ೈ' => Ok(Kannada::VowelSignAi),
            'ೊ' => Ok(Kannada::VowelSignO),
            'ೋ' => Ok(Kannada::VowelSignOo),
            'ೌ' => Ok(Kannada::VowelSignAu),
            '್' => Ok(Kannada::SignVirama),
            'ೕ' => Ok(Kannada::LengthMark),
            'ೖ' => Ok(Kannada::AiLengthMark),
            'ೞ' => Ok(Kannada::LetterFa),
            'ೠ' => Ok(Kannada::LetterVocalicRr),
            'ೡ' => Ok(Kannada::LetterVocalicLl),
            'ೢ' => Ok(Kannada::VowelSignVocalicL),
            'ೣ' => Ok(Kannada::VowelSignVocalicLl),
            '೦' => Ok(Kannada::DigitZero),
            '೧' => Ok(Kannada::DigitOne),
            '೨' => Ok(Kannada::DigitTwo),
            '೩' => Ok(Kannada::DigitThree),
            '೪' => Ok(Kannada::DigitFour),
            '೫' => Ok(Kannada::DigitFive),
            '೬' => Ok(Kannada::DigitSix),
            '೭' => Ok(Kannada::DigitSeven),
            '೮' => Ok(Kannada::DigitEight),
            '೯' => Ok(Kannada::DigitNine),
            'ೱ' => Ok(Kannada::SignJihvamuliya),
            'ೲ' => Ok(Kannada::SignUpadhmaniya),
            // Anything else is not part of this block.
            _ => Err(()),
        }
    }
}
impl Into<u32> for Kannada {
fn into(self) -> u32 {
let c: char = self.into();
let hex = c
.escape_unicode()
.to_string()
.replace("\\u{", "")
.replace("}", "");
u32::from_str_radix(&hex, 16).unwrap()
}
}
/// Fallible conversion from a raw code point into the Kannada block.
impl std::convert::TryFrom<u32> for Kannada {
    type Error = ();
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        // First make sure `u` is a valid Unicode scalar value, then
        // delegate to the `char`-based conversion.
        match char::try_from(u) {
            Ok(c) => Self::try_from(c),
            Err(_) => Err(()),
        }
    }
}
impl Iterator for Kannada {
type Item = Self;
fn next(&mut self) -> Option<Self> {
let index: u32 = (*self).into();
use std::convert::TryFrom;
Self::try_from(index + 1).ok()
}
}
impl Kannada {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Self::SignSpacingCandrabindu
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        // Render e.g. "KannadaLetterKa" from the Debug impl, then let
        // `string_morph` split it into "Kannada letter ka".
        let debug_name = format!("Kannada{:#?}", self);
        string_morph::to_sentence_case(&debug_name)
    }
}
|
/// Replaces `*t` with `f` applied to the original `*t`.
///
/// Aborts the process if `f` panics: at that point the old value has
/// already been moved out of `*t`, so letting the panic unwind would leave
/// `*t` logically uninitialized and cause a double drop (undefined
/// behavior) when the caller's scope unwinds. The original implementation
/// had exactly that soundness hole.
pub fn replace_with<T, F: FnOnce(T) -> T>(t: &mut T, f: F) {
    let raw = t as *mut T;
    unsafe {
        // SAFETY: `raw` comes from a valid `&mut T`, so it is aligned,
        // initialized, and exclusively borrowed for the duration of this
        // call; reading it is sound as long as we write a value back (or
        // never return, see abort below) before the borrow is observable.
        let old = std::ptr::read(raw);
        // `AssertUnwindSafe` is fine here: on panic we abort immediately,
        // so no broken invariants can be observed.
        let new = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(old)))
            .unwrap_or_else(|_| std::process::abort());
        std::ptr::write(raw, new);
    }
}
#![allow(dead_code, unused_imports)]
use std::fs::File;
use std::io;
use std::io::Write;
use std::ops::Div;
use rand::{random, Rng, SeedableRng};
use rayon::prelude::*;
use crate::camera::{Camera, Viewport};
use crate::hittable::{HitRecord, Hittable};
use crate::image::Image;
use crate::material::Material;
use crate::ray::Ray;
use crate::shapes::Sphere;
use crate::vec3::{Color, Point3, Vector3};
use crate::world::World;
use rand::prelude::StdRng;
mod camera;
mod hittable;
mod image;
mod material;
mod ray;
mod shapes;
mod vec3;
mod world;
pub(crate) type Num = f32;
/// Converts an accumulated sample sum into an 8-bit RGB triple:
/// average over `samples`, gamma-correct (gamma = 2), clamp, and scale.
pub fn translate_color(pixel_color: Color, samples: usize) -> [u8; 3] {
    let scale = 1.0 / (samples as Num);
    // Averaging + sqrt implements gamma-2 correction.
    let gamma = |channel: Num| (channel * scale).sqrt();
    let to_byte = |value: Num| (255.99 * value.clamp(0.0, 0.99)) as u8;
    [
        to_byte(gamma(pixel_color.x)),
        to_byte(gamma(pixel_color.y)),
        to_byte(gamma(pixel_color.z)),
    ]
}
pub(crate) fn write_color<W: io::Write>(writer: &mut W, pixel_color: Color, samples: usize) {
let scale = 1.0 / (samples as Num);
let [r, g, b] = [
(pixel_color.x * scale).sqrt(),
(pixel_color.y * scale).sqrt(),
(pixel_color.z * scale).sqrt(),
];
let ir = (255.99 * r.clamp(0.0, 0.99)) as u8;
let ig = (255.99 * g.clamp(0.0, 0.99)) as u8;
let ib = (255.99 * b.clamp(0.0, 0.99)) as u8;
writer
.write_all(format!("{} {} {}\n", ir, ig, ib).as_ref())
.unwrap();
}
/// Returns the color gathered along `ray`, recursing on scattered rays up
/// to `depth` bounces.
pub(crate) fn ray_color(ray: Ray, world: &World, depth: usize) -> Color {
    // Bounce budget exhausted: no light is gathered. `depth` is unsigned,
    // so `== 0` is the meaningful comparison (`<= 0` tripped a lint and
    // read as if negative depths were possible).
    if depth == 0 {
        return Color::zeros();
    }
    // t_min of 0.0001 avoids "shadow acne" from re-hitting the surface the
    // ray just left.
    if let Some(rec) = world.hit(ray, 0.0001..Num::MAX) {
        // Only construct the RNG on the path that actually uses it.
        let mut rng = rand::thread_rng();
        if let Some((attenuation, scattered)) = rec.mat.scatter(ray, rec, &mut rng) {
            return attenuation * ray_color(scattered, world, depth - 1);
        }
        // The material absorbed the ray.
        return Color::zeros();
    }
    // Blue to white gradient if the ray does not hit anything
    let unit_direction = ray.direction.normalize();
    let t = 0.5 * (unit_direction.y + 1.0);
    (1.0 - t) * Color::new(1.0, 1.0, 1.0) + t * Color::new(0.5, 0.7, 1.0)
}
/// Renders `world` through `camera` into `final_scene.ppm` (text PPM),
/// tracing `samples` jittered rays per pixel with a bounce limit of
/// `depth`. Pixel work is parallelized with rayon; the file is written
/// sequentially afterwards.
pub(crate) fn render(
    world: &World,
    image: Image,
    camera: Camera,
    samples: usize,
    depth: usize,
) -> io::Result<()> {
    let mut file = File::create("final_scene.ppm")?;
    eprintln!("{}x{}", image.width, image.height);
    // PPM header: magic number, dimensions, max channel value.
    file.write_fmt(format_args!("P3\n{} {}\n255\n", image.width, image.height))?;
    let mut im: Vec<Color> = vec![Color::zeros(); image.height * image.width];
    // Each chunk is one scanline. `.rev()` flips row order so the buffer is
    // written top-to-bottom while camera `v` grows upward.
    im.par_chunks_exact_mut(image.width)
        .rev()
        .enumerate()
        .for_each(|(j, slice)| {
            slice.into_par_iter().enumerate().for_each(|(i, pixel)| {
                for _ in 0..samples {
                    // Jitter each sample within the pixel for anti-aliasing.
                    let u = (i as Num + random::<Num>()) / (image.width - 1) as Num;
                    let v = (j as Num + random::<Num>()) / (image.height - 1) as Num;
                    let r = camera.cast_ray(u, v);
                    // Accumulate raw sums; translate_color averages later.
                    *pixel += ray_color(r, world, depth);
                }
            })
        });
    for c in im {
        let [r, g, b] = translate_color(c, samples);
        file.write_fmt(format_args!("{} {} {}\n", r, g, b))?;
    }
    eprintln!("\nDone\n");
    Ok(())
}
fn main() {
    //https://raytracing.github.io/books/RayTracingInOneWeekend.html
    // Rays cast per pixel (noise reduction / anti-aliasing).
    const SAMPLES: usize = 500;
    // Maximum ray bounces before a path is terminated.
    const MAX_DEPTH: usize = 50;
    let image = Image::from_width(Camera::ASPECT_RATIO, 1200);
    let camera = Camera::new(
        Point3::new(13., 2., 3.),
        Point3::new(0., 0., 0.),
        Vector3::new(0., 1., 0.),
        20.0,
        // NOTE(review): assumed to be the camera's time/shutter range —
        // confirm against Camera::new.
        0.0 .. 0.001
    );
    // Fixed seed so the generated scene is reproducible between runs.
    let mut rng = rand::rngs::StdRng::seed_from_u64(0xFACE);
    // Simple three-sphere test scene kept for reference; it is deliberately
    // unused (leading underscore) — the render below uses `final_scene`.
    let _world = World(vec![
        // Ground
        Box::new(Sphere::new(
            Point3::new(0., -100.5, -1.),
            100.,
            Material::Lambertian {
                albedo: Color::new(0.8, 0.8, 0.),
            },
        )),
        //Left
        Box::new(Sphere::new(
            Point3::new(-1., 0., -1.),
            0.5,
            Material::Dielectric { ir: 1.5 },
        )),
        // Inner left (negative radius: hollow glass shell trick)
        Box::new(Sphere::new(
            Point3::new(-1., 0., -1.),
            -0.4,
            Material::Dielectric { ir: 1.5 },
        )),
        // Center
        Box::new(Sphere::new(
            Point3::new(0., 0., -1.),
            0.5,
            Material::Lambertian {
                albedo: Color::new(0.1, 0.2, 0.5),
            },
        )),
        //Right
        Box::new(Sphere::new(
            Point3::new(1., 0., -1.),
            0.5,
            Material::Metal {
                albedo: Color::new(0.8, 0.6, 0.2),
                fuzz: 0.0,
            },
        )),
    ]);
    let random_scene = final_scene(&mut rng);
    render(&random_scene, image, camera, SAMPLES, MAX_DEPTH).unwrap();
}
/// Builds the book's cover scene: a huge ground sphere, a 22x22 grid of
/// small randomly-placed spheres with random materials, and three large
/// feature spheres (glass, diffuse, metal).
///
/// Takes the RNG by parameter so callers can seed it for reproducibility.
fn final_scene<R: Rng>(rng: &mut R) -> World {
    let mut world = World(vec![]);
    let ground = Box::new(Sphere::new(
        Point3::new(0., -1000., 0.),
        1000.,
        Material::Lambertian {
            albedo: Color::from_elem(0.5),
        },
    ));
    world.add(ground);
    for a in -11..11 {
        for b in -11..11 {
            // Material lottery: < 0.8 diffuse, < 0.95 metal, else glass.
            let choose_mat = rng.gen::<Num>();
            // Random position within the (a, b) grid cell.
            let center = Point3::new(
                a as Num + 0.9 * rng.gen::<Num>(),
                0.2,
                b as Num + 0.9 * rng.gen::<Num>(),
            );
            // Skip positions too close to the large sphere at x = 4.
            if (center - Point3::new(4., 0.2, 0.)).length() > 0.9 {
                match choose_mat {
                    c if c < 0.8 => {
                        // Squaring two random colors biases toward darker,
                        // more saturated diffuse tones.
                        let albedo = Color::random(rng) * Color::random(rng);
                        world.add(Box::new(Sphere::new(
                            center,
                            0.2,
                            Material::Lambertian { albedo },
                        )));
                    }
                    c if c < 0.95 => {
                        let albedo = Color::random_double(0.5..1., rng);
                        let fuzz = rng.gen_range(0. ..0.5);
                        world.add(Box::new(Sphere::new(
                            center,
                            0.2,
                            Material::Metal { albedo, fuzz },
                        )));
                    }
                    _ => {
                        world.add(Box::new(Sphere::new(
                            center,
                            0.2,
                            Material::Dielectric { ir: 1.5 },
                        )));
                    }
                }
            }
        }
    }
    // The three large feature spheres: glass, diffuse, metal.
    world.add(Box::new(Sphere::new(
        Point3::new(0., 1., 0.),
        1.,
        Material::Dielectric { ir: 1.5 },
    )));
    world.add(Box::new(Sphere::new(
        Point3::new(-4., 1., 0.),
        1.,
        Material::Lambertian {
            albedo: Color::new(0.4, 0.2, 0.1),
        },
    )));
    world.add(Box::new(Sphere::new(
        Point3::new(4., 1., 0.),
        1.,
        Material::Metal {
            albedo: Color::new(0.7, 0.6, 0.5),
            fuzz: 0.0,
        },
    )));
    world
}
|
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use crate::lib::identity::identity_utils::call_sender;
use crate::lib::operations::canister::deploy_canisters;
use crate::lib::provider::create_agent_environment;
use crate::lib::root_key::fetch_root_key_if_needed;
use crate::util::clap::validators::cycle_amount_validator;
use crate::util::expiry_duration;
use clap::Clap;
use tokio::runtime::Runtime;
/// Deploys all or a specific canister from the code in your project. By default, all canisters are deployed.
// NOTE: the `///` doc comments below double as clap's --help text, so they
// are user-visible output; plain `//` comments are reviewer-only.
#[derive(Clap)]
pub struct DeployOpts {
    /// Specifies the name of the canister you want to deploy.
    /// If you don’t specify a canister name, all canisters defined in the dfx.json file are deployed.
    canister_name: Option<String>,
    /// Specifies the argument to pass to the method.
    #[clap(long)]
    argument: Option<String>,
    /// Specifies the data type for the argument when making the call using an argument.
    // Only meaningful together with --argument (enforced by `requires`).
    #[clap(long, requires("argument"), possible_values(&["idl", "raw"]))]
    argument_type: Option<String>,
    /// Override the compute network to connect to. By default, the local network is used.
    /// A valid URL (starting with `http:` or `https:`) can be used here, and a special
    /// ephemeral network will be created specifically for this request. E.g.
    /// "http://localhost:12345/" is a valid network name.
    #[clap(long)]
    network: Option<String>,
    /// Specifies the initial cycle balance to deposit into the newly created canister.
    /// The specified amount needs to take the canister create fee into account.
    /// This amount is deducted from the wallet's cycle balance.
    // Validated up-front so bad amounts fail before any network work.
    #[clap(long, validator(cycle_amount_validator))]
    with_cycles: Option<String>,
    /// Specify a wallet canister id to perform the call.
    /// If none specified, defaults to use the selected Identity's wallet canister.
    #[clap(long)]
    wallet: Option<String>,
    /// Performs the call with the user Identity as the Sender of messages.
    /// Bypasses the Wallet canister.
    // Mutually exclusive with --wallet (enforced by `conflicts_with`).
    #[clap(long, conflicts_with("wallet"))]
    no_wallet: bool,
}
/// Entry point for `dfx deploy`: resolves the target network and call
/// sender, then drives canister deployment on a dedicated Tokio runtime
/// (the CLI itself is synchronous).
pub fn exec(env: &dyn Environment, opts: DeployOpts) -> DfxResult {
    // Re-bind `env` to an environment connected to the selected network.
    let env = create_agent_environment(env, opts.network)?;
    let timeout = expiry_duration();
    let canister_name = opts.canister_name.as_deref();
    let argument = opts.argument.as_deref();
    let argument_type = opts.argument_type.as_deref();
    let with_cycles = opts.with_cycles.as_deref();
    let runtime = Runtime::new().expect("Unable to create a runtime");
    let default_wallet_proxy = true;
    // Decide whether messages are sent via the wallet canister or directly
    // from the user identity (--wallet / --no-wallet).
    let call_sender = runtime.block_on(call_sender(
        &env,
        &opts.wallet,
        opts.no_wallet,
        default_wallet_proxy,
    ))?;
    // Needed for certificate validation against non-mainnet networks.
    runtime.block_on(fetch_root_key_if_needed(&env))?;
    runtime.block_on(deploy_canisters(
        &env,
        canister_name,
        argument,
        argument_type,
        timeout,
        with_cycles,
        &call_sender,
    ))
}
|
use proc_macro::TokenStream;
mod paths;
mod utils;
mod impl_check;
mod impl_command;
mod impl_hook;
use impl_check::impl_check;
use impl_command::impl_command;
use impl_hook::impl_hook;
/// Expands a `#[command]` attribute; on failure the error is converted
/// into a compile error so the user gets a spanned diagnostic instead of a
/// macro panic.
#[proc_macro_attribute]
pub fn command(attr: TokenStream, input: TokenStream) -> TokenStream {
    impl_command(attr.into(), input.into())
        .map_or_else(|err| err.to_compile_error().into(), Into::into)
}
/// Expands a `#[check]` attribute; errors become spanned compile errors.
#[proc_macro_attribute]
pub fn check(attr: TokenStream, input: TokenStream) -> TokenStream {
    impl_check(attr.into(), input.into())
        .map_or_else(|err| err.to_compile_error().into(), Into::into)
}
/// Expands a `#[hook]` attribute; errors become spanned compile errors.
#[proc_macro_attribute]
pub fn hook(attr: TokenStream, input: TokenStream) -> TokenStream {
    impl_hook(attr.into(), input.into())
        .map_or_else(|err| err.to_compile_error().into(), Into::into)
}
|
use crate::{
tokenizers::FullPathForChars,
field_paths::{FieldPaths,FieldPath},
};
#[allow(unused_imports)]
use core_extensions::SelfOps;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote_spanned;
use syn::{
parse::{self,Parse,ParseStream},
punctuated::Punctuated,
Ident,Token,
};
/// For each parsed alias, emits a type alias plus a const of that type,
/// both documented with the field path they stand for. Collecting into
/// `Result` short-circuits on the first error.
pub(crate) fn impl_(parsed: NameAliases) -> Result<TokenStream2, syn::Error> {
    // This uses the full path to each character to allow aliases
    // with the same name as the characters themselves.
    let char_verbosity = FullPathForChars::Yes;
    // Scratch buffer reused across aliases for the doc-comment text.
    let mut doc_fp_inner = String::new();
    parsed.aliases.iter()
        .map(move |alias| {
            doc_fp_inner.clear();
            alias.value.write_fp_inside(&mut doc_fp_inner);
            let alias_name = &alias.name;
            let field_name = alias.value.type_tokens(char_verbosity);
            let value = alias.value.inferred_expression_tokens();
            // Spanned at the alias name so compile errors point at the
            // user's identifier.
            Ok(quote_spanned!(alias_name.span()=>
                #[allow(non_camel_case_types,dead_code)]
                #[doc="An alias for `structural::FP!("]
                #[doc=#doc_fp_inner]
                #[doc=")`"]
                pub type #alias_name=#field_name;
                #[allow(non_upper_case_globals,dead_code)]
                #[doc="An alias for `structural::fp!("]
                #[doc=#doc_fp_inner]
                #[doc=")`"]
                pub const #alias_name:#alias_name=#value;
            ))
        })
        .collect()
}
/// The full comma-separated list of aliases passed to the macro.
#[derive(Debug)]
pub(crate) struct NameAliases {
    aliases: Punctuated<NameAlias, Token![,]>,
}
/// One alias entry: `name` or `name = path` or `name = (paths...)`.
#[derive(Debug)]
pub(crate) struct NameAlias {
    name: Ident,
    value: FieldPaths,
}
impl Parse for NameAliases {
    /// Parses a comma-separated alias list (trailing comma allowed).
    fn parse(input: ParseStream) -> parse::Result<Self> {
        let aliases = input.parse_terminated(Parse::parse)?;
        Ok(NameAliases { aliases })
    }
}
impl Parse for NameAlias{
fn parse(input: ParseStream) -> parse::Result<Self>{
let name=input.parse::<Ident>()?;
let value=if input.peek(Token!(=)) {
input.parse::<Token![=]>()?;
if input.peek(syn::token::Paren) {
let content;
let _=syn::parenthesized!(content in input);
content.parse::<FieldPaths>()?
}else {
input.parse::<FieldPath>()?
.piped(FieldPaths::from_path)
}
}else{
FieldPaths::from_ident(name.clone())
};
Ok(Self{name,value})
}
} |
extern crate piston_window;
mod draw;
mod paddle;
mod game;
mod block;
mod ball;
mod rectangle;
use piston_window::*;
use piston_window::types::Color;
use game::Game;
const BACK_COLOR: Color = [0.5,0.5,0.5,1.0];
fn main() {
let (width, height) = (400.0, 400.0);
let mut window: PistonWindow = WindowSettings::new(
"Breakout",
[width, height],
).exit_on_esc(true)
.build()
.unwrap();
//let mut keys = HashSet::new();
let mut game = Game::new(width, height);
while let Some(event) = window.next() {
/*match event {
Event::Input(inp, None) => {
match inp {
Event::Input::PressEvent(Button::Keyboard(key)) => {
game.keys.insert(key);
}
Event::Input::ReleaseEvent(Button::Keyboard(key)) => {
game.keys.remove(&key);
}
}
}
}*/
if let Some(Button::Keyboard(key)) = event.press_args() {
//game.key_pressed(key);
game.keys.insert(key);
}
if let Some(Button::Keyboard(key)) = event.release_args() {
game.keys.remove(&key);
}
window.draw_2d(&event, |c, g, _| {
clear(BACK_COLOR, g);
game.draw(&c, g);
});
event.update(|arg| {
game.update(arg.dt);
});
}
} |
use crate::inv::{Inventory, Release, BUCKET, REGION};
use crate::vrs::Version;
use anyhow::{anyhow, Error};
use chrono::{DateTime, Utc};
use regex::Regex;
use serde::Deserialize;
use std::convert::TryFrom;
use url::Url;
/// Content Node in the XML document returned by Amazon S3 for a public bucket.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
#[allow(dead_code)]
pub struct Content {
    // Examples of keys:
    // * yarn/release/yarn-v0.16.0.tar.gz
    // * node/release/darwin-x64/node-v0.10.0-darwin-x64.tar.gz
    key: String,
    /// Object upload timestamp reported by S3.
    last_modified: DateTime<Utc>,
    /// S3 returns the ETag wrapped in literal double quotes.
    #[serde(rename = "ETag")]
    etag: String,
    /// Object size in bytes.
    size: usize,
    /// E.g. "STANDARD".
    storage_class: String,
}
/// Representation of the XML document returned by Amazon S3 for a public bucket.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
#[allow(dead_code)]
pub struct ListBucketResult {
    name: String,
    prefix: String,
    /// Maximum number of keys returned in this page.
    max_keys: usize,
    is_truncated: bool,
    continuation_token: Option<String>,
    /// Present when the listing is truncated; pass it back as
    /// `continuation-token` to fetch the next page.
    next_continuation_token: Option<String>,
    contents: Vec<Content>,
}
/// Represents contents of a bucket's prefix
// Aggregates `Content` entries across all pages of a listing.
#[derive(Debug)]
pub struct BucketContent {
    prefix: String,
    contents: Vec<Content>,
}
impl TryFrom<BucketContent> for Inventory {
    type Error = anyhow::Error;
    /// # Failures
    /// These are the possible errors that can occur when calling this function:
    ///
    /// * Regex missing matching captures against `Content#key`
    /// * `Version::parse` fails to parse the version found in the `Content#key`
    fn try_from(result: BucketContent) -> Result<Self, Self::Error> {
        let inv = &result.prefix;
        // Pick the key-parsing regex by prefix; only "yarn" and "node"
        // inventories are supported. Note the node pattern also captures a
        // target architecture, which yarn keys do not carry.
        let version_regex = match inv.as_str() {
            "yarn" => Regex::new(
                r"yarn/(?P<channel>\w+)/yarn-v(?P<version>\d+\.\d+\.\d+(-[\w\.]+)?)\.tar\.gz",
            ),
            "node" => Regex::new(
                r"node/(?P<channel>\w+)/(?P<arch>[\w-]+)/node-v(?P<version>\d+\.\d+\.\d+)[\w-]+\.tar\.gz",
            ),
            i => Err(regex::Error::Syntax(format!(
                "Unknown S3 inventory prefix: {i}"
            ))),
        }?;
        // Map every S3 key to a Release; collecting into Result
        // short-circuits on the first malformed key.
        let releases: Result<Vec<Release>, Error> = result
            .contents
            .iter()
            .map(|content| {
                let capture = version_regex.captures(&content.key).ok_or_else(|| {
                    anyhow!("No valid version found in content: {}", &content.key)
                })?;
                let channel = capture.name("channel").ok_or_else(|| {
                    anyhow!("Could not find channel in content: {}", &content.key)
                })?;
                let version_number = capture.name("version").ok_or_else(|| {
                    anyhow!("Could not find version in content: {}", &content.key)
                })?;
                // Optional: only present for prefixes whose regex captures it.
                let arch = capture.name("arch");
                Ok(Release {
                    arch: arch.map(|a| a.as_str().to_string()),
                    version: Version::parse(version_number.as_str())?,
                    channel: channel.as_str().to_string(),
                    // Amazon S3 returns a quoted string for ETags
                    etag: Some(content.etag.replace('\"', "")),
                    url: format!(
                        "https://{BUCKET}.s3.{REGION}.amazonaws.com/{}",
                        &content.key
                    ),
                })
            })
            .collect();
        Ok(Self {
            name: inv.to_string(),
            releases: releases?,
        })
    }
}
/// Fetch all s3 buckets for a given folder.
///
/// Follows S3 ListObjectsV2 pagination until `NextContinuationToken` is
/// absent, accumulating every `Content` entry.
///
/// # Errors
///
/// * Failed http requests
/// * Parsing errors for an invalid S3 URL
/// * XML Parsing errors for an invalid XML document
pub fn list_objects<B: AsRef<str>, R: AsRef<str>, P: AsRef<str>>(
    b: B,
    r: R,
    p: P,
) -> Result<BucketContent, Error> {
    let bucket = b.as_ref();
    let region = r.as_ref();
    let prefix = p.as_ref();
    let mut bucket_content = BucketContent {
        prefix: prefix.to_string(),
        contents: vec![],
    };
    // Empty string means "first page"; populated from the previous
    // response thereafter.
    let mut continuation_token = String::new();
    loop {
        // list-type=2 selects the ListObjectsV2 API.
        let mut params = vec![("prefix", prefix), ("list-type", "2")];
        if !continuation_token.is_empty() {
            params.push(("continuation-token", continuation_token.as_str()));
        }
        let url = Url::parse_with_params(
            &format!("https://{bucket}.s3.{region}.amazonaws.com/"),
            params,
        )?;
        let res = ureq::get(url.as_ref()).call()?.into_string()?;
        let mut page: ListBucketResult = serde_xml_rs::from_str(&res)?;
        bucket_content.contents.append(&mut page.contents);
        // No continuation token means this was the final page.
        match page.next_continuation_token {
            None => break,
            Some(token) => continuation_token = token,
        }
    }
    Ok(bucket_content)
}
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Utc;
    // A node key parses, and the surrounding quotes are stripped from the ETag.
    #[test]
    fn it_converts_s3_result_to_inv() {
        let etag = "739c200ca266266ff150ad4d89b83205";
        let content = Content {
            key: "node/release/darwin-x64/node-v0.10.0-darwin-x64.tar.gz".to_string(),
            last_modified: Utc::now(),
            etag: format!("\"{etag}\""),
            size: 4_065_868,
            storage_class: "STANDARD".to_string(),
        };
        let bucket_content = BucketContent {
            prefix: "node".to_string(),
            contents: vec![content],
        };
        let result = Inventory::try_from(bucket_content);
        assert!(result.is_ok());
        if let Ok(inv) = result {
            assert_eq!(Some(String::from(etag)), inv.releases[0].etag);
        }
    }
    // Yarn keys carry no architecture segment; conversion must still succeed.
    #[test]
    fn it_converts_s3_result_to_inv_arch_optional() {
        let content = Content {
            key: "yarn/release/yarn-v0.16.0.tar.gz".to_string(),
            last_modified: Utc::now(),
            etag: "\"e4cc76bea92fabb664edadc4db14a8f2\"".to_string(),
            size: 7_234_362,
            storage_class: "STANDARD".to_string(),
        };
        let bucket_content = BucketContent {
            prefix: "yarn".to_string(),
            contents: vec![content],
        };
        let result = Inventory::try_from(bucket_content);
        assert!(result.is_ok());
    }
    // A key that matches no regex must surface an error, not be skipped.
    #[test]
    fn it_fails_to_convert_s3_result_to_inv() {
        let content = Content {
            key: "garbage".to_string(),
            last_modified: Utc::now(),
            etag: "\"e4cc76bea92fabb664edadc4db14a8f2\"".to_string(),
            size: 7_234_362,
            storage_class: "STANDARD".to_string(),
        };
        let bucket_content = BucketContent {
            prefix: "yarn".to_string(),
            contents: vec![content],
        };
        let result = Inventory::try_from(bucket_content);
        assert!(result.is_err());
    }
}
|
//! The `BonsaiDb` Server.
#![forbid(unsafe_code)]
#![warn(
clippy::cargo,
missing_docs,
// clippy::missing_docs_in_private_items,
clippy::nursery,
clippy::pedantic,
future_incompatible,
rust_2018_idioms,
)]
#![cfg_attr(doc, deny(rustdoc::all))]
#![allow(
clippy::missing_errors_doc, // TODO clippy::missing_errors_doc
clippy::missing_panics_doc, // TODO clippy::missing_panics_doc
clippy::option_if_let_else,
clippy::module_name_repetitions,
)]
mod async_io_util;
mod backend;
/// Command-line interface for the server.
#[cfg(feature = "cli")]
pub mod cli;
mod config;
mod error;
mod server;
#[cfg(feature = "pubsub")]
pub use server::ServerSubscriber;
pub use self::{
backend::{Backend, ConnectionHandling},
config::{Configuration, StorageConfiguration},
error::Error,
server::{ConnectedClient, CustomServer, Server, ServerDatabase, Transport},
};
#[cfg(test)]
mod tests;
#[cfg(any(feature = "test-util", test))]
pub mod test_util;
pub use bonsaidb_local as local;
|
// Problem 19 - Counting Sundays
//
// You are given the following information, but you may prefer to do some research
// for yourself.
//
// - 1 Jan 1900 was a Monday.
//
// - Thirty days has September,
// April, June and November.
// All the rest have thirty-one,
// Saving February alone,
// Which has twenty-eight, rain or shine.
// And on leap years, twenty-nine.
//
// - A leap year occurs on any year evenly divisible by 4, but not on a
// century unless it is divisible by 400.
//
// How many Sundays fell on the first of the month during the twentieth century
// (1 Jan 1901 to 31 Dec 2000)?
fn main() {
    // Print the answer to Project Euler problem 19.
    let answer = solution();
    println!("{}", answer);
}
/// Counts the Sundays that fell on the first of a month between
/// 1 Jan 1901 and 31 Dec 2000.
fn solution() -> u32 {
    let mut first_sundays = 0;
    // Weekday of the 1st of the current month; 0 = Sunday, 1 = Monday, ...
    // Seeded with Monday, since 1 Jan 1900 was a Monday.
    let mut weekday_of_first = 1;
    for year in 1900..=2000 {
        for month in 1..=12 {
            // 1900 only seeds the weekday; counting starts in 1901.
            if year > 1900 && weekday_of_first == 0 {
                first_sundays += 1;
            }
            // Advance to the 1st of the next month.
            weekday_of_first = (weekday_of_first + day_count(year, month)) % 7;
        }
    }
    first_sundays
}
/// Number of days in `month` (1-12) of `year`, honoring the Gregorian
/// leap-year rule (divisible by 4, except centuries not divisible by 400).
fn day_count(year: u32, month: u32) -> u32 {
    match month {
        1 | 3 | 5 | 7 | 8 | 10 | 12 => 31,
        4 | 6 | 9 | 11 => 30,
        // February: leap years get the extra day.
        _ if year % 400 == 0 || (year % 4 == 0 && year % 100 != 0) => 29,
        _ => 28,
    }
}
|
use super::RingBuffer;
/// A ring buffer that can grow by swapping in a larger inner buffer while
/// keeping the retired ones alive until their bytes are acknowledged.
pub struct ResizedRingBuffer {
    // After a resize, the old ring buffer must not be freed immediately
    // because external references to it may still exist. It can only be
    // released once all of its processed bytes have been acknowledged
    // (via `reset_read`). `max_processed` is that release watermark;
    // `usize::MAX` means no retired buffer is pending.
    max_processed: usize,
    old: Vec<RingBuffer>,
    inner: RingBuffer,
}
use std::ops::{Deref, DerefMut};
// Read access forwards transparently to the active (newest) buffer.
impl Deref for ResizedRingBuffer {
    type Target = RingBuffer;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Write access likewise targets only the active buffer; retired buffers
// are read-only leftovers awaiting release.
impl DerefMut for ResizedRingBuffer {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl ResizedRingBuffer {
    /// Creates a buffer with an initial capacity of 32 KiB.
    pub fn new() -> Self {
        Self {
            old: Vec::new(),
            // MAX = "no retired buffers waiting to be released".
            max_processed: std::usize::MAX,
            inner: RingBuffer::with_capacity(32 * 1024),
        }
    }
    /// Doubles the capacity, retiring the current buffer. Returns `false`
    /// (without resizing) once the 8 MiB ceiling would be exceeded.
    pub fn resize(&mut self) -> bool {
        let cap = self.inner.cap() * 2;
        // 8 MiB is a sufficiently large upper bound for a single request
        // in an online service.
        if cap > 8 * 1024 * 1024 {
            log::info!("ringbuffer: overflow. {}", cap);
            return false;
        }
        log::info!("ringbuffer: resize buffer to {}", cap);
        let new = self.inner.resize(cap);
        let old = std::mem::replace(&mut self.inner, new);
        // Record how far the retired buffer had processed; it becomes safe
        // to drop once reads catch up to this watermark.
        self.max_processed = old.processed();
        self.old.push(old);
        true
    }
    /// Acknowledges bytes up to `read` and frees retired buffers once all
    /// of their processed bytes have been acknowledged.
    #[inline(always)]
    pub fn reset_read(&mut self, read: usize) {
        self.inner.reset_read(read);
        if read >= self.max_processed {
            self.old.clear();
            self.max_processed = std::usize::MAX;
        }
    }
}
|
/// Part 1 policy: for a line "lo-hi c: password", the password must
/// contain the character `c` between `lo` and `hi` times (inclusive).
fn is_valid_part_01(input: &str) -> bool {
    let fields: Vec<&str> = input.split_whitespace().collect();
    let bounds: Vec<usize> = fields[0]
        .split('-')
        .map(|part| part.parse().unwrap())
        .collect();
    // fields[1] is like "a:"; take just the letter.
    let target = &fields[1][0..1];
    let occurrences = fields[2]
        .chars()
        .filter(|c| c.to_string() == target)
        .count();
    bounds[0] <= occurrences && occurrences <= bounds[1]
}
/// Counts the non-empty lines of `input` that satisfy the part-1 policy.
pub fn part_01(input: &str) -> usize {
    // Iterator form of the original counting loop; `is_empty()` replaces
    // the non-idiomatic `len() == 0`.
    input
        .split('\n')
        .filter(|line| !line.is_empty() && is_valid_part_01(line))
        .count()
}
/// Part 2 policy: for a line "i-j c: password", exactly one of the
/// (1-based) positions `i` and `j` must hold the character `c`.
fn is_valid_part_02(input: &str) -> bool {
    let fields: Vec<&str> = input.split_whitespace().collect();
    let positions: Vec<usize> = fields[0]
        .split('-')
        .map(|part| part.parse().unwrap())
        .collect();
    // fields[1] is like "a:"; take just the letter.
    let target = &fields[1][0..1];
    let matches_at =
        |pos: usize| fields[2].chars().nth(pos - 1).unwrap().to_string() == target;
    // XOR: exactly one of the two positions may match.
    matches_at(positions[0]) ^ matches_at(positions[1])
}
/// Counts the non-empty lines of `input` that satisfy the part-2 policy.
pub fn part_02(input: &str) -> usize {
    // Iterator form of the original counting loop; `is_empty()` replaces
    // the non-idiomatic `len() == 0`.
    input
        .split('\n')
        .filter(|line| !line.is_empty() && is_valid_part_02(line))
        .count()
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.