text stringlengths 8 4.13M |
|---|
use tui::{
backend::Backend,
layout::{Alignment, Constraint, Direction, Layout, Rect},
style::{Color, Modifier, Style},
text::{Span, Spans},
widgets::{Block, Borders, List, ListItem, Paragraph, Wrap},
Frame,
};
use super::{db, note::Note, AppState};
/// Top-level draw routine: splits the frame into a notes-list column (20%)
/// and the current-note panel (80%), then renders each side.
pub fn draw<B: Backend>(f: &mut Frame<B>, state: &mut AppState) {
    let columns = Layout::default()
        .direction(Direction::Horizontal)
        .margin(1)
        .constraints([Constraint::Percentage(20), Constraint::Percentage(80)].as_ref())
        .split(f.size());
    draw_notes_list(f, &columns[0], state);
    draw_current_note_panel(f, &columns[1], state);
}
/// Renders the right-hand panel: a title strip (10% of the height) stacked
/// above the note-contents area (90%).
fn draw_current_note_panel<B: Backend>(
    f: &mut Frame<B>,
    layout_chunk: &Rect,
    state: &mut AppState,
) {
    let rows = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Percentage(10), Constraint::Percentage(90)].as_ref())
        .split(*layout_chunk);
    draw_current_note_title(f, rows[0], state);
    draw_current_note_contents(f, rows[1], state);
}
/// Renders the selected note's title in a bordered "Title" box.
/// When no note is selected, an empty paragraph is drawn.
fn draw_current_note_title<B: Backend>(f: &mut Frame<B>, layout_chunk: Rect, state: &mut AppState) {
    // `note` must outlive `spans`, which borrows the title text from it.
    let note: Note;
    let mut spans: Vec<Span> = Vec::new();
    if let Some(id) = state.selected_note_id() {
        note = db::get_note(id).expect("Unable to access the current selected note");
        for line in note.title.lines() {
            spans.push(Span::raw(line));
        }
    }
    let widget = Paragraph::new(vec![Spans::from(spans)])
        .alignment(Alignment::Left)
        .block(Block::default().borders(Borders::ALL).title("Title"))
        .wrap(Wrap { trim: true });
    f.render_widget(widget, layout_chunk);
}
/// Renders the selected note's contents in a bordered "Content" box and
/// positions the terminal cursor at the current editing location.
fn draw_current_note_contents<B: Backend>(
    f: &mut Frame<B>,
    layout_chunk: Rect,
    state: &mut AppState,
) {
    // Width inside the two border columns. `saturating_sub` fixes a u16
    // underflow panic that the original `width - 2` hit whenever the
    // terminal chunk was narrower than 2 cells.
    let available_width_for_text = layout_chunk.width.saturating_sub(2) as usize;
    let mut text: Vec<Span> = Vec::new();
    let note: Note;
    if let Some(id) = state.selected_note_id() {
        note = db::get_note(id).expect("Unable to access the current selected note");
        note.contents
            .lines()
            .for_each(|line| text.push(Span::raw(line)));
    }
    let paragraph_contents = vec![Spans::from(text)];
    let paragraph = Paragraph::new(paragraph_contents)
        .alignment(Alignment::Left)
        .block(Block::default().borders(Borders::ALL).title("Content"))
        .wrap(Wrap { trim: true });
    f.render_widget(paragraph, layout_chunk);
    // current cursor location is incorrect because the note_state and the list_state are not
    // synced
    let cursor_loc = state.content_cursor_loc(available_width_for_text);
    // takes the coordinate of the frame, adds the cursor position in the respective direction,
    // then adds 1 to account for the space where the next character will be entered
    let x_offset = layout_chunk.x + cursor_loc.0 + 1;
    let y_offset = layout_chunk.y + cursor_loc.1 + 1;
    f.set_cursor(x_offset, y_offset);
}
/// Fetches every note from the database, caches the list on `state`, and
/// renders it as a selectable, highlighted list in a bordered "Notes" box.
fn draw_notes_list<B: Backend>(f: &mut Frame<B>, layout_chunk: &Rect, state: &mut AppState) {
    let notes = db::get_all_notes().expect("There was an error retrieving your notes");
    state.notes = notes;
    let list_items: Vec<ListItem> = state
        .notes
        .iter()
        // Fix: the original contained the mojibake token `¬e.title` — an
        // HTML-entity corruption of `&note.title` — which does not compile.
        .map(|note| ListItem::new(&note.title[..]))
        .collect();
    let list = List::new(list_items)
        .block(Block::default().borders(Borders::ALL).title("Notes"))
        .highlight_symbol("> ")
        .highlight_style(
            Style::default()
                .fg(Color::Green)
                .add_modifier(Modifier::BOLD),
        );
    f.render_stateful_widget(list, *layout_chunk, &mut state.list_state);
}
|
use block_cipher_trait::generic_array::GenericArray;
use block_cipher_trait::generic_array::typenum::U8;
use block_cipher_trait::generic_array::typenum::U32;
use stream_cipher::NewStreamCipher;
use stream_cipher::SyncStreamCipherSeek;
#[cfg(cargo_feature = "zeroize")]
use zeroize::Zeroize;
#[cfg(cargo_feature = "zeroize")]
use std::ops::Drop;
const KEY_BITS: usize = 256;
const KEY_BYTES: usize = KEY_BITS / 8;
const KEY_WORDS: usize = KEY_BYTES / 4;
const IV_BITS: usize = 64;
const IV_BYTES: usize = IV_BITS / 8;
const IV_WORDS: usize = IV_BYTES / 4;
const STATE_BYTES: usize = 64;
const STATE_WORDS: usize = STATE_BYTES / 4;
/// Shared cipher state for Salsa-family stream ciphers.
pub struct SalsaFamilyState {
    // Current keystream block: STATE_BYTES (64) bytes held as 32-bit words.
    pub block: [u32; STATE_WORDS],
    // 256-bit key as KEY_WORDS (8) 32-bit words, loaded little-endian.
    pub key: [u32; KEY_WORDS],
    // 64-bit IV/nonce as IV_WORDS (2) 32-bit words, loaded little-endian.
    pub iv: [u32; IV_WORDS],
    // Index of the current keystream block within the stream.
    pub block_idx: u64,
    // Byte offset into the current block (0..STATE_BYTES).
    pub offset: usize
}
/// Common keystream-application logic for Salsa-family stream ciphers.
///
/// Implementors provide block generation and offset bookkeeping; the default
/// `process` implementation XORs the keystream into a byte buffer in place.
pub trait SalsaFamilyCipher {
    // NOTE(review): `#[inline]` on trait-method *declarations* (no body) has
    // no effect; it only applies to definitions.
    /// Advances the cipher to the next keystream block.
    #[inline]
    fn next_block(&mut self);
    /// Current byte offset within the current block.
    #[inline]
    fn offset(&self) -> usize;
    /// Sets the byte offset within the current block.
    #[inline]
    fn set_offset(&mut self, offset: usize);
    /// Returns the `idx`-th 32-bit word of the current keystream block.
    #[inline]
    fn block_word(&self, idx: usize) -> u32;

    /// XORs the keystream into `data` in place, starting at the current
    /// offset and advancing the block/offset state as it consumes bytes.
    fn process(&mut self, data: &mut [u8]) {
        let datalen = data.len();
        // Index of the next unprocessed byte of `data`.
        let mut i = 0;
        let initial_offset = self.offset();
        // Byte position within the current 32-bit keystream word.
        let initial_word_offset = initial_offset % 4;
        let initial_word_remaining = 4 - initial_word_offset;
        // NOTE(review): `%` binds tighter than `+`, so this computes
        // `initial_offset + (datalen % STATE_BYTES)`. The final
        // `set_offset(final_offset % STATE_BYTES)` yields the same residue
        // either way, but the `final_offset == STATE_BYTES` comparison in the
        // short-input branch below would differ if
        // `(initial_offset + datalen) % STATE_BYTES` was intended — confirm.
        let final_offset = initial_offset + datalen % STATE_BYTES;
        if datalen > initial_word_remaining {
            // If the length of data is longer than remaining bytes in
            // the current word.
            let has_initial_words = initial_word_offset != 0;
            let initial_word_idx = initial_offset / 4;
            let mut word_idx = initial_offset / 4;
            // First, use the remaining part of the current word.
            if has_initial_words {
                let word = self.block_word(initial_word_idx);
                for j in initial_word_offset .. 4 {
                    // Keystream bytes are taken little-endian from each word.
                    data[i] = data[i] ^ ((word >> (j * 8)) & 0xff) as u8;
                    i += 1;
                }
                word_idx += 1;
            }
            // Check if the remaining data is longer than one block.
            let (leftover_words, leftover_bytes) =
                if (datalen - i) / 4 > STATE_WORDS - (word_idx % STATE_WORDS) {
                    // If the length of the remaining data is longer
                    // than the remaining words in the current block.
                    // Use the remaining part of the current block
                    if word_idx != STATE_WORDS {
                        for j in word_idx .. STATE_WORDS {
                            let word = self.block_word(j);
                            for k in 0 .. 4 {
                                data[i] = data[i] ^
                                ((word >> (k * 8)) & 0xff) as u8;
                                i += 1;
                            }
                        }
                        self.next_block();
                    } else {
                        word_idx = 0;
                        self.next_block();
                    }
                    let nblocks = (datalen - i) / 64;
                    let leftover = (datalen - i) % 64;
                    // Process whole blocks.
                    for _ in 0 .. nblocks {
                        for j in 0 .. STATE_WORDS {
                            let word = self.block_word(j);
                            for k in 0 .. 4 {
                                data[i] = data[i] ^
                                ((word >> (k * 8)) & 0xff) as u8;
                                i += 1;
                            }
                        }
                        self.next_block();
                    }
                    let leftover_words = leftover / 4;
                    // Process the leftover part of a block
                    for j in 0 .. leftover_words {
                        let word = self.block_word(j);
                        for k in 0 .. 4 {
                            data[i] = data[i] ^
                            ((word >> (k * 8)) & 0xff) as u8;
                            i += 1;
                        }
                    }
                    (leftover_words, leftover % 4)
                } else {
                    // If the remaining data is less than the length
                    // of a block.
                    let nwords = (datalen - i) / 4;
                    let leftover_bytes = (datalen - i) % 4;
                    // If we walked off the end of this block,
                    // generate the next one.
                    if has_initial_words && word_idx == STATE_WORDS {
                        word_idx = 0;
                        self.next_block();
                    }
                    // Use the remaining part of the current block
                    for j in word_idx .. word_idx + nwords {
                        let word = self.block_word(j);
                        for k in 0 .. 4 {
                            data[i] = data[i] ^
                            ((word >> (k * 8)) & 0xff) as u8;
                            i += 1;
                        }
                    }
                    if word_idx + nwords == STATE_WORDS {
                        self.next_block();
                    }
                    ((word_idx + nwords) % STATE_WORDS, leftover_bytes)
                };
            // Process the leftover part of a single word
            let word = self.block_word(leftover_words);
            for j in 0 .. leftover_bytes {
                data[i] = data[i] ^ ((word >> (j * 8)) & 0xff) as u8;
                i += 1;
            }
            self.set_offset((4 * leftover_words) + leftover_bytes);
        } else {
            // If the total length is less than the remaining bytes in
            // a word.
            let word_idx = self.offset() / 4 % STATE_WORDS;
            let word = self.block_word(word_idx);
            for j in initial_word_offset .. initial_word_offset + datalen {
                data[i] = data[i] ^ ((word >> (j * 8)) & 0xff) as u8;
                i += 1;
            }
            if final_offset == STATE_BYTES {
                self.next_block();
            }
        }
        // Set the offset and generate the next block if we ran over.
        self.set_offset(final_offset % STATE_BYTES);
    }
}
impl SalsaFamilyState {
    /// Creates a fully zeroed cipher state.
    pub fn create() -> SalsaFamilyState {
        SalsaFamilyState {
            block: [0; STATE_WORDS],
            key: [0; KEY_WORDS],
            iv: [0; IV_WORDS],
            block_idx: 0,
            offset: 0,
        }
    }

    /// Loads `key` and `iv` into the word arrays (little-endian byte order)
    /// and positions the stream at `block_idx`/`offset`.
    ///
    /// Panics if `key` is shorter than 32 bytes or `iv` shorter than 8,
    /// exactly as the original indexed loads did.
    pub fn init(&mut self, key: &[u8], iv: &[u8],
                block_idx: u64, offset: usize) {
        for (i, word) in self.key.iter_mut().enumerate() {
            *word = u32::from_le_bytes([
                key[4 * i],
                key[(4 * i) + 1],
                key[(4 * i) + 2],
                key[(4 * i) + 3],
            ]);
        }
        for (i, word) in self.iv.iter_mut().enumerate() {
            *word = u32::from_le_bytes([
                iv[4 * i],
                iv[(4 * i) + 1],
                iv[(4 * i) + 2],
                iv[(4 * i) + 3],
            ]);
        }
        self.block_idx = block_idx;
        self.offset = offset;
    }
}
impl NewStreamCipher for SalsaFamilyState {
    /// Key size in bytes
    type KeySize = U32;
    /// Nonce size in bytes
    type NonceSize = U8;

    /// Builds a state from `key` and `iv`, starting at block 0, offset 0.
    fn new(
        key: &GenericArray<u8, Self::KeySize>,
        iv: &GenericArray<u8, Self::NonceSize>,
    ) -> Self {
        let mut state = SalsaFamilyState::create();
        state.init(key.as_slice(), iv.as_slice(), 0, 0);
        state
    }
}
impl SyncStreamCipherSeek for SalsaFamilyState {
    /// Absolute byte position: 64 bytes per completed block plus the
    /// intra-block offset (offset is always < 64, so `|` equals `+`).
    fn current_pos(&self) -> u64 {
        (self.block_idx << 6) | self.offset as u64
    }

    /// Splits `pos` into a block index (high bits) and a byte offset
    /// (low 6 bits) within that block.
    fn seek(&mut self, pos: u64) {
        self.block_idx = pos >> 6;
        self.offset = (pos & 0x3f) as usize;
    }
}
// Fix: the gate was `#[cfg(cargo_feature = "zeroize")]`, which is not a cfg
// Cargo ever sets — the correct predicate for a Cargo feature is
// `#[cfg(feature = "...")]` — so this impl was silently never compiled.
#[cfg(feature = "zeroize")]
impl zeroize::Zeroize for SalsaFamilyState {
    /// Wipes key, IV, keystream block and stream position from memory.
    fn zeroize(&mut self) {
        // Bring the trait into scope locally so the field-level `zeroize`
        // calls resolve without relying on a (mis-gated) file-level import.
        use zeroize::Zeroize;
        self.block.zeroize();
        self.key.zeroize();
        self.iv.zeroize();
        self.block_idx.zeroize();
        self.offset.zeroize();
    }
}
// Fix: `#[cfg(cargo_feature = ...)]` is never set by Cargo; the correct form
// is `#[cfg(feature = "zeroize")]`, so this Drop impl was dead code.
#[cfg(feature = "zeroize")]
impl Drop for SalsaFamilyState {
    /// Scrubs all secret material when the state goes out of scope.
    fn drop(&mut self) {
        use zeroize::Zeroize;
        // Zeroize the fields directly (via the crate's impls for arrays and
        // integers) so this impl is self-contained and does not depend on
        // SalsaFamilyState's own Zeroize impl being compiled in.
        self.block.zeroize();
        self.key.zeroize();
        self.iv.zeroize();
        self.block_idx.zeroize();
        self.offset.zeroize();
    }
}
|
/*
Copyright (c) 2023 Uber Technologies, Inc.
<p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
<p>http://www.apache.org/licenses/LICENSE-2.0
<p>Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing permissions and
limitations under the License.
*/
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use colored::Colorize;
use getset::Getters;
use itertools::Itertools;
use jwalk::WalkDir;
use log::{debug, trace};
use regex::Regex;
use crate::{
models::capture_group_patterns::CGPattern, models::piranha_arguments::PiranhaArguments,
models::scopes::ScopeQueryGenerator, utilities::read_file,
};
use super::{
capture_group_patterns::CompiledCGPattern, language::PiranhaLanguage, rule::InstantiatedRule,
};
use glob::Pattern;
/// This maintains the state for Piranha.
#[derive(Debug, Getters, Default)]
pub(crate) struct RuleStore {
  // Caches the compiled tree-sitter queries.
  rule_query_cache: HashMap<String, CompiledCGPattern>,
  // Current global rules to be applied.
  #[get = "pub"]
  global_rules: Vec<InstantiatedRule>,
  // The language being analyzed; used for query compilation and for deciding
  // which files the language can parse.
  #[get = "pub"]
  language: PiranhaLanguage,
}
impl RuleStore {
  /// Builds a `RuleStore` for `args`, seeding the global rules with every
  /// instantiated seed rule from the rule graph.
  pub(crate) fn new(args: &PiranhaArguments) -> RuleStore {
    let mut rule_store = RuleStore {
      language: args.language().clone(),
      ..Default::default()
    };
    for rule in args.rule_graph().rules().clone() {
      if *rule.is_seed_rule() {
        rule_store.add_to_global_rules(&InstantiatedRule::new(&rule, &args.input_substitutions()));
      }
    }
    trace!("Rule Store {}", format!("{rule_store:#?}"));
    rule_store
  }

  /// Add a new global rule, along with grep heuristics (If it doesn't already exist)
  pub(crate) fn add_to_global_rules(&mut self, rule: &InstantiatedRule) {
    // A rule is a duplicate when an existing global rule has the same name,
    // replacement, and query. (The closure variable no longer shadows an
    // outer clone of `rule`, which the original did.)
    let already_present = self.global_rules.iter().any(|existing| {
      existing.name().eq(&rule.name())
        && existing.replace().eq(&rule.replace())
        && existing.query().eq(&rule.query())
    });
    if !already_present {
      #[rustfmt::skip]
      debug!("{}", format!("Added Global Rule : {:?} - {}", rule.name(), rule.query().pattern()).bright_blue());
      // Clone only on actual insertion; the original cloned unconditionally.
      self.global_rules.push(rule.clone());
    }
  }

  /// Get the compiled query for the `query_str` from the cache
  /// else compile it, add it to the cache and return it.
  pub(crate) fn query(&mut self, cg_pattern: &CGPattern) -> &CompiledCGPattern {
    let pattern = cg_pattern.pattern();
    // "rgx "-prefixed patterns compile to regexes.
    if pattern.starts_with("rgx ") {
      return &*self
        .rule_query_cache
        .entry(pattern)
        .or_insert_with(|| CompiledCGPattern::R(cg_pattern.extract_regex().unwrap()));
    }
    // "cs "-prefixed patterns compile to concrete-syntax matchers.
    if pattern.starts_with("cs ") {
      return &*self
        .rule_query_cache
        .entry(pattern)
        .or_insert_with(|| CompiledCGPattern::M(cg_pattern.extract_concrete_syntax()));
    }
    // Everything else is compiled as a tree-sitter query.
    &*self
      .rule_query_cache
      .entry(pattern.to_string())
      .or_insert_with(|| CompiledCGPattern::Q(self.language.create_query(pattern)))
  }

  // For the given scope level, get the ScopeQueryGenerator from the `scope_config.toml` file
  pub(crate) fn get_scope_query_generators(&self, scope_level: &str) -> Vec<ScopeQueryGenerator> {
    self
      .language()
      .scopes()
      .iter()
      .find(|level| level.name().eq(scope_level))
      .map(|scope| scope.rules().to_vec())
      .unwrap_or_default()
  }

  /// To create the current set of global rules, certain substitutions were applied.
  /// This method creates a regex pattern matching these substituted values.
  ///
  /// At the directory level, we would always look to perform global rules. However this is expensive because
  /// it requires parsing each file. To overcome this, we apply this simple
  /// heuristic to find the (upper bound) files that would match one of our current global rules.
  /// This heuristic reduces the number of files to parse.
  ///
  pub(crate) fn get_grep_heuristics(&self) -> Regex {
    // NOTE(review): the substitution values are joined into an alternation
    // without regex-escaping; a value containing regex metacharacters would
    // change (or break) the pattern — confirm values are always plain tokens.
    let reg_x = self
      .global_rules()
      .iter()
      .flat_map(|r| r.substitutions().values())
      .sorted()
      //Remove duplicates
      .dedup()
      //FIXME: Dirty trick to remove true and false. Ideally, grep heuristic could be a field in itself for a rule.
      // Since not all "holes" could be used as grep heuristic.
      .filter(|x| {
        !x.is_empty() && !x.to_lowercase().eq("true") && !x.to_lowercase().as_str().eq("false")
      })
      .join("|");
    Regex::new(reg_x.as_str()).unwrap()
  }

  /// Checks if any global rule has a hole
  pub(crate) fn any_global_rules_has_holes(&self) -> bool {
    self.global_rules().iter().any(|x| !x.holes().is_empty())
  }

  /// Gets all the files from the code base that (i) have the language appropriate file extension, and (ii) contains the grep pattern.
  /// Note that `WalkDir` traverses the directory with parallelism.
  /// If all the global rules have no holes (i.e. we will have no grep patterns), we will try to find a match for each global rule in every file in the target.
  pub(crate) fn get_relevant_files(
    &self, paths_to_codebase: &Vec<String>, include: &Vec<Pattern>, exclude: &Vec<Pattern>,
  ) -> HashMap<PathBuf, String> {
    let mut candidate_files: HashMap<PathBuf, String> = HashMap::new();
    for p2codebase in paths_to_codebase {
      if !Path::new(p2codebase).exists() {
        panic!("Path to codebase does not exist: {}", p2codebase);
      }
      let _paths_to_codebase = Path::new(p2codebase).to_path_buf();
      // If the path to codebase is a file, and the language can parse it, then add it to the files
      if _paths_to_codebase.is_file() && self.language().can_parse(&_paths_to_codebase) {
        candidate_files.insert(
          _paths_to_codebase.clone(),
          read_file(&_paths_to_codebase).unwrap(),
        );
        continue;
      }
      candidate_files.extend(self.get_candidate_files_from_dir(p2codebase, include, exclude));
    }
    // Filter the files based on the global rules with holes
    let final_file_set = self.filter_files_based_on_global_rule_holes(candidate_files);
    debug!(
      "{}",
      format!("{} files will be analyzed.", final_file_set.len()).green()
    );
    final_file_set
  }

  // If there are global rules with holes (grep patterns containing placeholders), filter files to find matches for those patterns in the target.
  // Otherwise, report all the files in the target.
  fn filter_files_based_on_global_rule_holes(
    &self, mut candidate_files: HashMap<PathBuf, String>,
  ) -> HashMap<PathBuf, String> {
    if self.any_global_rules_has_holes() {
      let pattern = self.get_grep_heuristics();
      // `retain` filters in place instead of cloning every surviving
      // path/content pair, as the original iter/map/collect did.
      candidate_files.retain(|_, content| pattern.is_match(content.as_str()));
    }
    candidate_files
  }

  /// Gets all the files from the code base that (i) have the language appropriate file extension, and (ii) satisfy include/exclude glob pattern
  fn get_candidate_files_from_dir(
    &self, p2codebase: &String, include: &Vec<Pattern>, exclude: &Vec<Pattern>,
  ) -> HashMap<PathBuf, String> {
    WalkDir::new(p2codebase)
      // walk over the entire code base
      .into_iter()
      // ignore errors
      .filter_map(|e| e.ok())
      // only retain the included paths (if any)
      .filter(|f| include.is_empty() || include.iter().any(|p| p.matches_path(&f.path())))
      // filter out all excluded paths (if any)
      .filter(|f| exclude.is_empty() || exclude.iter().all(|p| !p.matches_path(&f.path())))
      // filter files with the desired extension
      .filter(|de| self.language().can_parse(&de.path()))
      // read the file
      .map(|f| (f.path(), read_file(&f.path()).unwrap()))
      .collect()
  }
}
|
use serde::{Deserialize, Serialize};
use std::convert::AsRef;
/// The system package manager detected for the host distribution.
#[derive(Debug, Clone)]
pub enum PackageManager {
    Apt,
    Dnf,
    Pacman,
    Yum,
    Unknown,
}

impl AsRef<str> for PackageManager {
    /// Binary name used to invoke the package manager
    /// (`Unknown` maps to the empty string).
    fn as_ref(&self) -> &str {
        match self {
            PackageManager::Apt => "apt-get",
            PackageManager::Dnf => "dnf",
            PackageManager::Pacman => "pacman",
            PackageManager::Yum => "yum",
            PackageManager::Unknown => "",
        }
    }
}

#[allow(dead_code)]
impl PackageManager {
    /// Arguments for a non-interactive package installation.
    pub fn install_args(&self) -> Vec<&'static str> {
        match self {
            // apt-get, dnf and yum share the same install flags.
            Self::Apt | Self::Dnf | Self::Yum => vec!["install", "-y"],
            Self::Pacman => vec!["-S"],
            Self::Unknown => vec![],
        }
    }

    /// Arguments that refresh the repository metadata.
    pub fn update_repos_args(&self) -> Vec<&'static str> {
        match self {
            Self::Apt => vec!["update", "-y"],
            Self::Dnf | Self::Yum => vec!["clean", "metadata"],
            Self::Pacman => vec!["-Sy"],
            Self::Unknown => vec![],
        }
    }

    /// Arguments that upgrade all installed packages.
    pub fn upgrade_packages_args(&self) -> Vec<&'static str> {
        match self {
            Self::Apt => vec!["dist-upgrade", "-y"],
            Self::Dnf | Self::Yum => vec!["update", "-y"],
            Self::Pacman => vec!["-Syu"],
            Self::Unknown => vec![],
        }
    }
}
// enum holding version of os
/// Detected operating system; each known distribution carries its
/// version string.
#[derive(Debug, Deserialize, Clone, Serialize)]
pub enum Os {
    Arch(String),
    Centos(String),
    Debian(String),
    Fedora(String),
    Redhat(String),
    Ubuntu(String),
    Unknown,
}
impl AsRef<str> for Os {
fn as_ref(&self) -> &str {
match self {
Os::Arch(_) => "arch",
Os::Centos(_) => "centos",
Os::Debian(_) => "debian",
Os::Fedora(_) => "fedora",
Os::Redhat(_) => "redhat",
Os::Ubuntu(_) => "ubuntu",
Os::Unknown => "unknown",
}
}
}
impl Os {
    /// Builds an `Os` from an optional name and version; unrecognized or
    /// missing names yield `Os::Unknown` (with the version discarded).
    pub fn from(os: Option<String>, version: Option<String>) -> Os {
        if let Some(os) = os {
            let version = version.unwrap_or_default();
            match &os[..] {
                "arch" => Os::Arch(version),
                "centos" => Os::Centos(version),
                "debian" => Os::Debian(version),
                "fedora" => Os::Fedora(version),
                "redhat" => Os::Redhat(version),
                // Fix: "ubuntu" previously fell through to `Unknown` even
                // though the `Os::Ubuntu` variant exists and is handled by
                // every other method of this type.
                "ubuntu" => Os::Ubuntu(version),
                _ => Os::Unknown,
            }
        } else {
            Os::Unknown
        }
    }

    /// Version string recorded for this OS ("" for `Unknown`).
    pub fn os_ver(&self) -> &str {
        match self {
            Os::Arch(v) => v.as_str(),
            Os::Centos(v) => v.as_str(),
            Os::Debian(v) => v.as_str(),
            Os::Fedora(v) => v.as_str(),
            Os::Redhat(v) => v.as_str(),
            Os::Ubuntu(v) => v.as_str(),
            Os::Unknown => "",
        }
    }

    /// Maps the OS to its package manager.
    #[allow(dead_code)]
    pub fn package_manager(&self) -> PackageManager {
        match self {
            Os::Arch(_) => PackageManager::Pacman,
            Os::Debian(_) | Os::Ubuntu(_) => PackageManager::Apt,
            // NOTE(review): only version "8" selects dnf; modern Fedora
            // releases (e.g. "38") fall through to yum below — confirm this
            // version guard is intentional for Fedora.
            Os::Redhat(v) | Os::Centos(v) | Os::Fedora(v) if v == "8" => PackageManager::Dnf,
            Os::Redhat(_) | Os::Centos(_) | Os::Fedora(_) => PackageManager::Yum,
            Os::Unknown => PackageManager::Unknown,
        }
    }
}
|
#[cfg(test)]
use crate::model::Group;
use crate::solutions::k;
use crate::tests::samples;
use crate::tests::samples::{superman, wonder_woman, catman, batman, flash, deadshot, kid_flash, robin};
use crate::model::Alignment::{Good, Evil};
#[test]
fn should_return_map_with_empty_vec_if_supers_is_empty() {
    // Grouping an empty roster still yields both alignment buckets,
    // each holding no sidekicks.
    let heroes = Vec::new();
    let grouped = k::group_sidekicks_by_alignment(&heroes);
    assert_eq!(grouped.len(), 2);
    assert!(grouped.get(&Good).unwrap().is_empty());
    assert!(grouped.get(&Evil).unwrap().is_empty());
}
#[test]
fn should_return_map_with_empty_vec_if_supers_have_no_sidekicks() {
    // None of these supers has a sidekick, so both buckets stay empty.
    let heroes = vec![superman(), wonder_woman(), catman()];
    let grouped = k::group_sidekicks_by_alignment(&heroes);
    assert_eq!(grouped.len(), 2);
    assert!(grouped.get(&Good).unwrap().is_empty());
    assert!(grouped.get(&Evil).unwrap().is_empty());
}
#[test]
fn should_return_all_supers_who_have_sidekicks() {
    let heroes = vec![superman(), flash(), wonder_woman(), catman(), batman(), deadshot()];
    let grouped = k::group_sidekicks_by_alignment(&heroes);
    assert_eq!(grouped.len(), 2);
    // No evil super in this roster has a sidekick.
    assert!(grouped.get(&Evil).unwrap().is_empty());
    // Flash and Batman contribute their sidekicks, in roster order.
    let good_sidekicks = grouped.get(&Good).unwrap();
    assert!(!good_sidekicks.is_empty());
    assert_eq!(good_sidekicks.len(), 2);
    assert_eq!(*good_sidekicks.first().unwrap(), &kid_flash());
    assert_eq!(*good_sidekicks.last().unwrap(), &robin());
}
|
// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use kvproto::import_sstpb::*;
use tempfile::Builder;
use test_sst_importer::*;
#[allow(dead_code)]
#[path = "../../integrations/import/util.rs"]
mod util;
use self::util::{check_ingested_kvs, new_cluster_and_tikv_import_client};
// Opening sst writer involves IO operation, it may block threads for a while.
// Test if download sst works when opening sst writer is blocked.
#[test]
fn test_download_sst_blocking_sst_writer() {
    let (_cluster, ctx, tikv, import) = new_cluster_and_tikv_import_client();
    let temp_dir = Builder::new()
        .prefix("test_download_sst_blocking_sst_writer")
        .tempdir()
        .unwrap();
    // Generate an SST covering keys [0, 100) and target it at this region.
    let sst_path = temp_dir.path().join("test.sst");
    let sst_range = (0, 100);
    let (mut meta, _) = gen_sst_file(sst_path, sst_range);
    meta.set_region_id(ctx.get_region_id());
    meta.set_region_epoch(ctx.get_region_epoch().clone());
    // Sleep 20s, make sure it is large than grpc_keepalive_timeout (3s).
    let sst_writer_open_fp = "on_open_sst_writer";
    fail::cfg(sst_writer_open_fp, "sleep(20000)").unwrap();
    // Now perform a proper download.
    let mut download = DownloadRequest::default();
    download.set_sst(meta.clone());
    download.set_storage_backend(external_storage::make_local_backend(temp_dir.path()));
    download.set_name("test.sst".to_owned());
    // NOTE(review): the narrow range set here is immediately overwritten with
    // empty start/end below (an empty range covers the whole SST, which the
    // assertions rely on) — these two setters appear to be leftovers; confirm.
    download.mut_sst().mut_range().set_start(vec![sst_range.1]);
    download
        .mut_sst()
        .mut_range()
        .set_end(vec![sst_range.1 + 1]);
    download.mut_sst().mut_range().set_start(Vec::new());
    download.mut_sst().mut_range().set_end(Vec::new());
    let result = import.download(&download).unwrap();
    // The download must succeed despite the blocked writer, and the reported
    // key range is inclusive: [first key, last key].
    assert!(!result.get_is_empty());
    assert_eq!(result.get_range().get_start(), &[sst_range.0]);
    assert_eq!(result.get_range().get_end(), &[sst_range.1 - 1]);
    fail::remove(sst_writer_open_fp);
    // Do an ingest and verify the result is correct.
    let mut ingest = IngestRequest::default();
    ingest.set_context(ctx.clone());
    ingest.set_sst(meta);
    let resp = import.ingest(&ingest).unwrap();
    assert!(!resp.has_error());
    check_ingested_kvs(&tikv, &ctx, sst_range);
}
|
use crate::benchmarker::{modes, Benchmarker};
use crate::docker::docker_config::DockerConfig;
use crate::error::ToolsetError::UnknownBenchmarkerModeError;
use crate::error::ToolsetResult;
use crate::io::get_tfb_dir;
use crate::{io, metadata, options};
/// Runs the CLI matching the arguments/options passed and handling each.
pub fn run() -> ToolsetResult<()> {
    // `get_matches` consumes the app, so clone first — the original `app`
    // is still needed for `print_help` in the fallback branch.
    let mut app = options::parse();
    let matches = app.clone().get_matches();
    if matches.is_present(options::args::AUDIT) {
        // todo
        println!("AUDIT");
        Ok(())
    } else if matches.is_present(options::args::CLEAN) {
        // Remove the `results` directory under the TFB root.
        let mut tfb_dir = get_tfb_dir()?;
        tfb_dir.push("results");
        std::fs::remove_dir_all(&tfb_dir)?;
        Ok(())
    } else if matches.is_present(options::args::LIST_FRAMEWORKS) {
        io::print_all_frameworks()
    } else if matches.is_present(options::args::LIST_TESTS) {
        io::print_all_tests()
    } else if let Some(framework) = matches.value_of(options::args::LIST_TESTS_FOR_FRAMEWORK) {
        io::print_all_tests_for_framework(framework)
    } else if let Some(tag) = matches.value_of(options::args::LIST_TESTS_WITH_TAG) {
        io::print_all_tests_with_tag(tag)
    } else if matches.is_present(options::args::PARSE_RESULTS) {
        // todo
        println!("PARSE_RESULTS");
        Ok(())
    } else if let Some(mode) = matches.value_of(options::args::MODE) {
        // A mode was given: build the Docker config and project list, then
        // dispatch to the matching benchmarker entry point.
        let docker_config = DockerConfig::new(&matches);
        let projects = metadata::list_projects_to_run(&matches);
        let mut benchmarker = Benchmarker::new(docker_config, projects, mode);
        match mode {
            modes::BENCHMARK => benchmarker.benchmark(),
            modes::VERIFY | modes::CICD => benchmarker.verify(),
            modes::DEBUG => benchmarker.debug(),
            _ => Err(UnknownBenchmarkerModeError(mode.to_string())),
        }
    } else {
        // No recognized argument: show usage.
        app.print_help().unwrap();
        Ok(())
    }
}
|
// Copyright (c) 2018, The rav1e contributors. All rights reserved
//
// This source code is subject to the terms of the BSD 2 Clause License and
// the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
// was not distributed with this source code in the LICENSE file, you can
// obtain it at www.aomedia.org/license/software. If the Alliance for Open
// Media Patent License 1.0 was not distributed with this source code in the
// PATENTS file, you can obtain it at www.aomedia.org/license/patent.
#![deny(missing_docs)]
/// Color model information
pub mod color;
#[cfg(test)]
mod test;
pub use color::*;
use arrayvec::ArrayVec;
use bitstream_io::*;
use itertools::Itertools;
use num_derive::*;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde_derive::{Deserialize, Serialize};
use crate::context::*;
use crate::context::{FrameBlocks, SuperBlockOffset, TileSuperBlockOffset};
use crate::cpu_features::CpuFeatureLevel;
use crate::dist::get_satd;
use crate::encoder::*;
use crate::frame::*;
use crate::metrics::calculate_frame_psnr;
use crate::partition::*;
use crate::predict::PredictionMode;
use crate::rate::RCState;
use crate::rate::FRAME_NSUBTYPES;
use crate::rate::FRAME_SUBTYPE_I;
use crate::rate::FRAME_SUBTYPE_P;
use crate::rate::FRAME_SUBTYPE_SEF;
use crate::scenechange::SceneChangeDetector;
use crate::stats::EncoderStats;
use crate::tiling::{Area, TileRect, TilingInfo};
use crate::transform::TxSize;
use crate::util::Pixel;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::sync::Arc;
use std::{cmp, fmt, io};
// We add 1 to rdo_lookahead_frames in a bunch of places.
const MAX_RDO_LOOKAHEAD_FRAMES: usize = usize::max_value() - 1;
// TODO: use the num crate?
/// A rational number.
#[derive(Clone, Copy, Debug)]
#[repr(C)]
pub struct Rational {
  /// Numerator.
  pub num: u64,
  /// Denominator.
  pub den: u64,
}

impl Rational {
  /// Creates a rational number from the given numerator and denominator.
  pub fn new(num: u64, den: u64) -> Self {
    Self { num, den }
  }

  /// Returns a rational number that is the reciprocal of the given one.
  pub fn from_reciprocal(reciprocal: Self) -> Self {
    Self { num: reciprocal.den, den: reciprocal.num }
  }

  /// Returns the rational number as a floating-point number.
  pub fn as_f64(self) -> f64 {
    (self.num as f64) / (self.den as f64)
  }
}
/// Encoder settings which impact the produced bitstream.
#[derive(Clone, Debug)]
pub struct EncoderConfig {
  // output size
  /// Width of the frames in pixels.
  pub width: usize,
  /// Height of the frames in pixels.
  pub height: usize,
  // data format and ancillary color information
  /// Bit depth.
  pub bit_depth: usize,
  /// Chroma subsampling.
  pub chroma_sampling: ChromaSampling,
  /// Chroma sample position.
  pub chroma_sample_position: ChromaSamplePosition,
  /// Pixel value range.
  pub pixel_range: PixelRange,
  /// Content color description (primaries, transfer characteristics, matrix).
  pub color_description: Option<ColorDescription>,
  /// HDR mastering display parameters.
  pub mastering_display: Option<MasteringDisplay>,
  /// HDR content light parameters.
  pub content_light: Option<ContentLight>,
  /// Still picture mode flag.
  pub still_picture: bool,
  // encoder configuration
  /// Video time base.
  pub time_base: Rational,
  /// The *minimum* interval between two keyframes
  pub min_key_frame_interval: u64,
  /// The *maximum* interval between two keyframes
  pub max_key_frame_interval: u64,
  /// The number of temporal units over which to distribute the reservoir
  /// usage.
  pub reservoir_frame_delay: Option<i32>,
  /// Flag to enable low latency mode.
  ///
  /// In this mode the frame reordering is disabled.
  pub low_latency: bool,
  /// The base quantizer to use.
  pub quantizer: usize,
  /// The minimum allowed base quantizer to use in bitrate mode.
  pub min_quantizer: u8,
  /// The target bitrate for the bitrate mode.
  pub bitrate: i32,
  /// Metric to tune the quality for.
  pub tune: Tune,
  /// Number of tiles horizontally. Must be a power of two.
  ///
  /// Overridden by [`tiles`], if present.
  ///
  /// [`tiles`]: #structfield.tiles
  pub tile_cols: usize,
  /// Number of tiles vertically. Must be a power of two.
  ///
  /// Overridden by [`tiles`], if present.
  ///
  /// [`tiles`]: #structfield.tiles
  pub tile_rows: usize,
  /// Total number of tiles desired.
  ///
  /// Encoder will try to optimally split to reach this number of tiles,
  /// rounded up. Overrides [`tile_cols`] and [`tile_rows`].
  ///
  /// [`tile_cols`]: #structfield.tile_cols
  /// [`tile_rows`]: #structfield.tile_rows
  pub tiles: usize,
  /// Number of frames to read ahead for the RDO lookahead computation.
  pub rdo_lookahead_frames: usize,
  /// Settings which affect the encoding speed vs. quality trade-off.
  pub speed_settings: SpeedSettings,
  /// If enabled, computes the PSNR values and stores them in [`Packet`].
  ///
  /// [`Packet`]: struct.Packet.html#structfield.psnr
  pub show_psnr: bool,
  /// Enables dumping of internal RDO training data.
  pub train_rdo: bool,
}
/// Default preset for EncoderConfig: it is a balance between quality and
/// speed. See [`with_speed_preset()`].
///
/// [`with_speed_preset()`]: struct.EncoderConfig.html#method.with_speed_preset
impl Default for EncoderConfig {
  fn default() -> Self {
    // Speed preset 5 is the balanced quality/speed default.
    Self::with_speed_preset(5)
  }
}
impl EncoderConfig {
  /// This is a preset which provides default settings according to a speed
  /// value in the specific range 0–10. Each speed value corresponds to a
  /// different preset. See [`from_preset()`]. If the input value is greater
  /// than 10, it will result in the same settings as 10.
  ///
  /// [`from_preset()`]: struct.SpeedSettings.html#method.from_preset
  pub fn with_speed_preset(speed: usize) -> Self {
    EncoderConfig {
      // Output size.
      width: 640,
      height: 480,
      // Data format and ancillary color information.
      bit_depth: 8,
      chroma_sampling: ChromaSampling::Cs420,
      chroma_sample_position: ChromaSamplePosition::Unknown,
      pixel_range: Default::default(),
      color_description: None,
      mastering_display: None,
      content_light: None,
      still_picture: false,
      // Encoder configuration.
      time_base: Rational { num: 1, den: 30 },
      min_key_frame_interval: 12,
      max_key_frame_interval: 240,
      reservoir_frame_delay: None,
      low_latency: false,
      quantizer: 100,
      min_quantizer: 0,
      bitrate: 0,
      tune: Tune::default(),
      tile_cols: 0,
      tile_rows: 0,
      tiles: 0,
      rdo_lookahead_frames: 40,
      speed_settings: SpeedSettings::from_preset(speed),
      show_psnr: false,
      train_rdo: false,
    }
  }

  /// Sets the minimum and maximum keyframe interval, handling special cases as needed.
  pub fn set_key_frame_interval(
    &mut self, min_interval: u64, max_interval: u64,
  ) {
    self.min_key_frame_interval = min_interval;
    // Map an input value of 0 to an infinite interval
    self.max_key_frame_interval = match max_interval {
      0 => std::u64::MAX,
      other => other,
    };
  }

  /// Returns the video frame rate computed from [`time_base`].
  ///
  /// [`time_base`]: #structfield.time_base
  pub fn frame_rate(&self) -> f64 {
    Rational::from_reciprocal(self.time_base).as_f64()
  }
}
impl fmt::Display for EncoderConfig {
  /// Formats the configuration as a single line of space-separated
  /// `key=value` pairs covering rate control, tiling and speed settings.
  fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
    let pairs = [
      ("keyint_min", self.min_key_frame_interval.to_string()),
      ("keyint_max", self.max_key_frame_interval.to_string()),
      ("quantizer", self.quantizer.to_string()),
      ("bitrate", self.bitrate.to_string()),
      ("min_quantizer", self.min_quantizer.to_string()),
      ("low_latency", self.low_latency.to_string()),
      ("tune", self.tune.to_string()),
      ("tiles", self.tiles.to_string()),
      ("tile_rows", self.tile_rows.to_string()),
      ("tile_cols", self.tile_cols.to_string()),
      ("rdo_lookahead_frames", self.rdo_lookahead_frames.to_string()),
      ("min_block_size", self.speed_settings.min_block_size.to_string()),
      ("multiref", self.speed_settings.multiref.to_string()),
      ("fast_deblock", self.speed_settings.fast_deblock.to_string()),
      ("reduced_tx_set", self.speed_settings.reduced_tx_set.to_string()),
      (
        "tx_domain_distortion",
        self.speed_settings.tx_domain_distortion.to_string(),
      ),
      ("tx_domain_rate", self.speed_settings.tx_domain_rate.to_string()),
      ("encode_bottomup", self.speed_settings.encode_bottomup.to_string()),
      ("rdo_tx_decision", self.speed_settings.rdo_tx_decision.to_string()),
      ("prediction_modes", self.speed_settings.prediction_modes.to_string()),
      ("include_near_mvs", self.speed_settings.include_near_mvs.to_string()),
      (
        "no_scene_detection",
        self.speed_settings.no_scene_detection.to_string(),
      ),
      ("diamond_me", self.speed_settings.diamond_me.to_string()),
      ("cdef", self.speed_settings.cdef.to_string()),
      ("quantizer_rdo", self.speed_settings.quantizer_rdo.to_string()),
      ("use_satd_subpel", self.speed_settings.use_satd_subpel.to_string()),
    ];
    write!(
      f,
      "{}",
      pairs.iter().map(|pair| format!("{}={}", pair.0, pair.1)).join(" ")
    )
  }
}
/// Contains the speed settings.
///
/// Each field toggles one speed/quality trade-off independently; use
/// [`SpeedSettings::from_preset`] to fill all of them from a single numeric
/// speed level.
#[derive(Clone, Copy, Debug)]
pub struct SpeedSettings {
  /// Minimum block size.
  ///
  /// Must be a square block size, so e.g. 8×4 isn't allowed here.
  pub min_block_size: BlockSize,
  /// Enables inter-frames to have multiple reference frames.
  pub multiref: bool,
  /// Enables fast deblocking filter.
  pub fast_deblock: bool,
  /// Enables reduced transform set.
  pub reduced_tx_set: bool,
  /// Enables using transform-domain distortion instead of pixel-domain.
  pub tx_domain_distortion: bool,
  /// Enables using transform-domain rate estimation.
  pub tx_domain_rate: bool,
  /// Enables bottom-up encoding, rather than top-down.
  pub encode_bottomup: bool,
  /// Enables searching transform size and type with RDO.
  pub rdo_tx_decision: bool,
  /// Prediction modes to search.
  pub prediction_modes: PredictionModesSetting,
  /// Enables searching near motion vectors during RDO.
  pub include_near_mvs: bool,
  /// Disables scene-cut detection.
  pub no_scene_detection: bool,
  /// Enables diamond motion vector search rather than full search.
  pub diamond_me: bool,
  /// Enables CDEF.
  pub cdef: bool,
  /// Enables searching for the optimal segment ID (quantizer delta) with RDO.
  ///
  /// When disabled, the segment ID is chosen heuristically.
  pub quantizer_rdo: bool,
  /// Use SATD instead of SAD for subpixel search.
  pub use_satd_subpel: bool,
}
impl Default for SpeedSettings {
  /// Returns a baseline configuration: every boolean speed feature is off,
  /// prediction mode search is `Simple`, and the minimum block size is 16×16.
  fn default() -> Self {
    Self {
      // Partitioning and transforms.
      min_block_size: BlockSize::BLOCK_16X16,
      reduced_tx_set: false,
      tx_domain_distortion: false,
      tx_domain_rate: false,
      rdo_tx_decision: false,
      // Prediction and motion search.
      prediction_modes: PredictionModesSetting::Simple,
      multiref: false,
      include_near_mvs: false,
      diamond_me: false,
      use_satd_subpel: false,
      // Encoding order and filtering.
      encode_bottomup: false,
      fast_deblock: false,
      cdef: false,
      // Misc.
      no_scene_detection: false,
      quantizer_rdo: false,
    }
  }
}
impl SpeedSettings {
  /// Set the speed setting according to a numeric speed preset.
  ///
  /// The speed settings vary depending on speed value from 0 to 10.
  /// - 10 (fastest): min block size 64x64, reduced TX set, TX domain distortion, fast deblock, no scenechange detection.
  /// - 9: min block size 64x64, reduced TX set, TX domain distortion, fast deblock.
  /// - 8: min block size 8x8, reduced TX set, TX domain distortion, fast deblock.
  /// - 7: min block size 8x8, reduced TX set, TX domain distortion.
  /// - 6: min block size 8x8, reduced TX set, TX domain distortion.
  /// - 5 (default): min block size 8x8, reduced TX set, TX domain distortion, complex pred modes for keyframes.
  /// - 4: min block size 8x8, TX domain distortion, complex pred modes for keyframes.
  /// - 3: min block size 8x8, TX domain distortion, complex pred modes for keyframes, RDO TX decision.
  /// - 2: min block size 8x8, TX domain distortion, complex pred modes for keyframes, RDO TX decision, include near MVs, quantizer RDO.
  /// - 1: min block size 8x8, TX domain distortion, complex pred modes, RDO TX decision, include near MVs, quantizer RDO.
  /// - 0 (slowest): min block size 4x4, TX domain distortion, complex pred modes, RDO TX decision, include near MVs, quantizer RDO, bottom-up encoding.
  pub fn from_preset(speed: usize) -> Self {
    SpeedSettings {
      min_block_size: Self::min_block_size_preset(speed),
      multiref: Self::multiref_preset(speed),
      fast_deblock: Self::fast_deblock_preset(speed),
      reduced_tx_set: Self::reduced_tx_set_preset(speed),
      tx_domain_distortion: Self::tx_domain_distortion_preset(speed),
      tx_domain_rate: Self::tx_domain_rate_preset(speed),
      encode_bottomup: Self::encode_bottomup_preset(speed),
      rdo_tx_decision: Self::rdo_tx_decision_preset(speed),
      prediction_modes: Self::prediction_modes_preset(speed),
      include_near_mvs: Self::include_near_mvs_preset(speed),
      no_scene_detection: Self::no_scene_detection_preset(speed),
      diamond_me: Self::diamond_me_preset(speed),
      cdef: Self::cdef_preset(speed),
      quantizer_rdo: Self::quantizer_rdo_preset(speed),
      use_satd_subpel: Self::use_satd_subpel_preset(speed),
    }
  }
  /// This preset is set this way because 8x8 with reduced TX set is faster but with equivalent
  /// or better quality compared to 16x16 or 32x32 (to which reduced TX set does not apply).
  fn min_block_size_preset(speed: usize) -> BlockSize {
    let min_block_size = if speed == 0 {
      BlockSize::BLOCK_4X4
    } else if speed <= 8 {
      BlockSize::BLOCK_8X8
    } else {
      BlockSize::BLOCK_64X64
    };
    // Topdown search checks min_block_size for PARTITION_SPLIT only, so min_block_size must be square.
    assert!(min_block_size.is_sqr());
    min_block_size
  }
  /// Multiref is enabled automatically if low_latency is false,
  /// but if someone is setting low_latency to true manually,
  /// multiref has a large speed penalty with low quality gain.
  /// Because low_latency can be set manually, this setting is conservative.
  fn multiref_preset(speed: usize) -> bool {
    speed <= 1
  }
  /// Fast deblocking is a speed shortcut, only taken at the fastest presets.
  fn fast_deblock_preset(speed: usize) -> bool {
    speed >= 8
  }
  /// The reduced transform set kicks in from the default preset upward.
  fn reduced_tx_set_preset(speed: usize) -> bool {
    speed >= 5
  }
  /// TX domain distortion is always faster, with no significant quality change
  fn tx_domain_distortion_preset(_speed: usize) -> bool {
    true
  }
  /// TX domain rate estimation is currently disabled at every preset.
  fn tx_domain_rate_preset(_speed: usize) -> bool {
    false
  }
  /// Bottom-up encoding is only affordable at the slowest preset.
  fn encode_bottomup_preset(speed: usize) -> bool {
    speed == 0
  }
  /// RDO-based transform decisions are reserved for the slower presets.
  fn rdo_tx_decision_preset(speed: usize) -> bool {
    speed <= 3
  }
  /// Complexity of the prediction-mode search grows as speed decreases.
  fn prediction_modes_preset(speed: usize) -> PredictionModesSetting {
    if speed <= 1 {
      PredictionModesSetting::ComplexAll
    } else if speed <= 5 {
      PredictionModesSetting::ComplexKeyframes
    } else {
      PredictionModesSetting::Simple
    }
  }
  /// Near-MV search is only enabled at the slowest presets.
  fn include_near_mvs_preset(speed: usize) -> bool {
    speed <= 2
  }
  /// Scene-cut detection is skipped only at the very fastest preset.
  fn no_scene_detection_preset(speed: usize) -> bool {
    speed == 10
  }
  /// Currently Diamond ME gives better quality than full search on most videos,
  /// in addition to being faster.
  // There are a few outliers, such as the Wikipedia test clip.
  // TODO: Revisit this setting if full search quality improves in the future.
  fn diamond_me_preset(_speed: usize) -> bool {
    true
  }
  /// CDEF is enabled at every preset.
  fn cdef_preset(_speed: usize) -> bool {
    true
  }
  /// Quantizer RDO is reserved for the slowest presets.
  fn quantizer_rdo_preset(speed: usize) -> bool {
    speed <= 2
  }
  /// SATD-based subpel search is used everywhere except the fastest preset.
  // Renamed from `use_satd_subpel` for consistency with the `_preset`
  // suffix used by every other helper in this impl.
  fn use_satd_subpel_preset(speed: usize) -> bool {
    speed <= 9
  }
}
/// Possible types of a frame.
///
/// NOTE(review): the SCREAMING_CASE variant names and `#[repr(C)]` suggest
/// this enum is shared across an FFI boundary — confirm before renaming.
#[allow(dead_code, non_camel_case_types)]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
#[repr(C)]
pub enum FrameType {
  /// Key frame.
  KEY,
  /// Inter-frame.
  INTER,
  /// Intra-only frame.
  INTRA_ONLY,
  /// Switching frame.
  SWITCH,
}
impl fmt::Display for FrameType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use self::FrameType::*;
match self {
KEY => write!(f, "Key frame"),
INTER => write!(f, "Inter frame"),
INTRA_ONLY => write!(f, "Intra only frame"),
SWITCH => write!(f, "Switching frame"),
}
}
}
/// Prediction modes to search.
///
/// The derived `PartialOrd` follows declaration order, so
/// `Simple < ComplexKeyframes < ComplexAll` (least to most thorough search).
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, FromPrimitive)]
pub enum PredictionModesSetting {
  /// Only simple prediction modes.
  Simple,
  /// Search all prediction modes on key frames and simple modes on other
  /// frames.
  ComplexKeyframes,
  /// Search all prediction modes on all frames.
  ComplexAll,
}
impl fmt::Display for PredictionModesSetting {
  /// Writes the short label used in configuration dumps.
  fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
    // Pick the label up front; the variants carry no data to format.
    let label = match self {
      PredictionModesSetting::Simple => "Simple",
      PredictionModesSetting::ComplexKeyframes => "Complex-KFs",
      PredictionModesSetting::ComplexAll => "Complex-All",
    };
    f.write_str(label)
  }
}
/// Contains the encoder configuration.
#[derive(Clone, Debug, Default)]
pub struct Config {
  /// Settings which impact the produced bitstream.
  pub enc: EncoderConfig,
  /// The number of threads in the threadpool.
  ///
  /// Forwarded to `rayon::ThreadPoolBuilder::num_threads`, which documents
  /// `0` as "choose automatically".
  pub threads: usize,
}
/// Returns `true` when `n` is a valid tile count along one dimension:
/// either zero or a value that round-trips through `TilingInfo::tile_log2`
/// (presumably a power of two — confirm against `tile_log2`'s contract).
fn check_tile_log2(n: usize) -> bool {
  TilingInfo::tile_log2(1, n)
    .map_or(false, |tile_log2| ((1 << tile_log2) - n) == 0 || n == 0)
}
impl Config {
  /// Creates a [`Context`] with this configuration.
  ///
  /// # Errors
  ///
  /// Returns [`EncoderStatus::Failure`] when the configuration is invalid:
  /// zero width or height, too many RDO lookahead frames, a tile row/column
  /// count that is not zero or a power of two, a thread pool that cannot be
  /// built, or an inner context that cannot be constructed.
  ///
  /// # Panics
  ///
  /// Panics if the pixel type `T` is too small for the configured bit depth.
  ///
  /// # Examples
  ///
  /// ```
  /// use rav1e::prelude::*;
  ///
  /// # fn main() -> Result<(), EncoderStatus> {
  /// let cfg = Config::default();
  /// let ctx: Context<u8> = cfg.new_context()?;
  /// # Ok(())
  /// # }
  /// ```
  ///
  /// [`Context`]: struct.Context.html
  pub fn new_context<T: Pixel>(&self) -> Result<Context<T>, EncoderStatus> {
    assert!(
      8 * std::mem::size_of::<T>() >= self.enc.bit_depth,
      "The Pixel u{} does not match the Config bit_depth {}",
      8 * std::mem::size_of::<T>(),
      self.enc.bit_depth
    );
    info!("CPU Feature Level: {}", CpuFeatureLevel::default());
    // Validate the configuration up front, before any expensive setup.
    if self.enc.width == 0 || self.enc.height == 0 {
      return Err(EncoderStatus::Failure);
    }
    if self.enc.rdo_lookahead_frames > MAX_RDO_LOOKAHEAD_FRAMES {
      return Err(EncoderStatus::Failure);
    }
    if !check_tile_log2(self.enc.tile_cols) {
      return Err(EncoderStatus::Failure);
    }
    if !check_tile_log2(self.enc.tile_rows) {
      return Err(EncoderStatus::Failure);
    }
    // Build the thread pool only after validation so invalid configs fail
    // fast, and surface pool-construction failure as an error rather than
    // panicking inside a Result-returning API.
    let pool = rayon::ThreadPoolBuilder::new()
      .num_threads(self.threads)
      .build()
      .map_err(|_| EncoderStatus::Failure)?;
    let mut config = self.enc.clone();
    // FIXME: inter unsupported with 4:2:2 and 4:4:4 chroma sampling
    let chroma_sampling = config.chroma_sampling;
    // FIXME: tx partition for intra not supported for chroma 422
    if chroma_sampling == ChromaSampling::Cs422 {
      config.speed_settings.rdo_tx_decision = false;
    }
    let inner = ContextInner::new(&config).ok_or(EncoderStatus::Failure)?;
    Ok(Context { is_flushing: false, inner, pool, config })
  }
}
/// The set of options that controls frame re-ordering and reference picture
/// selection.
/// The options stored here are invariant over the whole encode.
#[derive(Debug, Clone, Copy)]
pub(crate) struct InterConfig {
  /// Whether frame re-ordering is enabled.
  reorder: bool,
  /// Whether P-frames can use multiple references.
  pub(crate) multiref: bool,
  /// The depth of the re-ordering pyramid.
  /// The current code cannot support values larger than 2.
  pub(crate) pyramid_depth: u64,
  /// Number of input frames in group.
  pub(crate) group_input_len: u64,
  /// Number of output frames in group.
  /// This includes both hidden frames and "show existing frame" frames.
  group_output_len: u64,
}
impl InterConfig {
  /// Derives the re-ordering parameters from the encoder configuration.
  /// Re-ordering is enabled exactly when `low_latency` is off.
  fn new(enc_config: &EncoderConfig) -> InterConfig {
    let reorder = !enc_config.low_latency;
    // A group always starts with (group_output_len - group_input_len) hidden
    // frames, followed by group_input_len shown frames.
    // The shown frames iterate over the input frames in order, with frames
    // already encoded as hidden frames now displayed with Show Existing
    // Frame.
    // For example, for a pyramid depth of 2, the group is as follows:
    //            |TU         |TU |TU |TU
    // idx_in_group_output:   0   1   2   3   4   5
    // input_frameno:         4   2   1  SEF  3  SEF
    // output_frameno:        1   2   3   4   5   6
    // level:                 0   1   2   1   2   0
    //                        ^^^^^   ^^^^^^^^^^^^^
    //                        hidden      shown
    // TODO: This only works for pyramid_depth <= 2 --- after that we need
    // more hidden frames in the middle of the group.
    let pyramid_depth = if reorder { 2 } else { 0 };
    // With no re-ordering this degenerates to groups of a single frame.
    let group_input_len = 1 << pyramid_depth;
    let group_output_len = group_input_len + pyramid_depth;
    InterConfig {
      reorder,
      multiref: reorder || enc_config.speed_settings.multiref,
      pyramid_depth,
      group_input_len,
      group_output_len,
    }
  }
  /// Get the index of an output frame in its re-ordering group given the output
  /// frame number of the frame in the current keyframe gop.
  /// When re-ordering is disabled, this always returns 0.
  pub(crate) fn get_idx_in_group_output(
    &self, output_frameno_in_gop: u64,
  ) -> u64 {
    // The first frame in the GOP should be a keyframe and is not re-ordered,
    // so we should not be calling this function on it.
    debug_assert!(output_frameno_in_gop > 0);
    (output_frameno_in_gop - 1) % self.group_output_len
  }
  /// Get the order-hint of an output frame given the output frame number of the
  /// frame in the current keyframe gop and the index of that output frame
  /// in its re-ordering gorup.
  pub(crate) fn get_order_hint(
    &self, output_frameno_in_gop: u64, idx_in_group_output: u64,
  ) -> u32 {
    // The first frame in the GOP should be a keyframe, but currently this
    // function only handles inter frames.
    // We could return 0 for keyframes if keyframe support is needed.
    debug_assert!(output_frameno_in_gop > 0);
    // Which P-frame group in the current gop is this output frame in?
    // Subtract 1 because the first frame in the gop is always a keyframe.
    let group_idx = (output_frameno_in_gop - 1) / self.group_output_len;
    // Get the offset to the corresponding input frame.
    // TODO: This only works with pyramid_depth <= 2.
    let offset = if idx_in_group_output < self.pyramid_depth {
      // Hidden frames: halve the offset at each pyramid level.
      self.group_input_len >> idx_in_group_output
    } else {
      // Shown frames: sequential within the group.
      idx_in_group_output - self.pyramid_depth + 1
    };
    // Construct the final order hint relative to the start of the group.
    (self.group_input_len * group_idx + offset) as u32
  }
  /// Get the level of the current frame in the pyramid.
  pub(crate) fn get_level(&self, idx_in_group_output: u64) -> u64 {
    if !self.reorder {
      0
    } else if idx_in_group_output < self.pyramid_depth {
      // Hidden frames are output first (to be shown in the future).
      idx_in_group_output
    } else {
      // Shown frames
      // TODO: This only works with pyramid_depth <= 2.
      pos_to_lvl(
        idx_in_group_output - self.pyramid_depth + 1,
        self.pyramid_depth,
      )
    }
  }
  /// Maps a frame's pyramid level and order hint to a reference slot index.
  pub(crate) fn get_slot_idx(&self, level: u64, order_hint: u32) -> u32 {
    // Frames with level == 0 are stored in slots 0..4, and frames with higher
    //  values of level in slots 4..8
    if level == 0 {
      (order_hint >> self.pyramid_depth) & 3
    } else {
      // This only works with pyramid_depth <= 4.
      3 + level as u32
    }
  }
  /// Returns whether the frame at this group index is shown (as opposed to a
  /// hidden frame emitted ahead of its display time).
  pub(crate) fn get_show_frame(&self, idx_in_group_output: u64) -> bool {
    idx_in_group_output >= self.pyramid_depth
  }
  /// Returns whether this group index is encoded as a Show Existing Frame,
  /// i.e. re-displays a previously-emitted hidden frame.
  pub(crate) fn get_show_existing_frame(
    &self, idx_in_group_output: u64,
  ) -> bool {
    // The self.reorder test here is redundant, but short-circuits the rest,
    // avoiding a bunch of work when it's false.
    self.reorder
      && self.get_show_frame(idx_in_group_output)
      && (idx_in_group_output - self.pyramid_depth + 1).count_ones() == 1
      && idx_in_group_output != self.pyramid_depth
  }
  /// Maps an output frame number (relative to its GOP) back to the input
  /// frame number it encodes.
  pub(crate) fn get_input_frameno(
    &self, output_frameno_in_gop: u64, gop_input_frameno_start: u64,
  ) -> u64 {
    if output_frameno_in_gop == 0 {
      // The GOP leader (keyframe) is never re-ordered.
      gop_input_frameno_start
    } else {
      let idx_in_group_output =
        self.get_idx_in_group_output(output_frameno_in_gop);
      let order_hint =
        self.get_order_hint(output_frameno_in_gop, idx_in_group_output);
      gop_input_frameno_start + order_hint as u64
    }
  }
  /// The maximum number of input frames re-ordering can delay output by.
  fn max_reordering_latency(&self) -> u64 {
    self.group_input_len
  }
  /// How many input frames ahead the keyframe detector must look.
  pub(crate) fn keyframe_lookahead_distance(&self) -> u64 {
    cmp::max(1, self.max_reordering_latency()) + 1
  }
}
/// Internal state of the encoder: frame queues, lookahead bookkeeping,
/// rate control, and per-frame invariants. Owned by [`Context`].
pub(crate) struct ContextInner<T: Pixel> {
  // Total number of input frames received so far.
  frame_count: u64,
  // Maximum number of input frames to encode; set when flushing.
  limit: Option<u64>,
  inter_cfg: InterConfig,
  // The next output frame number to emit.
  output_frameno: u64,
  frames_processed: u64,
  /// Maps *input_frameno* to frames
  frame_q: BTreeMap<u64, Option<Arc<Frame<T>>>>, //    packet_q: VecDeque<Packet>
  /// Maps *output_frameno* to frame data
  frame_invariants: BTreeMap<u64, FrameInvariants<T>>,
  /// A list of the input_frameno for keyframes in this encode.
  /// Needed so that we don't need to keep all of the frame_invariants in
  ///  memory for the whole life of the encode.
  // TODO: Is this needed at all?
  keyframes: BTreeSet<u64>,
  // Input frame numbers the caller explicitly forced to be keyframes.
  // TODO: Is this needed at all?
  keyframes_forced: BTreeSet<u64>,
  /// A storage space for reordered frames.
  packet_data: Vec<u8>,
  /// Maps `output_frameno` to `gop_output_frameno_start`.
  gop_output_frameno_start: BTreeMap<u64, u64>,
  /// Maps `output_frameno` to `gop_input_frameno_start`.
  pub(crate) gop_input_frameno_start: BTreeMap<u64, u64>,
  keyframe_detector: SceneChangeDetector,
  pub(crate) config: EncoderConfig,
  seq: Sequence,
  rc_state: RCState,
  maybe_prev_log_base_q: Option<i64>,
  /// The next `input_frameno` to be processed by lookahead.
  next_lookahead_frame: u64,
  /// The next `output_frameno` to be computed by lookahead.
  next_lookahead_output_frameno: u64,
}
/// The encoder context.
///
/// Contains the encoding state.
pub struct Context<T: Pixel> {
  // All mutable encoder state lives in the inner context.
  inner: ContextInner<T>,
  // A copy of the (possibly adjusted) encoder configuration.
  config: EncoderConfig,
  // Worker pool used by `receive_packet`.
  pool: rayon::ThreadPool,
  // Set once `send_frame(None)`/`flush` has been called; no further frames
  // are accepted afterwards.
  is_flushing: bool,
}
/// Status that can be returned by [`Context`] functions.
///
/// [`Context`]: struct.Context.html
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum EncoderStatus {
  /// The encoder needs more data to produce an output packet.
  ///
  /// May be emitted by [`Context::receive_packet()`] when frame reordering is
  /// enabled.
  ///
  /// [`Context::receive_packet()`]: struct.Context.html#method.receive_packet
  NeedMoreData,
  /// There are enough frames in the queue.
  ///
  /// May be emitted by [`Context::send_frame()`] when trying to send a frame
  /// after the encoder has been flushed.
  ///
  /// [`Context::send_frame()`]: struct.Context.html#method.send_frame
  EnoughData,
  /// The encoder has already produced the number of frames requested.
  ///
  /// May be emitted by [`Context::receive_packet()`] after a flush request had
  /// been processed or the frame limit had been reached.
  ///
  /// [`Context::receive_packet()`]: struct.Context.html#method.receive_packet
  LimitReached,
  /// A frame had been encoded but not emitted yet.
  Encoded,
  /// Generic fatal error (e.g. an invalid configuration).
  Failure,
  /// A frame was encoded in the first pass of a 2-pass encode, but its stats
  /// data was not retrieved with [`Context::twopass_out()`], or not enough
  /// stats data was provided in the second pass of a 2-pass encode to encode
  /// the next frame.
  ///
  /// [`Context::twopass_out()`]: struct.Context.html#method.twopass_out
  NotReady,
}
/// Represents a packet.
///
/// A packet contains one shown frame together with zero or more additional
/// frames.
#[derive(Debug, PartialEq)]
pub struct Packet<T: Pixel> {
  /// The packet data.
  pub data: Vec<u8>,
  /// The reconstruction of the shown frame.
  ///
  /// `None` when reconstruction output is not available for this packet.
  pub rec: Option<Frame<T>>,
  /// The number of the input frame corresponding to the one shown frame in the
  /// TU stored in this packet. Since AV1 does not explicitly reorder frames,
  /// these will increase sequentially.
  // TODO: When we want to add VFR support, we will need a more explicit time
  // stamp here.
  pub input_frameno: u64,
  /// Type of the shown frame.
  pub frame_type: FrameType,
  /// PSNR for Y, U, and V planes for the shown frame.
  pub psnr: Option<(f64, f64, f64)>,
  /// QP selected for the frame.
  pub qp: u8,
  /// Block-level encoding stats for the frame
  pub enc_stats: EncoderStats,
}
impl<T: Pixel> fmt::Display for Packet<T> {
  /// Formats the packet as `Frame <n> - <type> - <len> bytes`.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let size = self.data.len();
    write!(f, "Frame {} - {} - {} bytes", self.input_frameno, self.frame_type, size)
  }
}
/// Types which can be converted into frames.
///
/// This trait is used in [`Context::send_frame`] to allow for passing in
/// frames with optional frame parameters and optionally frames wrapped in
/// `Arc` (to allow for zero-copy, since the encoder uses frames in `Arc`
/// internally).
///
/// [`Context::send_frame`]: struct.Context.html#method.send_frame
pub trait IntoFrame<T: Pixel> {
  /// Converts the type into a tuple of frame and parameters.
  ///
  /// A `None` frame acts as a flush marker; `None` parameters mean the
  /// defaults are used.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>);
}
impl<T: Pixel> IntoFrame<T> for Option<Arc<Frame<T>>> {
  /// Passes the optional frame through, with no per-frame parameters.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>) {
    let params = None;
    (self, params)
  }
}
impl<T: Pixel> IntoFrame<T> for Arc<Frame<T>> {
  /// Wraps the already-shared frame in `Some`, with no per-frame parameters.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>) {
    let frame = Some(self);
    (frame, None)
  }
}
impl<T: Pixel> IntoFrame<T> for (Arc<Frame<T>>, FrameParameters) {
  /// Splits the pair into a present frame and present parameters.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>) {
    let (frame, params) = self;
    (Some(frame), Some(params))
  }
}
impl<T: Pixel> IntoFrame<T> for (Arc<Frame<T>>, Option<FrameParameters>) {
  /// Splits the pair, forwarding the optional parameters untouched.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>) {
    let (frame, params) = self;
    (Some(frame), params)
  }
}
impl<T: Pixel> IntoFrame<T> for Frame<T> {
  /// Moves the owned frame into an `Arc`, with no per-frame parameters.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>) {
    let frame = Arc::new(self);
    (Some(frame), None)
  }
}
impl<T: Pixel> IntoFrame<T> for (Frame<T>, FrameParameters) {
  /// Moves the owned frame into an `Arc` and attaches the parameters.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>) {
    let (frame, params) = self;
    (Some(Arc::new(frame)), Some(params))
  }
}
impl<T: Pixel> IntoFrame<T> for (Frame<T>, Option<FrameParameters>) {
  /// Moves the owned frame into an `Arc`, forwarding optional parameters.
  fn into(self) -> (Option<Arc<Frame<T>>>, Option<FrameParameters>) {
    let (frame, params) = self;
    (Some(Arc::new(frame)), params)
  }
}
impl<T: Pixel> Context<T> {
  /// Allocates and returns a new frame.
  ///
  /// # Examples
  ///
  /// ```
  /// use rav1e::prelude::*;
  ///
  /// # fn main() -> Result<(), EncoderStatus> {
  /// let cfg = Config::default();
  /// let ctx: Context<u8> = cfg.new_context()?;
  /// let frame = ctx.new_frame();
  /// # Ok(())
  /// # }
  /// ```
  #[inline]
  pub fn new_frame(&self) -> Frame<T> {
    // Frame geometry and chroma sampling come from the encoder config.
    Frame::new(
      self.config.width,
      self.config.height,
      self.config.chroma_sampling,
    )
  }
  /// Sends the frame for encoding.
  ///
  /// This method adds the frame into the frame queue and runs the first passes
  /// of the look-ahead computation.
  ///
  /// Passing `None` is equivalent to calling [`flush`].
  ///
  /// # Errors
  ///
  /// If this method is called with a frame after the encoder has been flushed,
  /// the [`EncoderStatus::EnoughData`] error is returned.
  ///
  /// # Examples
  ///
  /// ```
  /// use rav1e::prelude::*;
  ///
  /// # fn main() -> Result<(), EncoderStatus> {
  /// let cfg = Config::default();
  /// let mut ctx: Context<u8> = cfg.new_context().unwrap();
  /// let f1 = ctx.new_frame();
  /// let f2 = f1.clone();
  /// let info = FrameParameters {
  ///   frame_type_override: FrameTypeOverride::Key
  /// };
  ///
  /// // Send the plain frame data
  /// ctx.send_frame(f1)?;
  /// // Send the data and the per-frame parameters
  /// // In this case the frame is forced to be a keyframe.
  /// ctx.send_frame((f2, info))?;
  /// // Flush the encoder, it is equivalent to a call to `flush()`
  /// ctx.send_frame(None)?;
  /// # Ok(())
  /// # }
  /// ```
  ///
  /// [`flush`]: #method.flush
  /// [`EncoderStatus::EnoughData`]: enum.EncoderStatus.html#variant.EnoughData
  #[inline]
  pub fn send_frame<F>(&mut self, frame: F) -> Result<(), EncoderStatus>
  where
    F: IntoFrame<T>,
  {
    let (frame, params) = frame.into();
    if frame.is_none() {
      // `None` is a flush request.
      if self.is_flushing {
        // Flushing twice is a harmless no-op.
        return Ok(());
      }
      // Cap the encode at the frames received so far, then finish the
      // lookahead computation on the already-queued frames.
      self.inner.limit = Some(self.inner.frame_count);
      self.is_flushing = true;
      self.inner.compute_lookahead_data();
    } else if self.is_flushing {
      // No new frames are accepted once flushing has begun.
      return Err(EncoderStatus::EnoughData);
    }
    self.inner.send_frame(frame, params)
  }
  /// Returns the first-pass data of a two-pass encode for the frame that was
  /// just encoded.
  ///
  /// This should be called BEFORE every call to [`receive_packet`] (including
  /// the very first one), even if no packet was produced by the last call to
  /// [`receive_packet`], if any (i.e., [`EncoderStatus::Encoded`] was
  /// returned). It needs to be called once more after
  /// [`EncoderStatus::LimitReached`] is returned, to retrieve the header that
  /// should be written to the front of the stats file (overwriting the
  /// placeholder header that was emitted at the start of encoding).
  ///
  /// It is still safe to call this function when [`receive_packet`] returns
  /// any other error. It will return `None` instead of returning a duplicate
  /// copy of the previous frame's data.
  ///
  /// [`receive_packet`]: #method.receive_packet
  /// [`EncoderStatus::Encoded`]: enum.EncoderStatus.html#variant.Encoded
  /// [`EncoderStatus::LimitReached`]:
  /// enum.EncoderStatus.html#variant.LimitReached
  #[inline]
  pub fn twopass_out(&mut self) -> Option<&[u8]> {
    // The rate-control state decides what (if anything) to emit for the
    // current output frame.
    let params = self
      .inner
      .rc_state
      .get_twopass_out_params(&self.inner, self.inner.output_frameno);
    self.inner.rc_state.twopass_out(params)
  }
  /// Returns the number of bytes of the stats file needed before the next
  /// frame of the second pass in a two-pass encode can be encoded.
  ///
  /// This is a lower bound (more might be required), but if `0` is returned,
  /// then encoding can proceed. This is just a hint to the application, and
  /// does not need to be called for encoding the second pass to work, so long
  /// as the application continues to provide more data to [`twopass_in`] in a
  /// loop until [`twopass_in`] returns `0`.
  ///
  /// [`twopass_in`]: #method.twopass_in
  #[inline]
  pub fn twopass_bytes_needed(&mut self) -> usize {
    // Passing `None` queries the needed byte count without consuming data.
    self.inner.rc_state.twopass_in(None).unwrap_or(0)
  }
  /// Provides the stats data produced in the first pass of a two-pass encode
  /// to the second pass.
  ///
  /// On success this returns the number of bytes of the data which were
  /// consumed. When encoding the second pass of a two-pass encode, this should
  /// be called repeatedly in a loop before every call to [`receive_packet`]
  /// (including the very first one) until no bytes are consumed, or until
  /// [`twopass_bytes_needed`] returns `0`.
  ///
  /// [`receive_packet`]: #method.receive_packet
  /// [`twopass_bytes_needed`]: #method.twopass_bytes_needed
  #[inline]
  pub fn twopass_in(&mut self, buf: &[u8]) -> Result<usize, EncoderStatus> {
    self.inner.rc_state.twopass_in(Some(buf)).or(Err(EncoderStatus::Failure))
  }
  /// Encodes the next frame and returns the encoded data.
  ///
  /// This method is where the main encoding work is done.
  ///
  /// # Examples
  ///
  /// Encoding a single frame:
  ///
  /// ```
  /// use rav1e::prelude::*;
  ///
  /// # fn main() -> Result<(), EncoderStatus> {
  /// let cfg = Config::default();
  /// let mut ctx: Context<u8> = cfg.new_context()?;
  /// let frame = ctx.new_frame();
  ///
  /// ctx.send_frame(frame)?;
  /// ctx.flush();
  ///
  /// loop {
  ///     match ctx.receive_packet() {
  ///         Ok(packet) => { /* Mux the packet. */ },
  ///         Err(EncoderStatus::Encoded) => (),
  ///         Err(EncoderStatus::LimitReached) => break,
  ///         Err(err) => return Err(err),
  ///     }
  /// }
  /// # Ok(())
  /// # }
  /// ```
  ///
  /// Encoding a sequence of frames:
  ///
  /// ```
  /// use std::sync::Arc;
  /// use rav1e::prelude::*;
  ///
  /// fn encode_frames(
  ///     ctx: &mut Context<u8>,
  ///     mut frames: impl Iterator<Item=Frame<u8>>
  /// ) -> Result<(), EncoderStatus> {
  ///     // This is a slightly contrived example, intended to showcase the
  ///     // various statuses that can be returned from receive_packet().
  ///     // Assume that, for example, there are a lot of frames in the
  ///     // iterator, which are produced lazily, so you don't want to send
  ///     // them all in at once as to not exhaust the memory.
  ///     loop {
  ///         match ctx.receive_packet() {
  ///             Ok(packet) => { /* Mux the packet. */ },
  ///             Err(EncoderStatus::Encoded) => {
  ///                 // A frame was encoded without emitting a packet. This is
  ///                 // normal, just proceed as usual.
  ///             },
  ///             Err(EncoderStatus::LimitReached) => {
  ///                 // All frames have been encoded. Time to break out of the
  ///                 // loop.
  ///                 break;
  ///             },
  ///             Err(EncoderStatus::NeedMoreData) => {
  ///                 // The encoder has requested additional frames. Push the
  ///                 // next frame in, or flush the encoder if there are no
  ///                 // frames left (on None).
  ///                 ctx.send_frame(frames.next().map(Arc::new))?;
  ///             },
  ///             Err(EncoderStatus::EnoughData) => {
  ///                 // Since we aren't trying to push frames after flushing,
  ///                 // this should never happen in this example.
  ///                 unreachable!();
  ///             },
  ///             Err(EncoderStatus::NotReady) => {
  ///                 // We're not doing two-pass encoding, so this can never
  ///                 // occur.
  ///                 unreachable!();
  ///             },
  ///             Err(EncoderStatus::Failure) => {
  ///                 return Err(EncoderStatus::Failure);
  ///             },
  ///         }
  ///     }
  ///
  ///     Ok(())
  /// }
  /// # fn main() -> Result<(), EncoderStatus> {
  /// #     let mut cfg = Config::default();
  /// #     // So it runs faster.
  /// #     cfg.enc.width = 16;
  /// #     cfg.enc.height = 16;
  /// #     let mut ctx: Context<u8> = cfg.new_context()?;
  /// #
  /// #     let frames = vec![ctx.new_frame(); 4].into_iter();
  /// #     encode_frames(&mut ctx, frames);
  /// #
  /// #     Ok(())
  /// # }
  /// ```
  #[inline]
  pub fn receive_packet(&mut self) -> Result<Packet<T>, EncoderStatus> {
    let inner = &mut self.inner;
    let pool = &mut self.pool;
    // Run the encode inside the context's rayon pool so worker threads are
    // scoped to this Context.
    pool.install(|| inner.receive_packet())
  }
  /// Flushes the encoder.
  ///
  /// Flushing signals the end of the video. After the encoder has been
  /// flushed, no additional frames are accepted.
  #[inline]
  pub fn flush(&mut self) {
    // send_frame(None) cannot fail: the None path only sets flush state.
    self.send_frame(None).unwrap();
  }
  /// Produces a sequence header matching the current encoding context.
  ///
  /// Its format is compatible with the AV1 Matroska and ISOBMFF specification.
  /// Note that the returned header does not include any config OBUs which are
  /// required for some uses. See [the specification].
  ///
  /// [the specification]:
  /// https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-section
  #[inline]
  pub fn container_sequence_header(&self) -> Vec<u8> {
    // Serializes the av1C configuration record bit-by-bit.
    fn sequence_header_inner(seq: &Sequence) -> io::Result<Vec<u8>> {
      let mut buf = Vec::new();
      {
        let mut bw = BitWriter::endian(&mut buf, BigEndian);
        bw.write_bit(true)?; // marker
        bw.write(7, 1)?; // version
        bw.write(3, seq.profile)?;
        bw.write(5, 31)?; // level
        bw.write_bit(false)?; // tier
        bw.write_bit(seq.bit_depth > 8)?; // high_bitdepth
        bw.write_bit(seq.bit_depth == 12)?; // twelve_bit
        // NOTE(review): `bit_depth == 1` looks suspicious for the monochrome
        // flag — it is effectively always false. Presumably this should test
        // the chroma sampling (4:0:0) instead; confirm against the av1C spec.
        bw.write_bit(seq.bit_depth == 1)?; // monochrome
        bw.write_bit(seq.chroma_sampling != ChromaSampling::Cs444)?; // chroma_subsampling_x
        bw.write_bit(seq.chroma_sampling == ChromaSampling::Cs420)?; // chroma_subsampling_y
        bw.write(2, 0)?; // sample_position
        bw.write(3, 0)?; // reserved
        bw.write_bit(false)?; // initial_presentation_delay_present
        bw.write(4, 0)?; // reserved
      }
      Ok(buf)
    }
    let seq = Sequence::new(&self.config).unwrap();
    sequence_header_inner(&seq).unwrap()
  }
}
impl<T: Pixel> ContextInner<T> {
/// Builds the internal encoder state from a validated configuration.
/// Returns `None` when the sequence or rate-control state cannot be built.
pub fn new(enc: &EncoderConfig) -> Option<Self> {
  // A quantizer below 255 acts as an AC quantizer ceiling for rate control.
  let maybe_ac_qi_max = if enc.quantizer < 255 {
    Some(enc.quantizer as u8)
  } else {
    None
  };
  let seq = Sequence::new(enc)?;
  let rc_state = RCState::new(
    enc.width as i32,
    enc.height as i32,
    enc.time_base.den as i64,
    enc.time_base.num as i64,
    enc.bitrate,
    maybe_ac_qi_max,
    enc.min_quantizer,
    enc.max_key_frame_interval as i32,
    enc.reservoir_frame_delay,
  )?;
  Some(ContextInner {
    frame_count: 0,
    limit: None,
    inter_cfg: InterConfig::new(enc),
    output_frameno: 0,
    frames_processed: 0,
    frame_q: BTreeMap::new(),
    frame_invariants: BTreeMap::new(),
    keyframes: BTreeSet::new(),
    keyframes_forced: BTreeSet::new(),
    // The packet buffer starts with a temporal delimiter.
    packet_data: TEMPORAL_DELIMITER.to_vec(),
    gop_output_frameno_start: BTreeMap::new(),
    gop_input_frameno_start: BTreeMap::new(),
    keyframe_detector: SceneChangeDetector::new(enc.bit_depth as u8),
    config: enc.clone(),
    seq,
    rc_state,
    maybe_prev_log_base_q: None,
    next_lookahead_frame: 0,
    next_lookahead_output_frameno: 0,
  })
}
/// Queues one input frame (or a `None` flush marker) and advances lookahead.
pub fn send_frame(
  &mut self, frame: Option<Arc<Frame<T>>>, params: Option<FrameParameters>,
) -> Result<(), EncoderStatus> {
  // A `None` entry (flush marker) is queued but not counted as input.
  let input_frameno = self.frame_count;
  if frame.is_some() {
    self.frame_count += 1;
  }
  self.frame_q.insert(input_frameno, frame);
  // Honor an explicit per-frame keyframe request.
  let force_key = params
    .map_or(false, |p| p.frame_type_override == FrameTypeOverride::Key);
  if force_key {
    self.keyframes_forced.insert(input_frameno);
  }
  self.compute_lookahead_data();
  Ok(())
}
/// Fetches the queued frame for `input_frameno`.
/// Panics if the slot is missing or holds a flush marker.
fn get_frame(&self, input_frameno: u64) -> Arc<Frame<T>> {
  // Cloning an Arc only bumps the refcount, so this is cheap.
  let slot = self.frame_q.get(&input_frameno).unwrap();
  Arc::clone(slot.as_ref().unwrap())
}
/// Indicates whether more frames need to be read into the frame queue
/// in order for frame queue lookahead to be full.
fn needs_more_frame_q_lookahead(&self, input_frameno: u64) -> bool {
  // Highest input frame number currently queued (0 when the queue is empty).
  let lookahead_end =
    self.frame_q.keys().next_back().copied().unwrap_or(0);
  let frames_needed =
    input_frameno + self.inter_cfg.keyframe_lookahead_distance() + 1;
  lookahead_end < frames_needed && self.needs_more_frames(lookahead_end)
}
/// Indicates whether more frames need to be processed into FrameInvariants
/// in order for FI lookahead to be full.
fn needs_more_fi_lookahead(&self) -> bool {
  // Count the FrameInvariants already prepared ahead of the current output
  // frame; if the RDO window (lookahead + 1) isn't full and input remains,
  // lookahead has to continue.
  let ready_frames = self.get_rdo_lookahead_frames().count();
  let window_full = ready_frames >= self.config.rdo_lookahead_frames + 1;
  !window_full && self.needs_more_frames(self.next_lookahead_frame)
}
/// Returns whether the encoder still accepts frames at `frame_count`.
pub fn needs_more_frames(&self, frame_count: u64) -> bool {
  // With no limit set (not flushing), more frames are always welcome.
  self.limit.map_or(true, |limit| frame_count < limit)
}
/// Iterates over the valid, non-show-existing FrameInvariants inside the
/// RDO lookahead window, starting at the current output frame.
fn get_rdo_lookahead_frames(
  &self,
) -> impl Iterator<Item = (&u64, &FrameInvariants<T>)> {
  // Capture a copy of the start frame so the closure doesn't borrow self.
  let first = self.output_frameno;
  self
    .frame_invariants
    .iter()
    .skip_while(move |&(&frameno, _)| frameno < first)
    .filter(|&(_, fi)| !fi.invalid && !fi.show_existing_frame)
    .take(self.config.rdo_lookahead_frames + 1)
}
/// Returns the input frame number of the next keyframe after the GOP start:
/// the earlier of the next detected scene change and the maximum keyframe
/// interval (optionally capped by the frame limit).
fn next_keyframe_input_frameno(
  &self, gop_input_frameno_start: u64, ignore_limit: bool,
) -> u64 {
  // Upper bound imposed by the configured maximum keyframe interval.
  let mut next_limit =
    gop_input_frameno_start + self.config.max_key_frame_interval;
  // Unless told otherwise, the overall frame limit also caps the answer.
  if !ignore_limit {
    if let Some(limit) = self.limit {
      next_limit = next_limit.min(limit);
    }
  }
  // First detected scene change strictly after the GOP start, if any.
  let next_detected = self
    .keyframes
    .iter()
    .copied()
    .find(|&input_frameno| input_frameno > gop_input_frameno_start);
  match next_detected {
    Some(detected) => cmp::min(detected, next_limit),
    None => next_limit,
  }
}
fn set_frame_properties(
&mut self, output_frameno: u64,
) -> Result<(), EncoderStatus> {
let fi = self.build_frame_properties(output_frameno)?;
self.frame_invariants.insert(output_frameno, fi);
Ok(())
}
  /// Computes the `FrameInvariants` for `output_frameno` from the current
  /// GOP state, detected keyframes and the frame queue.
  ///
  /// Returns `EncoderStatus::NeedMoreData` when the input frames required
  /// to decide this output frame are not queued yet.
  fn build_frame_properties(
    &mut self, output_frameno: u64,
  ) -> Result<FrameInvariants<T>, EncoderStatus> {
    // Start from the previous output frame's GOP boundaries (0, 0 for the
    // very first output frame).
    let (prev_gop_output_frameno_start, prev_gop_input_frameno_start) =
      if output_frameno == 0 {
        (0, 0)
      } else {
        (
          self.gop_output_frameno_start[&(output_frameno - 1)],
          self.gop_input_frameno_start[&(output_frameno - 1)],
        )
      };
    // Tentatively carry the previous boundaries forward; they are
    // overwritten below if this frame turns out to start a new GOP.
    self
      .gop_output_frameno_start
      .insert(output_frameno, prev_gop_output_frameno_start);
    self
      .gop_input_frameno_start
      .insert(output_frameno, prev_gop_input_frameno_start);
    let output_frameno_in_gop =
      output_frameno - self.gop_output_frameno_start[&output_frameno];
    let mut input_frameno = self.inter_cfg.get_input_frameno(
      output_frameno_in_gop,
      self.gop_input_frameno_start[&output_frameno],
    );
    if self.needs_more_frame_q_lookahead(input_frameno) {
      return Err(EncoderStatus::NeedMoreData);
    }
    if output_frameno_in_gop > 0 {
      let next_keyframe_input_frameno = self.next_keyframe_input_frameno(
        self.gop_input_frameno_start[&output_frameno],
        false,
      );
      let prev_input_frameno =
        self.frame_invariants[&(output_frameno - 1)].input_frameno;
      if input_frameno >= next_keyframe_input_frameno {
        // The mapped input frame falls at/past the next keyframe: either
        // start the next GOP here, or (when reordering) emit an invalid
        // placeholder until the re-order group boundary lines up.
        if !self.inter_cfg.reorder
          || ((output_frameno_in_gop - 1) % self.inter_cfg.group_output_len
            == 0
            && prev_input_frameno == (next_keyframe_input_frameno - 1))
        {
          input_frameno = next_keyframe_input_frameno;
          // If we'll return early, do it before modifying the state.
          match self.frame_q.get(&input_frameno) {
            Some(Some(_)) => {}
            _ => {
              return Err(EncoderStatus::NeedMoreData);
            }
          }
          *self.gop_output_frameno_start.get_mut(&output_frameno).unwrap() =
            output_frameno;
          *self.gop_input_frameno_start.get_mut(&output_frameno).unwrap() =
            next_keyframe_input_frameno;
        } else {
          let fi = FrameInvariants::new_inter_frame(
            &self.frame_invariants[&(output_frameno - 1)],
            &self.inter_cfg,
            self.gop_input_frameno_start[&output_frameno],
            output_frameno_in_gop,
            next_keyframe_input_frameno,
          );
          // Placeholder frames past the GOP end must be invalid.
          assert!(fi.invalid);
          return Ok(fi);
        }
      }
    }
    // The input frame itself must be present in the queue.
    match self.frame_q.get(&input_frameno) {
      Some(Some(_)) => {}
      _ => {
        return Err(EncoderStatus::NeedMoreData);
      }
    }
    // Now that we know the input_frameno, look up the correct frame type
    let frame_type = if self.keyframes.contains(&input_frameno) {
      FrameType::KEY
    } else {
      FrameType::INTER
    };
    if frame_type == FrameType::KEY {
      // A detected keyframe starts a new GOP at this output frame.
      *self.gop_output_frameno_start.get_mut(&output_frameno).unwrap() =
        output_frameno;
      *self.gop_input_frameno_start.get_mut(&output_frameno).unwrap() =
        input_frameno;
    }
    // Recompute the in-GOP position; the GOP start may have moved above.
    let output_frameno_in_gop =
      output_frameno - self.gop_output_frameno_start[&output_frameno];
    if output_frameno_in_gop == 0 {
      let fi = FrameInvariants::new_key_frame(
        self.config.clone(),
        self.seq,
        self.gop_input_frameno_start[&output_frameno],
      );
      assert!(!fi.invalid);
      Ok(fi)
    } else {
      let next_keyframe_input_frameno = self.next_keyframe_input_frameno(
        self.gop_input_frameno_start[&output_frameno],
        false,
      );
      let fi = FrameInvariants::new_inter_frame(
        &self.frame_invariants[&(output_frameno - 1)],
        &self.inter_cfg,
        self.gop_input_frameno_start[&output_frameno],
        output_frameno_in_gop,
        next_keyframe_input_frameno,
      );
      assert!(!fi.invalid);
      Ok(fi)
    }
  }
pub(crate) fn done_processing(&self) -> bool {
self.limit.map(|limit| self.frames_processed == limit).unwrap_or(false)
}
  /// Computes lookahead motion vectors and fills in `lookahead_mvs`,
  /// `rec_buffer` and `lookahead_rec_buffer` on the `FrameInvariants`. This
  /// function must be called after every new `FrameInvariants` is initially
  /// computed.
  fn compute_lookahead_motion_vectors(&mut self, output_frameno: u64) {
    let fi = self.frame_invariants.get_mut(&output_frameno).unwrap();
    // We're only interested in valid frames which are not show-existing-frame.
    // Those two don't modify the rec_buffer so there's no need to do anything
    // special about it either, it'll propagate on its own.
    if fi.invalid || fi.show_existing_frame {
      return;
    }
    let frame = self.frame_q[&fi.input_frameno].as_ref().unwrap();
    // TODO: some of this work, like downsampling, could be reused in the
    // actual encoding.
    let mut fs = FrameState::new_with_frame(fi, frame.clone());
    // Build the half- and quarter-resolution planes used by the MV search.
    fs.input_hres.downsample_from(&frame.planes[0]);
    fs.input_hres.pad(fi.width, fi.height);
    fs.input_qres.downsample_from(&fs.input_hres);
    fs.input_qres.pad(fi.width, fi.height);
    // Optional debug output: dump the downsampled planes as images.
    #[cfg(feature = "dump_lookahead_data")]
    {
      let plane = &fs.input_qres;
      image::GrayImage::from_fn(
        plane.cfg.width as u32,
        plane.cfg.height as u32,
        |x, y| image::Luma([plane.p(x as usize, y as usize).as_()]),
      )
      .save(format!("{}-qres.png", fi.input_frameno))
      .unwrap();
      let plane = &fs.input_hres;
      image::GrayImage::from_fn(
        plane.cfg.width as u32,
        plane.cfg.height as u32,
        |x, y| image::Luma([plane.p(x as usize, y as usize).as_()]),
      )
      .save(format!("{}-hres.png", fi.input_frameno))
      .unwrap();
    }
    // Do not modify the next output frame's FrameInvariants.
    if self.output_frameno == output_frameno {
      // We do want to propagate the lookahead_rec_buffer though.
      let rfs = Arc::new(ReferenceFrame {
        order_hint: fi.order_hint,
        // Use the original frame contents.
        frame: frame.clone(),
        input_hres: fs.input_hres,
        input_qres: fs.input_qres,
        cdfs: fs.cdfs,
        // TODO: can we set MVs here? We can probably even compute these MVs
        // right now instead of in encode_tile?
        frame_mvs: fs.frame_mvs,
        output_frameno,
      });
      // Install this frame into every reference slot it refreshes.
      for i in 0..(REF_FRAMES as usize) {
        if (fi.refresh_frame_flags & (1 << i)) != 0 {
          fi.lookahead_rec_buffer.frames[i] = Some(Arc::clone(&rfs));
          fi.lookahead_rec_buffer.deblock[i] = fs.deblock;
        }
      }
      return;
    }
    // Our lookahead_rec_buffer should be filled with correct original frame
    // data from the previous frames. Copy it into rec_buffer because that's
    // what the MV search uses. During the actual encoding rec_buffer is
    // overwritten with its correct values anyway.
    fi.rec_buffer = fi.lookahead_rec_buffer.clone();
    // TODO: as in the encoding code, key frames will have no references.
    // However, for block importance purposes we want key frames to act as
    // P-frames in this instance.
    //
    // Compute the motion vectors.
    let mut blocks = FrameBlocks::new(fi.w_in_b, fi.h_in_b);
    // Search each tile in parallel, coarsest resolution first.
    fi.tiling
      .tile_iter_mut(&mut fs, &mut blocks)
      .collect::<Vec<_>>()
      .into_par_iter()
      .for_each(|mut ctx| {
        let ts = &mut ctx.ts;
        // Compute the quarter-resolution motion vectors.
        let tile_pmvs = build_coarse_pmvs(fi, ts);
        // Compute the half-resolution motion vectors.
        let mut half_res_pmvs = Vec::with_capacity(ts.sb_height * ts.sb_width);
        for sby in 0..ts.sb_height {
          for sbx in 0..ts.sb_width {
            let tile_sbo =
              TileSuperBlockOffset(SuperBlockOffset { x: sbx, y: sby });
            half_res_pmvs
              .push(build_half_res_pmvs(fi, ts, tile_sbo, &tile_pmvs));
          }
        }
        // Compute the full-resolution motion vectors.
        for sby in 0..ts.sb_height {
          for sbx in 0..ts.sb_width {
            let tile_sbo =
              TileSuperBlockOffset(SuperBlockOffset { x: sbx, y: sby });
            build_full_res_pmvs(fi, ts, tile_sbo, &half_res_pmvs);
          }
        }
      });
    // Save the motion vectors to FrameInvariants.
    fi.lookahead_mvs = fs.frame_mvs.clone().into_boxed_slice();
    // Optional debug output: dump the motion vectors in a raw binary format.
    #[cfg(feature = "dump_lookahead_data")]
    {
      use crate::partition::RefType::*;
      let second_ref_frame = if !self.inter_cfg.multiref {
        LAST_FRAME // make second_ref_frame match first
      } else if fi.idx_in_group_output == 0 {
        LAST2_FRAME
      } else {
        ALTREF_FRAME
      };
      // Use the default index, it corresponds to the last P-frame or to the
      // backwards lower reference (so the closest previous frame).
      let index = if second_ref_frame.to_index() != 0 { 0 } else { 1 };
      let mvs = &fs.frame_mvs[index];
      use byteorder::{NativeEndian, WriteBytesExt};
      let mut buf = vec![];
      buf.write_u64::<NativeEndian>(mvs.rows as u64).unwrap();
      buf.write_u64::<NativeEndian>(mvs.cols as u64).unwrap();
      for y in 0..mvs.rows {
        for x in 0..mvs.cols {
          let mv = mvs[y][x];
          buf.write_i16::<NativeEndian>(mv.row).unwrap();
          buf.write_i16::<NativeEndian>(mv.col).unwrap();
        }
      }
      ::std::fs::write(format!("{}-mvs.bin", fi.input_frameno), buf).unwrap();
    }
    // Set lookahead_rec_buffer on this FrameInvariants for future
    // FrameInvariants to pick it up.
    let rfs = Arc::new(ReferenceFrame {
      order_hint: fi.order_hint,
      // Use the original frame contents.
      frame: frame.clone(),
      input_hres: fs.input_hres,
      input_qres: fs.input_qres,
      cdfs: fs.cdfs,
      frame_mvs: fs.frame_mvs,
      output_frameno,
    });
    for i in 0..(REF_FRAMES as usize) {
      if (fi.refresh_frame_flags & (1 << i)) != 0 {
        fi.lookahead_rec_buffer.frames[i] = Some(Arc::clone(&rfs));
        fi.lookahead_rec_buffer.deblock[i] = fs.deblock;
      }
    }
  }
  /// Computes lookahead intra cost approximations and fills in
  /// `lookahead_intra_costs` on the `FrameInvariants`.
  ///
  /// For every importance block the cost is the SATD between the source and
  /// an 8x8 DC_PRED reconstruction of that block.
  fn compute_lookahead_intra_costs(&mut self, output_frameno: u64) {
    let fi = self.frame_invariants.get_mut(&output_frameno).unwrap();
    // We're only interested in valid frames which are not show-existing-frame.
    if fi.invalid || fi.show_existing_frame {
      return;
    }
    let frame = self.frame_q[&fi.input_frameno].as_ref().unwrap();
    // Scratch plane that receives the DC prediction for each block in turn.
    let mut plane_after_prediction = frame.planes[0].clone();
    for y in 0..fi.h_in_imp_b {
      for x in 0..fi.w_in_imp_b {
        // Source pixels for this importance block.
        let plane_org = frame.planes[0].region(Area::Rect {
          x: (x * IMPORTANCE_BLOCK_SIZE) as isize,
          y: (y * IMPORTANCE_BLOCK_SIZE) as isize,
          width: IMPORTANCE_BLOCK_SIZE,
          height: IMPORTANCE_BLOCK_SIZE,
        });
        // TODO: other intra prediction modes.
        let edge_buf = get_intra_edges(
          &frame.planes[0].as_region(),
          TileBlockOffset(BlockOffset { x, y }),
          0,
          0,
          BlockSize::BLOCK_8X8,
          PlaneOffset {
            x: (x * IMPORTANCE_BLOCK_SIZE) as isize,
            y: (y * IMPORTANCE_BLOCK_SIZE) as isize,
          },
          TxSize::TX_8X8,
          fi.sequence.bit_depth,
          Some(PredictionMode::DC_PRED),
        );
        let mut plane_after_prediction_region = plane_after_prediction
          .region_mut(Area::Rect {
            x: (x * IMPORTANCE_BLOCK_SIZE) as isize,
            y: (y * IMPORTANCE_BLOCK_SIZE) as isize,
            width: IMPORTANCE_BLOCK_SIZE,
            height: IMPORTANCE_BLOCK_SIZE,
          });
        PredictionMode::DC_PRED.predict_intra(
          TileRect {
            x: x * IMPORTANCE_BLOCK_SIZE,
            y: y * IMPORTANCE_BLOCK_SIZE,
            width: IMPORTANCE_BLOCK_SIZE,
            height: IMPORTANCE_BLOCK_SIZE,
          },
          &mut plane_after_prediction_region,
          TxSize::TX_8X8,
          fi.sequence.bit_depth,
          &[], // Not used by DC_PRED.
          0,   // Not used by DC_PRED.
          &edge_buf,
        );
        // Re-borrow the same area immutably to compare against the source.
        let plane_after_prediction_region =
          plane_after_prediction.region(Area::Rect {
            x: (x * IMPORTANCE_BLOCK_SIZE) as isize,
            y: (y * IMPORTANCE_BLOCK_SIZE) as isize,
            width: IMPORTANCE_BLOCK_SIZE,
            height: IMPORTANCE_BLOCK_SIZE,
          });
        let intra_cost = get_satd(
          &plane_org,
          &plane_after_prediction_region,
          IMPORTANCE_BLOCK_SIZE,
          IMPORTANCE_BLOCK_SIZE,
          self.config.bit_depth,
        );
        fi.lookahead_intra_costs[y * fi.w_in_imp_b + x] = intra_cost;
      }
    }
  }
  /// Advances the lookahead: runs keyframe detection over newly queued
  /// frames, then builds `FrameInvariants` (with motion vectors and intra
  /// costs) for every output frame that can currently be decided.
  fn compute_lookahead_data(&mut self) {
    // Snapshot all queued frames at or after the next unanalyzed one.
    let lookahead_frames = self
      .frame_q
      .iter()
      .filter_map(|(&input_frameno, frame)| {
        if input_frameno >= self.next_lookahead_frame {
          frame.clone()
        } else {
          None
        }
      })
      .collect::<Vec<_>>();
    let mut lookahead_idx = 0;
    while !self.needs_more_frame_q_lookahead(self.next_lookahead_frame) {
      // Process the next unprocessed frame
      // Start by getting that frame and all frames after it in the queue
      let current_lookahead_frames = &lookahead_frames[lookahead_idx..];
      if current_lookahead_frames.is_empty() {
        // All frames have been processed
        break;
      }
      self.keyframe_detector.analyze_next_frame(
        // The previous frame, if any, for scene-cut comparison. There is
        // none for frame 0 or in still-picture mode.
        if self.next_lookahead_frame == 0 || self.config.still_picture {
          None
        } else {
          self
            .frame_q
            .get(&(self.next_lookahead_frame - 1))
            .map(|f| f.as_ref().unwrap().clone())
        },
        &current_lookahead_frames,
        self.next_lookahead_frame,
        &self.config,
        &self.inter_cfg,
        &mut self.keyframes,
        &self.keyframes_forced,
      );
      self.next_lookahead_frame += 1;
      lookahead_idx += 1;
    }
    // Compute the frame invariants.
    // Stops at the first frame whose properties cannot be decided yet
    // (set_frame_properties returns Err, e.g. NeedMoreData).
    while self.set_frame_properties(self.next_lookahead_output_frameno).is_ok()
    {
      self
        .compute_lookahead_motion_vectors(self.next_lookahead_output_frameno);
      self.compute_lookahead_intra_costs(self.next_lookahead_output_frameno);
      self.next_lookahead_output_frameno += 1;
    }
  }
  /// Computes the block importances for the current output frame.
  ///
  /// Importances are propagated backwards from the lookahead frames to the
  /// frames they reference, weighted by how well inter prediction works
  /// relative to intra (via SATD costs) and by how much of each reference
  /// block a motion-compensated block overlaps.
  fn compute_block_importances(&mut self) {
    // SEF don't need block importances.
    if self.frame_invariants[&self.output_frameno].show_existing_frame {
      return;
    }
    // Get a list of output_framenos that we want to propagate through.
    let output_framenos = self
      .get_rdo_lookahead_frames()
      .map(|(&output_frameno, _)| output_frameno)
      .collect::<Vec<_>>();
    // The first one should be the current output frame.
    assert_eq!(output_framenos[0], self.output_frameno);
    // First, initialize them all with zeros.
    for output_frameno in output_framenos.iter() {
      let fi = self.frame_invariants.get_mut(output_frameno).unwrap();
      for x in fi.block_importances.iter_mut() {
        *x = 0.;
      }
    }
    // Now compute and propagate the block importances from the end. The
    // current output frame will get its block importances from the future
    // frames.
    const MV_UNITS_PER_PIXEL: i64 = 8;
    const BLOCK_SIZE_IN_MV_UNITS: i64 =
      IMPORTANCE_BLOCK_SIZE as i64 * MV_UNITS_PER_PIXEL;
    const BLOCK_AREA_IN_MV_UNITS: i64 =
      BLOCK_SIZE_IN_MV_UNITS * BLOCK_SIZE_IN_MV_UNITS;
    for &output_frameno in output_framenos.iter().skip(1).rev() {
      // Remove fi from the map temporarily and put it back in in the end of
      // the iteration. This is required because we need to mutably borrow
      // referenced fis from the map, and that wouldn't be possible if this was
      // an active borrow.
      let fi = self.frame_invariants.remove(&output_frameno).unwrap();
      // TODO: see comment above about key frames not having references.
      if fi.frame_type == FrameType::KEY {
        self.frame_invariants.insert(output_frameno, fi);
        continue;
      }
      let frame = self.frame_q[&fi.input_frameno].as_ref().unwrap();
      // There can be at most 3 of these.
      let mut unique_indices = ArrayVec::<[_; 3]>::new();
      // Deduplicate ref_frames by the reference slot they resolve to.
      for (mv_index, &rec_index) in fi.ref_frames.iter().enumerate() {
        if unique_indices.iter().find(|&&(_, r)| r == rec_index).is_none() {
          unique_indices.push((mv_index, rec_index));
        }
      }
      // Compute and propagate the importance, split evenly between the
      // referenced frames.
      for &(mv_index, rec_index) in unique_indices.iter() {
        // Use rec_buffer here rather than lookahead_rec_buffer because
        // rec_buffer still contains the reference frames for the current frame
        // (it's only overwritten when the frame is encoded), while
        // lookahead_rec_buffer already contains reference frames for the next
        // frame (for the reference propagation to work correctly).
        let reference =
          fi.rec_buffer.frames[rec_index as usize].as_ref().unwrap();
        let reference_frame = &reference.frame;
        let reference_output_frameno = reference.output_frameno;
        // We should never use frame as its own reference.
        assert_ne!(reference_output_frameno, output_frameno);
        for y in 0..fi.h_in_imp_b {
          for x in 0..fi.w_in_imp_b {
            let mv = fi.lookahead_mvs[mv_index][y * 2][x * 2];
            // Coordinates of the top-left corner of the reference block, in MV
            // units.
            let reference_x =
              x as i64 * BLOCK_SIZE_IN_MV_UNITS + mv.col as i64;
            let reference_y =
              y as i64 * BLOCK_SIZE_IN_MV_UNITS + mv.row as i64;
            let plane_org = frame.planes[0].region(Area::Rect {
              x: (x * IMPORTANCE_BLOCK_SIZE) as isize,
              y: (y * IMPORTANCE_BLOCK_SIZE) as isize,
              width: IMPORTANCE_BLOCK_SIZE,
              height: IMPORTANCE_BLOCK_SIZE,
            });
            let plane_ref = reference_frame.planes[0].region(Area::Rect {
              x: reference_x as isize / MV_UNITS_PER_PIXEL as isize,
              y: reference_y as isize / MV_UNITS_PER_PIXEL as isize,
              width: IMPORTANCE_BLOCK_SIZE,
              height: IMPORTANCE_BLOCK_SIZE,
            });
            let inter_cost = get_satd(
              &plane_org,
              &plane_ref,
              IMPORTANCE_BLOCK_SIZE,
              IMPORTANCE_BLOCK_SIZE,
              self.config.bit_depth,
            ) as f32;
            let intra_cost =
              fi.lookahead_intra_costs[y * fi.w_in_imp_b + x] as f32;
            let future_importance =
              fi.block_importances[y * fi.w_in_imp_b + x];
            // Fraction of importance to propagate: 0 when inter prediction
            // is no better than intra, approaching 1 as inter_cost -> 0.
            let propagate_fraction = (1. - inter_cost / intra_cost).max(0.);
            let propagate_amount = (intra_cost + future_importance)
              * propagate_fraction
              / unique_indices.len() as f32;
            if let Some(reference_frame_block_importances) = self
              .frame_invariants
              .get_mut(&reference_output_frameno)
              .map(|fi| &mut fi.block_importances)
            {
              // Adds `propagate_amount * fraction` to the importance of the
              // block whose top-left corner (in MV units) is given, if it
              // lies inside the frame.
              let mut propagate =
                |block_x_in_mv_units, block_y_in_mv_units, fraction| {
                  let x = block_x_in_mv_units / BLOCK_SIZE_IN_MV_UNITS;
                  let y = block_y_in_mv_units / BLOCK_SIZE_IN_MV_UNITS;
                  // TODO: propagate partially if the block is partially off-frame
                  // (possible on right and bottom edges)?
                  if x >= 0
                    && y >= 0
                    && (x as usize) < fi.w_in_imp_b
                    && (y as usize) < fi.h_in_imp_b
                  {
                    reference_frame_block_importances
                      [y as usize * fi.w_in_imp_b + x as usize] +=
                      propagate_amount * fraction;
                  }
                };
              // Coordinates of the top-left corner of the block intersecting the
              // reference block from the top-left.
              let top_left_block_x = (reference_x
                - if reference_x < 0 {
                  BLOCK_SIZE_IN_MV_UNITS - 1
                } else {
                  0
                })
                / BLOCK_SIZE_IN_MV_UNITS
                * BLOCK_SIZE_IN_MV_UNITS;
              let top_left_block_y = (reference_y
                - if reference_y < 0 {
                  BLOCK_SIZE_IN_MV_UNITS - 1
                } else {
                  0
                })
                / BLOCK_SIZE_IN_MV_UNITS
                * BLOCK_SIZE_IN_MV_UNITS;
              debug_assert!(reference_x >= top_left_block_x);
              debug_assert!(reference_y >= top_left_block_y);
              // The reference block straddles up to four aligned blocks;
              // split the propagated amount by overlap area.
              let top_right_block_x =
                top_left_block_x + BLOCK_SIZE_IN_MV_UNITS;
              let top_right_block_y = top_left_block_y;
              let bottom_left_block_x = top_left_block_x;
              let bottom_left_block_y =
                top_left_block_y + BLOCK_SIZE_IN_MV_UNITS;
              let bottom_right_block_x = top_right_block_x;
              let bottom_right_block_y = bottom_left_block_y;
              let top_left_block_fraction = ((top_right_block_x - reference_x)
                * (bottom_left_block_y - reference_y))
                as f32
                / BLOCK_AREA_IN_MV_UNITS as f32;
              propagate(
                top_left_block_x,
                top_left_block_y,
                top_left_block_fraction,
              );
              let top_right_block_fraction =
                ((reference_x + BLOCK_SIZE_IN_MV_UNITS - top_right_block_x)
                  * (bottom_left_block_y - reference_y))
                  as f32
                  / BLOCK_AREA_IN_MV_UNITS as f32;
              propagate(
                top_right_block_x,
                top_right_block_y,
                top_right_block_fraction,
              );
              let bottom_left_block_fraction = ((top_right_block_x
                - reference_x)
                * (reference_y + BLOCK_SIZE_IN_MV_UNITS - bottom_left_block_y))
                as f32
                / BLOCK_AREA_IN_MV_UNITS as f32;
              propagate(
                bottom_left_block_x,
                bottom_left_block_y,
                bottom_left_block_fraction,
              );
              let bottom_right_block_fraction =
                ((reference_x + BLOCK_SIZE_IN_MV_UNITS - top_right_block_x)
                  * (reference_y + BLOCK_SIZE_IN_MV_UNITS
                    - bottom_left_block_y)) as f32
                  / BLOCK_AREA_IN_MV_UNITS as f32;
              propagate(
                bottom_right_block_x,
                bottom_right_block_y,
                bottom_right_block_fraction,
              );
            }
          }
        }
      }
      self.frame_invariants.insert(output_frameno, fi);
    }
    // Get the final block importance values for the current output frame.
    if !output_framenos.is_empty() {
      let fi = self.frame_invariants.get_mut(&output_framenos[0]).unwrap();
      for y in 0..fi.h_in_imp_b {
        for x in 0..fi.w_in_imp_b {
          let intra_cost =
            fi.lookahead_intra_costs[y * fi.w_in_imp_b + x] as f32;
          let importance = &mut fi.block_importances[y * fi.w_in_imp_b + x];
          // Compress accumulated importances to a log scale relative to the
          // intra cost; blocks with zero intra cost get zero importance.
          if intra_cost > 0. {
            *importance = (1. + *importance / intra_cost).log2();
          } else {
            *importance = 0.;
          }
          assert!(*importance >= 0.);
        }
      }
      // Optional debug output: dump importances in a raw binary format.
      #[cfg(feature = "dump_lookahead_data")]
      {
        let data = &fi.block_importances;
        use byteorder::{NativeEndian, WriteBytesExt};
        let mut buf = vec![];
        buf.write_u64::<NativeEndian>(fi.h_in_imp_b as u64).unwrap();
        buf.write_u64::<NativeEndian>(fi.w_in_imp_b as u64).unwrap();
        for y in 0..fi.h_in_imp_b {
          for x in 0..fi.w_in_imp_b {
            let importance = data[y * fi.w_in_imp_b + x];
            buf.write_f32::<NativeEndian>(importance).unwrap();
          }
        }
        ::std::fs::write(format!("{}-imps.bin", fi.input_frameno), buf)
          .unwrap();
      }
    }
  }
  /// Encodes and returns the next packet.
  ///
  /// Returns `LimitReached` when the configured frame limit is hit,
  /// `NeedMoreData` when lookahead is not full or the input frame is not
  /// queued, `NotReady` when rate control needs more information, and
  /// `Encoded` for frames that produce no output packet (not shown yet).
  pub fn receive_packet(&mut self) -> Result<Packet<T>, EncoderStatus> {
    if self.done_processing() {
      return Err(EncoderStatus::LimitReached);
    }
    if self.needs_more_fi_lookahead() {
      return Err(EncoderStatus::NeedMoreData);
    }
    // Find the next output_frameno corresponding to a non-skipped frame.
    self.output_frameno = self
      .frame_invariants
      .iter()
      .skip_while(|(&output_frameno, _)| output_frameno < self.output_frameno)
      .find(|(_, fi)| !fi.invalid)
      .map(|(&output_frameno, _)| output_frameno)
      .ok_or(EncoderStatus::NeedMoreData)?; // TODO: doesn't play well with the below check?
    let input_frameno =
      self.frame_invariants[&self.output_frameno].input_frameno;
    if !self.needs_more_frames(input_frameno) {
      return Err(EncoderStatus::LimitReached);
    }
    // Compute the block importances for the current output frame.
    self.compute_block_importances();
    let cur_output_frameno = self.output_frameno;
    let ret = {
      let fi = self.frame_invariants.get(&cur_output_frameno).unwrap();
      if fi.show_existing_frame {
        // Show-existing-frame: emit a tiny SEF packet, no re-encode.
        if !self.rc_state.ready() {
          return Err(EncoderStatus::NotReady);
        }
        let mut fs = FrameState::new(fi);
        let sef_data = encode_show_existing_frame(fi, &mut fs);
        let bits = (sef_data.len() * 8) as i64;
        self.packet_data.extend(sef_data);
        self.rc_state.update_state(
          bits,
          FRAME_SUBTYPE_SEF,
          fi.show_frame,
          0,
          false,
          false,
        );
        let rec = if fi.show_frame { Some(fs.rec) } else { None };
        self.output_frameno += 1;
        let input_frameno = fi.input_frameno;
        let frame_type = fi.frame_type;
        let bit_depth = fi.sequence.bit_depth;
        let qp = fi.base_q_idx;
        self.finalize_packet(
          rec,
          input_frameno,
          frame_type,
          bit_depth,
          qp,
          fs.enc_stats,
        )
      } else if let Some(f) = self.frame_q.get(&fi.input_frameno) {
        if !self.rc_state.ready() {
          return Err(EncoderStatus::NotReady);
        }
        if let Some(frame) = f.clone() {
          // Regular frame: pick quantizers, optionally trial-encode for
          // rate control, then do the real encode.
          let fti = fi.get_frame_subtype();
          let qps = self.rc_state.select_qi(
            self,
            self.output_frameno,
            fti,
            self.maybe_prev_log_base_q,
          );
          let fi = self.frame_invariants.get_mut(&cur_output_frameno).unwrap();
          fi.set_quantizers(&qps);
          if self.rc_state.needs_trial_encode(fti) {
            // Trial encode: measure the size at the chosen quantizers, feed
            // it back to rate control, and re-select the quantizers.
            let mut fs = FrameState::new_with_frame(fi, frame.clone());
            let data = encode_frame(fi, &mut fs);
            self.rc_state.update_state(
              (data.len() * 8) as i64,
              fti,
              fi.show_frame,
              qps.log_target_q,
              true,
              false,
            );
            let qps = self.rc_state.select_qi(
              self,
              self.output_frameno,
              fti,
              self.maybe_prev_log_base_q,
            );
            let fi =
              self.frame_invariants.get_mut(&cur_output_frameno).unwrap();
            fi.set_quantizers(&qps);
          }
          let fi = self.frame_invariants.get_mut(&cur_output_frameno).unwrap();
          let mut fs = FrameState::new_with_frame(fi, frame.clone());
          let data = encode_frame(fi, &mut fs);
          let enc_stats = fs.enc_stats.clone();
          self.maybe_prev_log_base_q = Some(qps.log_base_q);
          // TODO: Add support for dropping frames.
          self.rc_state.update_state(
            (data.len() * 8) as i64,
            fti,
            fi.show_frame,
            qps.log_target_q,
            false,
            false,
          );
          self.packet_data.extend(data);
          fs.rec.pad(fi.width, fi.height);
          // TODO avoid the clone by having rec Arc.
          let rec = if fi.show_frame { Some(fs.rec.clone()) } else { None };
          update_rec_buffer(self.output_frameno, fi, fs);
          // Copy persistent fields into subsequent FrameInvariants.
          let rec_buffer = fi.rec_buffer.clone();
          for subsequent_fi in self
            .frame_invariants
            .iter_mut()
            .skip_while(|(&output_frameno, _)| {
              output_frameno <= cur_output_frameno
            })
            .map(|(_, fi)| fi)
            // Here we want the next valid non-show-existing-frame inter frame.
            //
            // Copying to show-existing-frame frames isn't actually required
            // for correct encoding, but it's needed for the reconstruction to
            // work correctly.
            .filter(|fi| !fi.invalid)
            .take_while(|fi| fi.frame_type != FrameType::KEY)
          {
            subsequent_fi.rec_buffer = rec_buffer.clone();
            subsequent_fi.set_ref_frame_sign_bias();
            // Stop after the first non-show-existing-frame.
            if !subsequent_fi.show_existing_frame {
              break;
            }
          }
          let fi = self.frame_invariants.get(&self.output_frameno).unwrap();
          self.output_frameno += 1;
          if fi.show_frame {
            let input_frameno = fi.input_frameno;
            let frame_type = fi.frame_type;
            let bit_depth = fi.sequence.bit_depth;
            let qp = fi.base_q_idx;
            self.finalize_packet(
              rec,
              input_frameno,
              frame_type,
              bit_depth,
              qp,
              enc_stats,
            )
          } else {
            // Not shown: data stays buffered until a shown frame flushes it.
            Err(EncoderStatus::Encoded)
          }
        } else {
          Err(EncoderStatus::NeedMoreData)
        }
      } else {
        Err(EncoderStatus::NeedMoreData)
      }
    };
    // Once a packet is produced, drop state that can no longer be needed.
    if let Ok(ref pkt) = ret {
      self.garbage_collect(pkt.input_frameno);
    }
    ret
  }
fn finalize_packet(
&mut self, rec: Option<Frame<T>>, input_frameno: u64,
frame_type: FrameType, bit_depth: usize, qp: u8, enc_stats: EncoderStats,
) -> Result<Packet<T>, EncoderStatus> {
let data = self.packet_data.clone();
self.packet_data.clear();
if write_temporal_delimiter(&mut self.packet_data).is_err() {
return Err(EncoderStatus::Failure);
}
let mut psnr = None;
if self.config.show_psnr {
if let Some(ref rec) = rec {
let original_frame = self.get_frame(input_frameno);
psnr = Some(calculate_frame_psnr(&*original_frame, rec, bit_depth));
}
}
self.frames_processed += 1;
Ok(Packet { data, rec, input_frameno, frame_type, psnr, qp, enc_stats })
}
  /// Drops queued frames and per-output-frame state that encoding has moved
  /// past, bounding memory use as the encode progresses.
  fn garbage_collect(&mut self, cur_input_frameno: u64) {
    if cur_input_frameno == 0 {
      return;
    }
    // Evict queued input frames older than the current input frame.
    let frame_q_start = self.frame_q.keys().next().cloned().unwrap_or(0);
    for i in frame_q_start..cur_input_frameno {
      self.frame_q.remove(&i);
    }
    if self.output_frameno < 2 {
      return;
    }
    // Evict per-output-frame state up to, but not including, the previous
    // output frame (output_frameno - 1 is kept).
    let fi_start = self.frame_invariants.keys().next().cloned().unwrap_or(0);
    for i in fi_start..(self.output_frameno - 1) {
      self.frame_invariants.remove(&i);
      self.gop_output_frameno_start.remove(&i);
      self.gop_input_frameno_start.remove(&i);
    }
  }
  /// Counts the number of output frames of each subtype in the next
  /// reservoir_frame_delay temporal units (needed for rate control).
  /// Returns the number of output frames (excluding SEF frames) and output TUs
  /// until the last keyframe in the next reservoir_frame_delay temporal units,
  /// or the end of the interval, whichever comes first.
  /// The former is needed because it indicates the number of rate estimates we
  /// will make.
  /// The latter is needed because it indicates the number of times new bitrate
  /// is added to the buffer.
  pub(crate) fn guess_frame_subtypes(
    &self, nframes: &mut [i32; FRAME_NSUBTYPES + 1],
    reservoir_frame_delay: i32,
  ) -> (i32, i32) {
    for fti in 0..=FRAME_NSUBTYPES {
      nframes[fti] = 0;
    }
    // Two-pass calls this function before receive_packet(), and in particular
    // before the very first send_frame(), when the following maps are empty.
    // In this case, return 0 as the default value.
    let mut prev_keyframe_input_frameno = *self
      .gop_input_frameno_start
      .get(&self.output_frameno)
      .unwrap_or_else(|| {
        assert!(self.output_frameno == 0);
        &0
      });
    let mut prev_keyframe_output_frameno = *self
      .gop_output_frameno_start
      .get(&self.output_frameno)
      .unwrap_or_else(|| {
        assert!(self.output_frameno == 0);
        &0
      });
    let mut prev_keyframe_ntus = 0;
    // Does not include SEF frames.
    let mut prev_keyframe_nframes = 0;
    // Per-subtype counts accumulated since the last keyframe; flushed into
    // `nframes` when the next keyframe is found.
    let mut acc: [i32; FRAME_NSUBTYPES + 1] = [0; FRAME_NSUBTYPES + 1];
    // Updates the frame counts with the accumulated values when we hit a
    // keyframe.
    fn collect_counts(
      nframes: &mut [i32; FRAME_NSUBTYPES + 1],
      acc: &mut [i32; FRAME_NSUBTYPES + 1],
    ) {
      for fti in 0..=FRAME_NSUBTYPES {
        nframes[fti] += acc[fti];
        acc[fti] = 0;
      }
      acc[FRAME_SUBTYPE_I] += 1;
    }
    let mut output_frameno = self.output_frameno;
    let mut ntus = 0;
    // Does not include SEF frames.
    let mut nframes_total = 0;
    while ntus < reservoir_frame_delay {
      let output_frameno_in_gop =
        output_frameno - prev_keyframe_output_frameno;
      // Decide whether this output frame is a keyframe, preferring real
      // FrameInvariants when they exist.
      let is_kf = if let Some(fi) = self.frame_invariants.get(&output_frameno)
      {
        if fi.frame_type == FrameType::KEY {
          prev_keyframe_input_frameno = fi.input_frameno;
          // We do not currently use forward keyframes, so they should always
          // end the current TU (thus we always increment ntus below).
          debug_assert!(fi.show_frame);
          true
        } else {
          false
        }
      } else {
        // It is possible to be invoked for the first time from twopass_out()
        // before receive_packet() is called, in which case frame_invariants
        // will not be populated.
        // Force the first frame in each GOP to be a keyframe in that case.
        output_frameno_in_gop == 0
      };
      if is_kf {
        collect_counts(nframes, &mut acc);
        prev_keyframe_output_frameno = output_frameno;
        prev_keyframe_ntus = ntus;
        prev_keyframe_nframes = nframes_total;
        output_frameno += 1;
        ntus += 1;
        nframes_total += 1;
        continue;
      }
      let idx_in_group_output =
        self.inter_cfg.get_idx_in_group_output(output_frameno_in_gop);
      let input_frameno = prev_keyframe_input_frameno
        + self
          .inter_cfg
          .get_order_hint(output_frameno_in_gop, idx_in_group_output)
          as u64;
      // For rate control purposes, ignore any limit on frame count that has
      // been set.
      // We pretend that we will keep encoding frames forever to prevent the
      // control loop from driving us into the rails as we come up against a
      // hard stop (with no more chance to correct outstanding errors).
      let next_keyframe_input_frameno =
        self.next_keyframe_input_frameno(prev_keyframe_input_frameno, true);
      // If we are re-ordering, we may skip some output frames in the final
      // re-order group of the GOP.
      if input_frameno >= next_keyframe_input_frameno {
        // If we have encoded enough whole groups to reach the next keyframe,
        // then start the next keyframe gop.
        if 1
          + (output_frameno - prev_keyframe_output_frameno)
            / self.inter_cfg.group_output_len
            * self.inter_cfg.group_input_len
          >= next_keyframe_input_frameno - prev_keyframe_input_frameno
        {
          collect_counts(nframes, &mut acc);
          prev_keyframe_input_frameno = input_frameno;
          prev_keyframe_output_frameno = output_frameno;
          prev_keyframe_ntus = ntus;
          prev_keyframe_nframes = nframes_total;
          // We do not currently use forward keyframes, so they should always
          // end the current TU.
          debug_assert!(self.inter_cfg.get_show_frame(idx_in_group_output));
          output_frameno += 1;
          ntus += 1;
        }
        output_frameno += 1;
        continue;
      }
      if self.inter_cfg.get_show_existing_frame(idx_in_group_output) {
        acc[FRAME_SUBTYPE_SEF] += 1;
      } else {
        // TODO: Implement golden P-frames.
        let fti = FRAME_SUBTYPE_P
          + (self.inter_cfg.get_level(idx_in_group_output) as usize);
        acc[fti] += 1;
        nframes_total += 1;
      }
      if self.inter_cfg.get_show_frame(idx_in_group_output) {
        ntus += 1;
      }
      output_frameno += 1;
    }
    if prev_keyframe_output_frameno <= self.output_frameno {
      // If there were no keyframes at all, or only the first frame was a
      // keyframe, the accumulators never flushed and still contain counts for
      // the entire buffer.
      // In both cases, we return these counts.
      collect_counts(nframes, &mut acc);
      (nframes_total, ntus)
    } else {
      // Otherwise, we discard what remains in the accumulators as they contain
      // the counts from and past the last keyframe.
      (prev_keyframe_nframes, prev_keyframe_ntus)
    }
  }
}
|
use common::race_run::{self, NewRaceRun};
use db::models::*;
use db::schema::*;
use diesel::QueryResult;
use diesel::prelude::*;
use diesel::result;
use rocket::http::Status;
use rocket::request::{self, FromRequest};
use rocket::{Outcome, Request, State};
use server::db_conn::DbConn;
use server::db_conn::Pool;
use server::runstore::RunStore;
/// Executes run-store queries against the database using a pooled
/// connection checked out per request (see the `FromRequest` impl).
pub struct DbExecuter {
    // Database connection owned for the lifetime of this executer.
    conn: DbConn,
}
impl<'a, 'r> FromRequest<'a, 'r> for DbExecuter {
    type Error = ();

    /// Builds a `DbExecuter` for the request by checking a connection out of
    /// the managed pool; answers 503 Service Unavailable when no connection
    /// can be obtained.
    fn from_request(request: &'a Request<'r>) -> request::Outcome<DbExecuter, ()> {
        // `?` forwards a failed State guard outcome directly.
        let pool = request.guard::<State<Pool>>()?;
        match pool.get() {
            Ok(conn) => Outcome::Success(Self { conn: DbConn(conn) }),
            Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())),
        }
    }
}
impl RunStore for DbExecuter {
    type Error = result::Error;
    type Key = i32;

    /// Looks up a race run by id, returning `Ok(None)` when no row matches.
    fn get_racerun(&self, id: &Self::Key) -> QueryResult<Option<NewRaceRun>> {
        // `id` is already a reference; no need for the extra `&id`.
        self.get_run_by_id(id).optional()
    }

    /// Persists a new race run together with its zone entries and level-ups,
    /// returning the id of the created run row. Fails on the first insert
    /// error.
    fn create_racerun(&self, new_run: &NewRaceRun) -> QueryResult<Self::Key> {
        let run = create_run(&self.conn, new_run.duration_in_seconds as i32)?;
        // try_for_each short-circuits on the first failed insert without
        // collecting the inserted rows into a throwaway Vec.
        new_run.zones.iter().try_for_each(|zone| {
            create_zoneentry(
                &self.conn,
                zone.seconds_after_start as i32,
                run.id,
                &zone.name,
            )
            .map(drop)
        })?;
        new_run.levels.iter().try_for_each(|level| {
            create_levelup(
                &self.conn,
                level.seconds_after_start as i32,
                run.id,
                level.level as i16,
            )
            .map(drop)
        })?;
        Ok(run.id)
    }
}
impl DbExecuter {
    /// Loads a run row plus its associated zone entries and level-ups, and
    /// converts them into the transport-level `NewRaceRun` representation.
    fn get_run_by_id(&self, id: &i32) -> QueryResult<NewRaceRun> {
        let run: Run = runs::table.find(id).get_result(&*self.conn)?;
        let db_zones: Vec<ZoneEntry> = ZoneEntry::belonging_to(&run).load(&*self.conn)?;
        let db_levels: Vec<LevelUp> = LevelUp::belonging_to(&run).load(&*self.conn)?;
        // Widen the stored i32 durations to u64 for the transport type.
        let zones = db_zones
            .into_iter()
            .map(|zone| {
                race_run::ZoneEntry::new(zone.name, zone.duration_in_seconds as u64)
            })
            .collect();
        let levels = db_levels
            .into_iter()
            .map(|levelup| {
                race_run::LevelUp::new(levelup.level as u8, levelup.duration_in_seconds as u64)
            })
            .collect();
        let new_race_run = NewRaceRun::new(run.duration_in_seconds as u64, levels, zones);
        Ok(new_race_run)
    }
}
|
use crate::helpers::{pos, span};
use graphql_parser::query::refs::{FieldRef, SelectionSetRef};
use graphql_parser::query::{Directive, Txt, Value};
use graphql_parser::{schema, schema::Type};
lazy_static! {
    // Shared `__typename` query-field node, cloned whenever the meta field
    // must be injected into a selection set.
    static ref TYPENAME_QUERY_FIELD: FieldRef<'static> = FieldRef {
        position: pos(),
        alias: None,
        name: TYPENAME_FIELD_NAME,
        arguments: &EMPTY_ARGS,
        directives: &EMPTY_DIRECTIVES,
        selection_set: SelectionSetRef {
            span: span(),
            items: vec![],
        },
    };
    // Schema-side definition of `__typename`: a non-null String taking no
    // arguments and carrying no directives.
    static ref TYPENAME_SCHEMA_FIELD: schema::Field<'static> = schema::Field {
        position: pos(),
        description: None,
        name: TYPENAME_FIELD_NAME,
        arguments: vec![],
        field_type: Type::NonNullType(Box::new(Type::NamedType("String"))),
        directives: vec![]
    };
}
// Well-known GraphQL meta-field and operation-type names.
pub static TYPENAME_FIELD_NAME: &str = "__typename";
pub static INTROSPECTION_SCHEMA_FIELD_NAME: &str = "__schema";
pub static INTROSPECTION_TYPE_FIELD_NAME: &str = "__type";
pub static QUERY_TYPE_NAME: &str = "Query";
pub static MUTATION_TYPE_NAME: &str = "Mutation";
// Empty argument/directive lists referenced by the lazy statics in this file.
pub static EMPTY_ARGS: Vec<(Txt<'static>, Value<'static>)> = vec![];
pub static EMPTY_DIRECTIVES: Vec<Directive<'static>> = vec![];
/// Returns the shared schema definition of the `__typename` field.
pub(crate) fn typename_field_def<'a>() -> &'a schema::Field<'a> {
    &*TYPENAME_SCHEMA_FIELD
}
/// Returns a fresh clone of the `__typename` query-field node.
pub(crate) fn typename_field_node<'a>() -> FieldRef<'a> {
    (*TYPENAME_QUERY_FIELD).clone()
}
|
use std::collections::HashSet;
/// Sums a list of frequency changes, returning the final frequency relative
/// to a starting frequency of 0.
///
/// Takes a slice instead of `&Vec<i32>` (clippy `ptr_arg`); existing
/// `&Vec<i32>` call sites keep working through deref coercion.
pub fn calculate_frequency(frequency_changes: &[i32]) -> i32 {
    frequency_changes.iter().sum()
}
/// Applies the change list repeatedly (cycling back to the start) and returns
/// the first running total reached twice. The starting frequency 0 counts as
/// already seen.
///
/// Takes a slice instead of `&Vec<i32>`; `&Vec<i32>` call sites still coerce.
///
/// # Panics
/// Panics on an empty change list (the previous version spun forever). A
/// non-empty list whose running totals never repeat still never returns, as
/// before.
pub fn calculate_first_frequency_twice(frequency_changes: &[i32]) -> i32 {
    let mut current = 0;
    let mut seen_frequencies = HashSet::new();
    seen_frequencies.insert(current);
    // `cycle()` replays the change list endlessly, mirroring the original
    // `loop { for .. }` structure.
    for change in frequency_changes.iter().cycle() {
        current += change;
        // `insert` returns false when the value was already present — exactly
        // the "seen twice" condition.
        if !seen_frequencies.insert(current) {
            return current;
        }
    }
    // `cycle()` over a non-empty slice never terminates, so only an empty
    // input can fall through to here.
    panic!("calculate_first_frequency_twice requires a non-empty change list")
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn it_calculates_frequency() {
        // The final frequency is simply the sum of all changes.
        assert_eq!(calculate_frequency(&vec![1, -2, 3, 1]), 3);
        assert_eq!(calculate_frequency(&vec![1, 1, 1]), 3);
        assert_eq!(calculate_frequency(&vec![1, 1, -2]), 0);
        assert_eq!(calculate_frequency(&vec![-1, -2, -3]), -6);
    }
    #[test]
    fn it_calculates_first_frequency_twice() {
        // The change list cycles until some running total repeats.
        assert_eq!(calculate_first_frequency_twice(&vec![1, -2, 3, 1, 1, -2]), 2);
        assert_eq!(calculate_first_frequency_twice(&vec![1, -1]), 0);
        assert_eq!(calculate_first_frequency_twice(&vec![3, 3, 4, -2, -4]), 10);
        assert_eq!(calculate_first_frequency_twice(&vec![-6, 3, 8, 5, -6]), 5);
        assert_eq!(calculate_first_frequency_twice(&vec![7, 7, -2, -7, -4]), 14);
    }
}
|
/// Demo entry point: matches a value against a single catch-all arm (a no-op).
pub fn main() {
    let default = 2;
    match default {
        // Catch-all arm — matches any value and does nothing.
        // (Original comment, translated: "closure-style notation".)
        _ => {}
    }
}
|
extern crate abxml;
use abxml::decoder::Decoder;
use abxml::model::builder::Xml;
use abxml::model::owned::{AttributeBuf, StringTableBuf, XmlTagEndBuf, XmlTagStartBuf};
#[test]
fn it_can_generate_a_decoder_from_a_buffer() {
    // Minimal resource buffer accepted by `Decoder::from_buffer`.
    let arsc = vec![2, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    let mut xml = Xml::default();
    // String table; the indices used below appear to be 2 = tag name,
    // 3 = attribute key, 4 = attribute value — confirm against abxml's API.
    let mut st = StringTableBuf::default();
    st.add_string("Some string".to_string());
    // NOTE(review): "srtring" is a typo, but the string is unreferenced
    // filler, so fixing it would only churn test data.
    st.add_string("Another srtring".to_string());
    st.add_string("start_tag".to_string());
    st.add_string("key".to_string());
    st.add_string("value".to_string());
    let attribute = AttributeBuf::new(0xFFFF_FFFF, 3, 0xFFFF_FFFF, 3 << 24, 4);
    let mut tag_start = XmlTagStartBuf::new(2, 0, 0xFFFF_FFFF, 2, 0, 0);
    tag_start.add_attribute(attribute);
    xml.push_owned(Box::new(st));
    xml.push_owned(Box::new(tag_start));
    xml.push_owned(Box::new(XmlTagEndBuf::new(90)));
    let xml_content = xml.into_vec().unwrap();
    let decoder = Decoder::from_buffer(&arsc).unwrap();
    let xml_visitor = decoder.xml_visitor(&xml_content).unwrap();
    let out = xml_visitor.into_string().unwrap();
    // The decoded document should be a single self-closing tag with one
    // attribute, preceded by the standard XML declaration.
    let inner_expected = "<start_tag key=\"value\" />";
    let expected = format!(
        "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n{}",
        inner_expected
    );
    assert_eq!(expected, out);
}
|
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::unnecessary_mut_passed)]
pub use pallet_deip::*;
use sp_std::vec::Vec;
use codec::{Codec};
// Here we declare the runtime API. It is implemented it the `impl` block in
// runtime amalgamator file (the `runtime/src/lib.rs`)
sp_api::decl_runtime_apis! {
    // Read-only queries over pallet-deip storage, exposed by the runtime to
    // the client side. (`//` comments are stripped before macro expansion.)
    pub trait DeipApi<AccountId>
    where AccountId: Codec,
    {
        fn get_projects() -> Vec<(ProjectId, AccountId)>;
        fn get_project(project_id: &ProjectId) -> Project<H256, AccountId>;
        fn get_domains() -> Vec<Domain>;
        fn get_domain(domain_id: &DomainId) -> Domain;
        fn get_project_content_list(content_ids: &Option<Vec<ProjectContentId>>) -> Vec<ProjectContent<H256, AccountId>>;
        fn get_project_content(project_id: &ProjectId, project_content_id: &ProjectContentId) -> ProjectContent<H256, AccountId>;
        fn get_nda_list() -> Vec<Nda<H256, AccountId, u64>>;
        fn get_nda(nda_id: &NdaId) -> Nda<H256, AccountId, u64>;
        fn get_reviews() -> Vec<Review<H256, AccountId>>;
        fn get_review(review_id: &ReviewId) -> Review<H256, AccountId>;
    }
}
|
use std::{borrow::Cow, str::FromStr};
use rosu_v2::prelude::GameMode;
use twilight_http::request::channel::reaction::RequestReactionType;
use twilight_model::{channel::ReactionType, id::Id};
use crate::CONFIG;
use super::constants::common_literals::OSU;
/// Every emote the bot can reference: osu! mode icons, service icons,
/// pagination controls, or an arbitrary named custom emote.
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum Emote {
    // osu! game modes
    Std,
    Tko,
    Ctb,
    Mna,
    Osu,
    // Services / indicators
    Twitch,
    Tracking,
    // Pagination / navigation controls
    Minimize,
    Expand,
    JumpStart,
    MultiStepBack,
    SingleStepBack,
    MyPosition,
    SingleStep,
    MultiStep,
    JumpEnd,
    Miss,
    // A custom emote referenced directly by its name.
    Custom(&'static str),
}
impl Emote {
    /// Chat-message representation: `Custom` renders as `:name:`, everything
    /// else is read from the global `CONFIG` (panics if the config has not
    /// been initialized or lacks the emote).
    pub fn text(self) -> Cow<'static, str> {
        if let Self::Custom(emote) = self {
            format!(":{emote}:").into()
        } else {
            CONFIG.get().unwrap().emotes.get(&self).unwrap().into()
        }
    }
    /// Reaction payload for twilight's HTTP API.
    ///
    /// NOTE(review): `Custom` variants are sent as *Unicode* reactions here —
    /// confirm `Custom` is only ever constructed with unicode emoji.
    pub fn request_reaction_type(&self) -> RequestReactionType<'_> {
        let emotes = &CONFIG.get().unwrap().emotes;
        let emote = if let Self::Custom(name) = self {
            return RequestReactionType::Unicode { name };
        } else {
            emotes.get(self)
        };
        // The config value is assumed to be a `<:name:id>` emote literal,
        // which `split_emote` takes apart.
        let (id, name) = emote
            .unwrap_or_else(|| panic!("No {self:?} emote in config"))
            .split_emote();
        RequestReactionType::Custom {
            id: Id::new(id),
            name: Some(name),
        }
    }
    /// Same as [`Self::request_reaction_type`] but producing the model-layer
    /// `ReactionType` (owns its name; never animated).
    #[allow(dead_code)]
    pub fn reaction_type(&self) -> ReactionType {
        let emotes = &CONFIG.get().unwrap().emotes;
        let emote = if let Self::Custom(name) = self {
            return ReactionType::Unicode {
                name: name.to_string(),
            };
        } else {
            emotes.get(self)
        };
        let (id, name) = emote
            .unwrap_or_else(|| panic!("No {self:?} emote in config"))
            .split_emote();
        ReactionType::Custom {
            animated: false,
            id: Id::new(id),
            name: Some(name.to_owned()),
        }
    }
}
// Maps each osu! game mode to its icon emote.
impl From<GameMode> for Emote {
    fn from(mode: GameMode) -> Self {
        match mode {
            GameMode::STD => Self::Std,
            GameMode::TKO => Self::Tko,
            GameMode::CTB => Self::Ctb,
            GameMode::MNA => Self::Mna,
        }
    }
}
trait SplitEmote {
fn split_emote(&self) -> (u64, &str);
}
impl SplitEmote for String {
fn split_emote(&self) -> (u64, &str) {
let mut split = self.split(':');
let name = split.nth(1).unwrap();
let id = split.next().unwrap();
let id = u64::from_str(&id[0..id.len() - 1]).unwrap();
(id, name)
}
}
impl FromStr for Emote {
    type Err = ();
    /// Maps a config emote key to its [`Emote`] variant; unknown keys yield
    /// `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            // Game modes (`OSU` is the shared literal from the constants module
            // and must stay first so it wins over any overlapping literal).
            OSU => Ok(Self::Osu),
            "osu_std" => Ok(Self::Std),
            "osu_taiko" => Ok(Self::Tko),
            "osu_ctb" => Ok(Self::Ctb),
            "osu_mania" => Ok(Self::Mna),
            // Services / indicators
            "twitch" => Ok(Self::Twitch),
            "tracking" => Ok(Self::Tracking),
            "miss" => Ok(Self::Miss),
            // Pagination / navigation controls
            "minimize" => Ok(Self::Minimize),
            "expand" => Ok(Self::Expand),
            "jump_start" => Ok(Self::JumpStart),
            "multi_step_back" => Ok(Self::MultiStepBack),
            "single_step_back" => Ok(Self::SingleStepBack),
            "my_position" => Ok(Self::MyPosition),
            "single_step" => Ok(Self::SingleStep),
            "multi_step" => Ok(Self::MultiStep),
            "jump_end" => Ok(Self::JumpEnd),
            _ => Err(()),
        }
    }
}
|
// Deserializes `$data` into the bls12_381 element type `$elem`;
// `$compressed` selects the compressed vs uncompressed wire format.
macro_rules! slice_to_elem {
    ($data:expr, $elem:ident, $compressed:expr) => {{
        use pairing_plus::{bls12_381::$elem, serdes::SerDes};
        $elem::deserialize($data, $compressed)
    }};
}
// Generates byte-conversion impls for a newtype `$name` wrapping `$type`.
//
// Arm 1 `($name, $type, $size)`: a single serialized size; slices and
// fixed-size arrays are always parsed as the compressed encoding.
// Arm 2 `($name, $type, $comp_size, $uncomp_size)`: both encodings exist;
// for slices, the input length selects which encoding is parsed.
macro_rules! from_impl {
    ($name:ident, $type:ident, $size:expr) => {
        impl TryFrom<Vec<u8>> for $name {
            type Error = BBSError;
            fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
                Self::try_from(value.as_slice())
            }
        }
        impl TryFrom<&[u8]> for $name {
            type Error = BBSError;
            fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
                let mut value = value;
                // Always parsed as compressed in this form.
                let inner = $type::deserialize(&mut value, true)?;
                Ok(Self(inner))
            }
        }
        impl From<[u8; $size]> for $name {
            fn from(data: [u8; $size]) -> Self {
                Self::from(&data)
            }
        }
        impl From<&[u8; $size]> for $name {
            fn from(data: &[u8; $size]) -> Self {
                // Fixed-size input: `unwrap` only panics on a malformed
                // group-element encoding, not on a length mismatch.
                Self($type::deserialize(&mut data.as_ref(), true).unwrap())
            }
        }
        impl From<$type> for $name {
            fn from(src: $type) -> Self {
                Self(src.clone())
            }
        }
        impl From<&$type> for $name {
            fn from(src: &$type) -> Self {
                Self(src.clone())
            }
        }
    };
    ($name:ident, $type:ident, $comp_size:expr,$uncomp_size:expr) => {
        impl TryFrom<Vec<u8>> for $name {
            type Error = BBSError;
            fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
                Self::try_from(value.as_slice())
            }
        }
        impl TryFrom<&[u8]> for $name {
            type Error = BBSError;
            fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
                // The input length picks the encoding: compressed-size slices
                // are parsed compressed, everything else as uncompressed.
                let inner = $type::deserialize(&mut value.as_ref(), value.len() == $comp_size)?;
                Ok(Self(inner))
            }
        }
        impl From<[u8; $comp_size]> for $name {
            fn from(data: [u8; $comp_size]) -> Self {
                Self::from(&data)
            }
        }
        impl From<&[u8; $comp_size]> for $name {
            fn from(data: &[u8; $comp_size]) -> Self {
                Self($type::deserialize(&mut data.as_ref(), true).unwrap())
            }
        }
        impl From<[u8; $uncomp_size]> for $name {
            fn from(data: [u8; $uncomp_size]) -> Self {
                Self::from(&data)
            }
        }
        impl From<&[u8; $uncomp_size]> for $name {
            fn from(data: &[u8; $uncomp_size]) -> Self {
                Self($type::deserialize(&mut data.as_ref(), false).unwrap())
            }
        }
        impl From<$type> for $name {
            fn from(src: $type) -> Self {
                Self(src)
            }
        }
        impl From<&$type> for $name {
            fn from(src: &$type) -> Self {
                Self(src.clone())
            }
        }
    };
}
// TryFrom impls that delegate to the type's compressed-form byte parser.
macro_rules! try_from_impl {
    ($name:ident, $error:ident) => {
        impl TryFrom<&[u8]> for $name {
            type Error = $error;
            fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
                Self::from_bytes_compressed_form(value)
            }
        }
        impl TryFrom<Vec<u8>> for $name {
            type Error = $error;
            fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
                Self::from_bytes_compressed_form(value)
            }
        }
    };
}
// Display impl showing the type name plus the hex of its uncompressed bytes.
macro_rules! display_impl {
    ($name:ident) => {
        impl Display for $name {
            fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
                let bytes = self.to_bytes_uncompressed_form();
                write!(f, "{} {{ {} }}", stringify!($name), hex::encode(&bytes[..]))
            }
        }
    };
}
// serde Serialize/Deserialize through the compressed byte representation.
macro_rules! serdes_impl {
    ($name:ident) => {
        impl Serialize for $name {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: Serializer,
            {
                serializer.serialize_bytes(&self.to_bytes_compressed_form()[..])
            }
        }
        impl<'a> Deserialize<'a> for $name {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: Deserializer<'a>,
            {
                struct DeserializeVisitor;
                impl<'a> Visitor<'a> for DeserializeVisitor {
                    type Value = $name;
                    fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
                        formatter.write_str("expected byte array")
                    }
                    fn visit_bytes<E>(self, value: &[u8]) -> Result<$name, E>
                    where
                        E: DError,
                    {
                        // Delegates to the TryFrom<&[u8]> impl generated
                        // elsewhere for this type.
                        $name::try_from(value).map_err(|_| {
                            DError::invalid_value(serde::de::Unexpected::Bytes(value), &self)
                        })
                    }
                }
                deserializer.deserialize_bytes(DeserializeVisitor)
            }
        }
    };
}
// Fixed-size byte accessors; expands *inside* an inherent impl block.
macro_rules! to_fixed_length_bytes_impl {
    ($name:ident, $type:ident, $compressed:expr, $uncompressed:expr) => {
        /// Convert to raw bytes compressed form
        pub fn to_bytes_compressed_form(&self) -> [u8; $compressed] {
            let mut o = [0u8; $compressed];
            self.0.serialize(&mut o[..].as_mut(), true).unwrap();
            o
        }
        /// Convert to raw bytes uncompressed form
        pub fn to_bytes_uncompressed_form(&self) -> [u8; $uncompressed] {
            let mut o = [0u8; $uncompressed];
            self.0.serialize(&mut o[..].as_mut(), false).unwrap();
            o
        }
    };
}
// HashElem impl delegating to a provided hashing function.
macro_rules! hash_elem_impl {
    ($name:ident, $func:expr) => {
        impl HashElem for $name {
            type Output = $name;
            fn hash<I: AsRef<[u8]>>(data: I) -> Self::Output {
                $func(data.as_ref())
            }
        }
    };
}
// RandomElem impl whose body is supplied by the caller as a block.
macro_rules! random_elem_impl {
    ($name:ident, $func:block) => {
        impl RandomElem for $name {
            type Output = $name;
            fn random() -> Self::Output $func
        }
    };
}
// AsRef to the wrapped inner value of a newtype.
macro_rules! as_ref_impl {
    ($name:ident, $inner:ident) => {
        impl AsRef<$inner> for $name {
            fn as_ref(&self) -> &$inner {
                &self.0
            }
        }
    };
}
|
// svd2rust-generated accessors for the VPCCR register; only comments added.
#[doc = "Register `VPCCR` reader"]
pub type R = crate::R<VPCCR_SPEC>;
#[doc = "Field `VPSIZE` reader - VPSIZE"]
pub type VPSIZE_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:13 - VPSIZE"]
    #[inline(always)]
    pub fn vpsize(&self) -> VPSIZE_R {
        // 0x3fff masks the 14 low bits (bits 0..=13) holding VPSIZE.
        VPSIZE_R::new((self.bits & 0x3fff) as u16)
    }
}
#[doc = "DSI Host video packet current configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`vpccr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct VPCCR_SPEC;
impl crate::RegisterSpec for VPCCR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`vpccr::R`](R) reader structure"]
impl crate::Readable for VPCCR_SPEC {}
#[doc = "`reset()` method sets VPCCR to value 0"]
impl crate::Resettable for VPCCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::{
effects::{LinearFadeOut, LinearFadeOutBuilder, Take},
num::{Natural, NaturalRatio, Real},
};
use std::{
io::{Seek, Write},
time::Duration,
};
/// A stream of audio samples ([`Real`] values) with combinators for fading,
/// truncation, and WAV export.
pub trait Source: Iterator<Item = Real> + Send + Sync {
    /// Total number of samples when known; `None` = unknown or unbounded.
    fn len(&self) -> Option<usize>;
    /// Total playing time when known.
    fn duration(&self) -> Option<Duration>;
    /// Number of channels; defaults to mono.
    fn channels(&self) -> u16 {
        1
    }
    /// Samples per second; defaults to 48 kHz.
    fn sample_rate(&self) -> u32 {
        48000
    }
    /// Wraps `self` in a linear fade-out with default builder settings.
    fn fade_out(self) -> LinearFadeOut<Self>
    where
        Self: Sized,
    {
        LinearFadeOutBuilder::default().finish(self)
    }
    /// Truncates the source to at most `samples` samples.
    fn take_samples(self, samples: usize) -> Take<Self>
    where
        Self: Sized,
    {
        Take::new(self, samples)
    }
    /// Truncates the source to `duration`, converting it to a sample count
    /// via the sample rate using exact rational arithmetic, then rounding.
    fn take_duration(self, duration: Duration) -> Take<Self>
    where
        Self: Sized,
    {
        // Nanoseconds per sample = 1 s in ns / sample rate.
        let sample_time = NaturalRatio::new(
            Duration::from_secs(1).as_nanos(),
            self.sample_rate() as Natural,
        );
        let nanos = NaturalRatio::from(duration.as_nanos());
        self.take_samples((nanos / sample_time).round().to_integer() as usize)
    }
    /// Writes all samples to `target` as 32-bit float WAV. Consumes the
    /// source; an unbounded source never returns from this call.
    fn to_wav<W>(self, target: W) -> Result<(), hound::Error>
    where
        Self: Sized,
        W: Write + Seek,
    {
        let mut writer = hound::WavWriter::new(
            target,
            hound::WavSpec {
                channels: self.channels(),
                sample_rate: self.sample_rate(),
                bits_per_sample: 32,
                sample_format: hound::SampleFormat::Float,
            },
        )?;
        for sample in self {
            writer.write_sample(sample as f32)?;
        }
        writer.flush()?;
        Ok(())
    }
}
// Forwarding impl so `&mut S` is itself a `Source`.
impl<'this, S> Source for &'this mut S
where
    S: Source,
{
    fn len(&self) -> Option<usize> {
        (**self).len()
    }
    fn duration(&self) -> Option<Duration> {
        (**self).duration()
    }
    fn channels(&self) -> u16 {
        (**self).channels()
    }
    fn sample_rate(&self) -> u32 {
        (**self).sample_rate()
    }
}
// Forwarding impl so boxed (possibly unsized) sources remain sources.
impl<S> Source for Box<S>
where
    S: Source + ?Sized,
{
    fn len(&self) -> Option<usize> {
        (**self).len()
    }
    fn duration(&self) -> Option<Duration> {
        (**self).duration()
    }
    fn channels(&self) -> u16 {
        (**self).channels()
    }
    fn sample_rate(&self) -> u32 {
        (**self).sample_rate()
    }
}
/// Factory for [`Source`] values; exposes the stream parameters before the
/// source is built.
pub trait SourceBuilder {
    type Source: Source;
    fn get_channels(&self) -> u16;
    fn get_sample_rate(&self) -> u32;
    fn finish(&self) -> Self::Source;
}
// Forwarding impl so `&mut B` is itself a builder.
impl<'builder, B> SourceBuilder for &'builder mut B
where
    B: SourceBuilder,
{
    type Source = B::Source;
    fn get_channels(&self) -> u16 {
        (**self).get_channels()
    }
    fn get_sample_rate(&self) -> u32 {
        (**self).get_sample_rate()
    }
    fn finish(&self) -> Self::Source {
        (**self).finish()
    }
}
// Forwarding impl for boxed (possibly unsized) builders.
impl<B> SourceBuilder for Box<B>
where
    B: SourceBuilder + ?Sized,
{
    type Source = B::Source;
    fn get_channels(&self) -> u16 {
        (**self).get_channels()
    }
    fn get_sample_rate(&self) -> u32 {
        (**self).get_sample_rate()
    }
    fn finish(&self) -> Self::Source {
        (**self).finish()
    }
}
/// An endless source producing only zero-valued samples.
#[derive(Debug, Clone)]
pub struct Silence {
    sample_rate: u32,
    channels: u16,
}
impl Iterator for Silence {
    type Item = Real;
    // Never ends; always yields silence.
    fn next(&mut self) -> Option<Self::Item> {
        Some(0.0)
    }
}
impl Source for Silence {
    // Unbounded: neither a sample count nor a duration exists.
    fn len(&self) -> Option<usize> {
        None
    }
    fn duration(&self) -> Option<Duration> {
        None
    }
    fn channels(&self) -> u16 {
        self.channels
    }
    fn sample_rate(&self) -> u32 {
        self.sample_rate
    }
}
/// Builder for [`Silence`]; defaults to mono at 48 kHz.
#[derive(Debug, Clone)]
pub struct SilenceBuilder {
    sample_rate: u32,
    channels: u16,
}
impl Default for SilenceBuilder {
    fn default() -> Self {
        Self { sample_rate: 48000, channels: 1 }
    }
}
impl SilenceBuilder {
    /// Sets the sample rate (Hz).
    pub fn sample_rate(&mut self, sample_rate: u32) -> &mut Self {
        self.sample_rate = sample_rate;
        self
    }
    /// Sets the channel count.
    pub fn channels(&mut self, channels: u16) -> &mut Self {
        self.channels = channels;
        self
    }
}
impl SourceBuilder for SilenceBuilder {
    type Source = Silence;
    fn get_sample_rate(&self) -> u32 {
        self.sample_rate
    }
    fn get_channels(&self) -> u16 {
        self.channels
    }
    fn finish(&self) -> Self::Source {
        Silence { sample_rate: self.sample_rate, channels: self.channels }
    }
}
|
use super::app_config::CKBAppConfig;
use ckb_chain_spec::consensus::Consensus;
use ckb_jsonrpc_types::ScriptHashType;
use ckb_miner::MinerConfig;
use ckb_pow::PowEngine;
use std::path::PathBuf;
use std::sync::Arc;
/// Arguments for the export subcommand: app config, chain consensus, and the
/// output target path.
pub struct ExportArgs {
    pub config: Box<CKBAppConfig>,
    pub consensus: Consensus,
    pub target: PathBuf,
}
/// Arguments for the import subcommand; `source` is the path read from.
pub struct ImportArgs {
    pub config: Box<CKBAppConfig>,
    pub consensus: Consensus,
    pub source: PathBuf,
}
/// Arguments for running the node.
pub struct RunArgs {
    pub config: Box<CKBAppConfig>,
    pub consensus: Consensus,
    // Skips/relaxes block-assembler validation when set — confirm exact
    // semantics at the call site.
    pub block_assembler_advanced: bool,
}
/// Arguments for the profiling subcommand over the block range `from..to`.
pub struct ProfArgs {
    pub config: Box<CKBAppConfig>,
    pub consensus: Consensus,
    pub from: u64,
    pub to: u64,
}
/// Arguments for the miner: its config plus the PoW engine to use.
pub struct MinerArgs {
    pub config: MinerConfig,
    pub pow_engine: Arc<dyn PowEngine>,
}
/// Arguments for the stats subcommand; `from`/`to` bound the inspected range
/// when present.
pub struct StatsArgs {
    pub config: Box<CKBAppConfig>,
    pub consensus: Consensus,
    pub from: Option<u64>,
    pub to: Option<u64>,
}
/// Arguments for initializing a new node directory.
pub struct InitArgs {
    pub interactive: bool,
    pub root_dir: PathBuf,
    pub chain: String,
    pub rpc_port: String,
    pub p2p_port: String,
    pub log_to_file: bool,
    pub log_to_stdout: bool,
    pub list_chains: bool,
    pub force: bool,
    pub block_assembler_code_hash: Option<String>,
    pub block_assembler_args: Vec<String>,
    pub block_assembler_hash_type: ScriptHashType,
    pub block_assembler_message: Option<String>,
    pub import_spec: Option<String>,
}
/// Arguments for resetting node data: flags select which stores to wipe, the
/// paths point at each store's location.
pub struct ResetDataArgs {
    pub force: bool,
    pub all: bool,
    pub database: bool,
    pub indexer: bool,
    pub network: bool,
    pub network_peer_store: bool,
    pub network_secret_key: bool,
    pub logs: bool,
    pub data_dir: PathBuf,
    pub db_path: PathBuf,
    pub indexer_db_path: PathBuf,
    pub network_dir: PathBuf,
    pub network_peer_store_path: PathBuf,
    pub network_secret_key_path: PathBuf,
    pub logs_dir: Option<PathBuf>,
}
|
mod access;
mod construct;
/// Function-like proc macro: generates accessor implementations (see the
/// `access` module for the expansion logic).
#[proc_macro]
pub fn implement_accessors(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    access::implement(input)
}
/// Function-like proc macro: emits a trait definition (see `construct`).
#[proc_macro]
pub fn define_trait(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    construct::define_trait(input)
}
/// Function-like proc macro: emits an implementation of a trait previously
/// declared via `define_trait` (see `construct`).
#[proc_macro]
pub fn implement_trait(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    construct::implement_trait(input)
}
|
use traffic_protocol::*;
use traffic_controls::*;
use std::sync::mpsc::{Sender};
use std::intrinsics;
/// Lifecycle of a [`SignalGroup`]'s state machine.
#[derive(Debug, Clone)]
pub enum SignalGroupState {
    // Not started yet; `run_loop` sends the initial "go" JSON from here.
    Start,
    // Running since `start` (controller time units).
    Busy { start: i32 },
    // Maximum green time exceeded; controls are being forced to red.
    ForceRed,
    // All controls are red again.
    Done,
}
/// A set of traffic controls driven through a common state machine.
#[derive(Debug, Clone)]
pub struct SignalGroup<'a> {
    pub controls: Vec<ControlWithState<'a>>,
    pub state: SignalGroupState,
    // When true, the group is never forced to red on a timer.
    pub unlimited_green: bool,
    // Maximum green duration before a forced red (ignored when unlimited).
    pub max_green: i32,
    pub is_bus: bool,
}
// Placeholder maximum green time; "TEMP" suggests a tuning value — confirm.
const MAX_GREEN_TEMP: i32 = 15;
impl<'a> SignalGroup<'a> {
pub fn new(controls: Vec<&'a Control>, unlimited_green: bool) -> SignalGroup<'a> {
SignalGroup {
controls: controls.iter().map(|c| ControlWithState::new(c)).collect(),
state: SignalGroupState::Start,
unlimited_green: unlimited_green,
max_green: MAX_GREEN_TEMP,
is_bus: false,
}
}
pub fn new_bus(controls: Vec<&'a Control>, unlimited_green: bool) -> SignalGroup<'a> {
SignalGroup {
controls: controls.iter().map(|c| ControlWithState::new(c)).collect(),
state: SignalGroupState::Start,
unlimited_green: unlimited_green,
max_green: MAX_GREEN_TEMP,
is_bus: true,
}
}
pub fn empty() -> SignalGroup<'a> {
SignalGroup {
controls: vec![],
state: SignalGroupState::Start,
unlimited_green: false,
max_green: MAX_GREEN_TEMP,
is_bus: false,
}
}
pub fn clone_with(&self, state: SignalGroupState) -> SignalGroup<'a> {
SignalGroup {
controls: self.controls.clone(),
state: state,
unlimited_green: self.unlimited_green,
max_green: self.max_green,
is_bus: self.is_bus
}
}
pub fn run_loop(&mut self, time: i32, out_tx: &Sender<String>, sensor_states: &SensorStates) -> Option<SignalGroupState> {
match self.state {
SignalGroupState::Start => {
println!("=> Starting ControlGroup");
let json_str = match self.is_bus {
true => out_compat_json_str(self.construct_bulk_json(JsonState::BusRechtdoorRechtsaf)),
false => out_compat_json_str(self.construct_bulk_json(JsonState::Groen)),
};
out_tx.send(json_str).unwrap();
Some(SignalGroupState::Busy{ start: time })
},
SignalGroupState::ForceRed => {
println!("=> Forced to stop this ControlGroup");
self.force_red();
Some(SignalGroupState::Busy{ start:time })
},
SignalGroupState::Busy { start } => {
println!("=> Busy {:?} {:?}", start + self.max_green, self.ids());
if self.controls_have_state(TrafficLightState::Red) {
Some(SignalGroupState::Done)
}
else if !self.unlimited_green && time >= start + self.max_green {
println!("forcing red...");
Some(SignalGroupState::ForceRed)
}
else {
self.run_loops(time, out_tx, sensor_states);
None
}
},
SignalGroupState::Done => {
println!("=> Controlgroup done");
None
}
}
}
pub fn controls_have_state(&self, phase: TrafficLightState) -> bool {
unsafe {
let other_state = intrinsics::discriminant_value(&phase);
self.controls.iter().all(|c| intrinsics::discriminant_value(&c.state) == other_state)
}
}
fn force_red(&mut self) {
for c in &mut self.controls {
c.force_red = true;
}
}
fn run_loops(&mut self, time: i32, out_tx: &Sender<String>, sensor_states: &SensorStates) {
for c in &mut self.controls {
c.run_loop(time, out_tx, sensor_states, self.unlimited_green);
}
}
fn construct_bulk_json(&self, state: JsonState) -> Vec<StoplichtJson> {
self.controls.iter().flat_map(|ref c| c.inner.json_objs(state)).collect()
}
fn ids(&self) -> Vec<Vec<usize>> {
self.controls.iter().map(|c| c.inner.get_ids()).collect()
}
}
|
// Copyright 2016 Indoc Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![cfg(feature = "compiletest")]
extern crate compiletest_rs as compiletest;
use std::env;
/// Runs the compiletest UI suite located in `tests/<dir>`.
fn run_dir(dir: &'static str) {
    let mut config = compiletest::Config::default();
    config.mode = compiletest::common::Mode::Ui;
    // Make the crate's build artifacts linkable from the test programs.
    config.target_rustcflags = Some("-L ../target/debug/deps".to_owned());
    // TESTNAME=<substring> restricts which UI tests are executed.
    if let Ok(name) = env::var("TESTNAME") {
        config.filter = Some(name);
    }
    config.src_base = format!("tests/{}", dir).into();
    config.build_base = std::path::PathBuf::from("../target/ui");
    compiletest::run_tests(&config);
}
// Expected UI output differs between stable and nightly toolchains, so each
// feature flavour points at its own expectation directory.
#[cfg(not(feature = "unstable"))]
#[test]
fn ui() {
    run_dir("ui-stable");
}
#[cfg(feature = "unstable")]
#[test]
fn ui() {
    run_dir("ui-unstable");
}
|
use shorthand::ShortHand;
// `ShortHand` derives getters for the fields below; this test checks the
// getters' return types for `Option` fields.
#[derive(ShortHand, Default)]
struct Example<'a> {
    value: Option<&'a str>,
    other: Option<usize>,
}
#[test]
fn test_option_copy_as_ref() {
    // Compile-time check: the generated getters yield `Option<&str>` /
    // `Option<usize>` by value rather than references to the options.
    let _: Option<&str> = Example::default().value();
    let _: Option<usize> = Example::default().other();
}
fn main() {}
|
use super::*;
/// Command history: a bounded set of entries plus a navigation cursor.
#[derive(Debug)]
pub struct History {
    queue: BoundedSet<String>,
    // Cursor into the reversed queue; -1 = no navigation in progress
    // (reset by `append` and `clear`).
    pos: i32,
}
impl Default for History {
    fn default() -> Self {
        Self {
            // Keep at most 32 entries.
            queue: BoundedSet::new(32),
            pos: -1,
        }
    }
}
impl History {
    pub fn new() -> Self {
        Self::default()
    }
    /// Drops all entries and resets the cursor.
    pub fn clear(&mut self) {
        self.queue.clear();
        self.pos = -1;
    }
    /// Stores a new entry and resets the cursor.
    pub fn append(&mut self, data: impl Into<String>) {
        self.queue.insert(data.into());
        self.pos = -1;
    }
    /// Steps the cursor towards older entries and returns the entry there
    /// (entries are traversed newest-first).
    ///
    /// NOTE(review): after returning the oldest entry, one more call bumps
    /// `pos` to `len` and yields `None`; only then does the guard stop further
    /// increments. Check whether stopping *at* the oldest entry was intended.
    pub fn backward(&mut self) -> Option<&String> {
        // `pos` starts at -1; the `as usize` cast turns -1 into a huge value,
        // so the equality guard only fires once `pos` has reached `len`.
        if self.queue.is_empty() || self.pos as usize == self.queue.len() {
            return None;
        }
        self.pos += 1;
        // `rev()` walks newest-to-oldest.
        self.queue.iter().rev().nth(self.pos as usize)
    }
    /// Steps the cursor back towards newer entries and returns the entry
    /// there (stays on the newest entry once reached).
    pub fn forward(&mut self) -> Option<&String> {
        if self.queue.is_empty() {
            return None;
        }
        if self.pos > 0 {
            self.pos -= 1;
        }
        self.queue.iter().rev().nth(self.pos as usize)
    }
}
|
//! Library to cipher and decipher texts using substitution method.
use crate::{ErrorKind, Result, ResultExt, Error};
use std::collections::{HashMap, HashSet};
pub const DEFAULT_CHARSET: &'static str = "abcdefghijklmnopqrstuvwxyz";
/// Check used key is a valid one for substitution method with this charset.
///
/// # Parameters:
/// * key: Secret key. In substitution method it corresponds with how to
/// substitute each character in the charset. Both ends should know this and
/// use the same one. Besides key should have the same number of characters
/// than charset and no repeated characters.
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
///
/// # Raises:
/// * ErrorKind::WrongKeyLength: If given key has wrong length.
/// * ErrorKind::WrongKeyRepeatedCharacters: If given key has repeated characters.
fn check_substitution_key<T, U>(key: T, charset: U) -> Result<()>
    where T: AsRef<str>,
          U: AsRef<str> {
    // Compare character counts, not `str::len()` byte lengths: with multibyte
    // characters (e.g. a charset containing "ñ", as the module docs suggest)
    // byte length and character count disagree and valid keys would be
    // rejected.
    if key.as_ref().chars().count() != charset.as_ref().chars().count() {
        bail!(ErrorKind::WrongKeyLength(key.as_ref().to_string(), charset.as_ref().to_string()))
    }
    let charset_set: HashSet<char> = charset.as_ref().chars().collect();
    let key_set: HashSet<char> = key.as_ref().chars().collect();
    // Equal character counts but fewer distinct characters in the key means
    // the key repeats characters.
    if key_set.len() != charset_set.len() {
        bail!(ErrorKind::WrongKeyRepeatedCharacters(key.as_ref().to_string()))
    }
    Ok(())
}
/// Cipher given text using substitution method.
///
/// Be aware that different languages use different charsets. Default charset
/// is for english language, if you are using any other you should use a proper
/// dataset. For instance, if you are ciphering an spanish text, you should use
/// a charset with "ñ" character.
///
/// This module uses only lowercase charsets. That means that caps will be kept
/// but lowercase and uppercase will follow ths same substitutions.
///
/// # Parameters:
/// * text: Text to be ciphered.
/// * key: Secret key. In substitution method it corresponds with how to
/// substitute each character in the charset. Both ends should know this and
/// use the same one. Besides key should have the same length than charset and
/// no repeated characters.
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
///
/// # Returns:
/// * Ciphered text.
///
/// # Raises:
/// * CharacterMappingError: If there were an error mapping a char to its substitution.
// pub fn cipher<T, U, V>(text: T, key: U, charset: V) -> Result<String>
// where T: AsRef<str>,
// U: AsRef<str>,
// V: AsRef<str> {
pub fn cipher(text: &str, key: &str, charset: &str) -> Result<String> {
check_substitution_key(&key, &charset)?;
let mut ciphered_message: String = String::new();
let key_chars: Vec<char> = key.chars().collect();
for _char in text.chars() {
if charset.contains(_char.to_lowercase().to_string().as_str()) {
let lowercase_char_debug: Vec<char> = _char.to_lowercase().collect();
let char_to_find = lowercase_char_debug[0];
let charset_vec: Vec<char> = charset.chars().collect();
let charset_index = match charset_vec.iter().position(|ch| ch.to_string() == char_to_find.to_string()){
Some(index) => index,
None => bail!(ErrorKind::CharacterMappingError(_char.to_lowercase().to_string()))
};
let mapped_char: char = key_chars[charset_index];
let ciphered_chars: String = if _char.is_lowercase() {
mapped_char.to_string()
} else {
mapped_char.to_uppercase().to_string()
};
ciphered_message.push_str(ciphered_chars.as_str());
} else {
ciphered_message.push_str(_char.to_string().as_str())
}
}
Ok(ciphered_message)
}
/// Decipher given text using substitution method.
///
/// Note you should use the same charset that ciphering end did.
///
/// # Parameters:
/// * ciphered_text: Text to be deciphered.
/// * key: Secret key. In substitution method it corresponds with how to
/// substitute each character in the charset. Both ends should know this and
/// use the same one. Besides key should have the same length than charset and
/// no repeated characters.
/// * charset: Charset used for substitution method. Both ends, ciphering
/// and deciphering, should use the same charset or original text won't be properly
/// recovered.
///
/// # Returns:
/// * Deciphered text.
///
/// # Raises:
/// * CharacterMappingError: If there were an error mapping a char to its substitution.
// pub fn decipher<T, U, V>(ciphered_text: T, key: U, charset: V) -> Result<String>
// where T: AsRef<str>,
// U: AsRef<str>,
// V: AsRef<str> {
pub fn decipher(ciphered_text: &str, key: &str, charset: &str) -> Result<String>{
check_substitution_key(&key, &charset)?;
let mut deciphered_message = String::new();
let charset_chars: Vec<char> = charset.chars().collect();
let key_vec: Vec<char> = key.chars().collect();
for ciphered_char in ciphered_text.chars() {
let lowercase_char_debug: Vec<char> = ciphered_char.to_lowercase().collect();
let char_to_find = lowercase_char_debug[0];
if key.contains(ciphered_char.to_lowercase().to_string().as_str()) {
let key_index = match key_vec.iter().position(|ch| ch.to_string() == char_to_find.to_string()) {
Some(index) => index,
None => bail!(ErrorKind::CharacterMappingError(ciphered_char.to_lowercase().to_string()))
};
let deciphered_char = charset_chars[key_index];
let deciphered_chars = if ciphered_char.is_lowercase() {
deciphered_char.to_string()
} else {
deciphered_char.to_string().to_uppercase()
};
deciphered_message.push_str(deciphered_chars.as_str());
} else {
deciphered_message.push_str(ciphered_char.to_string().as_str());
}
}
Ok(deciphered_message)
}
#[cfg(test)]
pub mod tests {
    use super::*;
    pub const TEST_CHARSET: &'static str = "abcdefghijklmnopqrstuvwxyz";
    pub const TEST_KEY: &'static str = "lfwoayuisvkmnxpbdcrjtqeghz";
    pub const ORIGINAL_MESSAGE: &'static str = "If a man is offered a fact which goes against his \
                            instincts, he will scrutinize it closely, and unless \
                            the evidence is overwhelming, he will refuse to believe \
                            it. If, on the other hand, he is offered something which \
                            affords a reason for acting in accordance to his \
                            instincts, he will accept it even on the slightest \
                            evidence. The origin of myths is explained in this way. \
                            -Bertrand Russell";
    pub const CIPHERED_MESSAGE: &'static str = "Sy l nlx sr pyyacao l ylwj eiswi upar lulsxrj isr \
                            sxrjsxwjr, ia esmm rwctjsxsza sj wmpramh, lxo txmarr \
                            jia aqsoaxwa sr pqaceiamnsxu, ia esmm caytra \
                            jp famsaqa sj. Sy, px jia pjiac ilxo, ia sr \
                            pyyacao rpnajisxu eiswi lyypcor l calrpx ypc \
                            lwjsxu sx lwwpcolxwa jp isr sxrjsxwjr, ia esmm \
                            lwwabj sj aqax px jia rmsuijarj aqsoaxwa. Jia pcsusx \
                            py nhjir sr agbmlsxao sx jisr elh. -Facjclxo Ctrramm";
    // `panic!` with format arguments replaces the previous
    // `assert!(false, format!(..))`, which no longer compiles on modern
    // editions (non-literal format string) and used non-snake-case `E`
    // bindings.
    #[test]
    fn test_cipher() {
        match cipher(ORIGINAL_MESSAGE, TEST_KEY, TEST_CHARSET) {
            Ok(ciphered_text) => {
                assert_eq!(CIPHERED_MESSAGE, ciphered_text, "Message was not ciphered as we were expecting.")
            },
            Err(error) => panic!("Error happened: {}", error),
        }
    }
    #[test]
    fn test_decipher() {
        match decipher(CIPHERED_MESSAGE, TEST_KEY, TEST_CHARSET) {
            Ok(deciphered_text) => {
                assert_eq!(ORIGINAL_MESSAGE, deciphered_text, "Deciphered message was not the one we were expecting")
            },
            Err(error) => panic!("Error happened: {}", error),
        }
    }
    #[test]
    fn test_wrong_length_key_are_detected() {
        let test_charset = "123";
        let wrong_key = "1234";
        match cipher("", wrong_key, test_charset) {
            Err(error) => match Error::from(error) {
                Error(ErrorKind::WrongKeyLength(_, _), _) => {},
                other => panic!("Raised error was not the one we were expecting but {} instead", other),
            },
            Ok(_) => panic!("No error was raised when wrong key used."),
        }
    }
    #[test]
    fn test_repeated_character_keys_are_detected() {
        let test_charset = "123";
        let wrong_key = "122";
        match cipher("", wrong_key, test_charset) {
            Err(error) => match Error::from(error) {
                Error(ErrorKind::WrongKeyRepeatedCharacters(_), _) => {},
                other => panic!("Raised error was not the one we were expecting but {} instead", other),
            },
            Ok(_) => panic!("No error was raised when wrong key used."),
        }
    }
}
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
extern crate vlq;
extern crate xml;
extern crate globset;
extern crate lcov_parser;
extern crate memmap;
pub mod debug;
pub mod settings;
mod lcov;
mod lines;
mod load;
mod model;
mod range;
mod source_map;
mod vlq_decode;
mod util;
use std::path::Path;
use crate::lines::calculate_executable_line_mappings;
use crate::lines::calculate_line_coverage;
use crate::lines::FileCoverage;
use crate::lines::ManyCoverage;
use crate::model::{PuppeteerData, SourceMap};
use crate::settings::Settings;
use crate::source_map::*;
use std::collections::HashMap;
use std::collections::HashSet;
use std::io::Write;
use std::io;
/// Resolves the source map referenced by `data`, maps the recorded coverage
/// ranges back to the original source files, and returns per-file line
/// coverage.
///
/// Returns `None` when `data` carries no `sourceMappingURL` comment, when the
/// resolved `.map` file does not exist on disk, or when it cannot be
/// deserialized.
pub fn process_source_map(settings: &Settings, data: PuppeteerData) -> Option<Vec<FileCoverage>> {
    // No sourceMappingURL comment means there is nothing we can map.
    let source_mapping_url = data.get_source_mapping_url()?;
    // Translate the served URL into a path inside the local dist directory.
    let source_path = data.url.replace(&settings.public_url_base, &settings.dist_path);
    let source_mapping_url = source_mapping_url.replace("//# sourceMappingURL=", "");
    // The map URL is relative to the generated file's directory.
    let source_mapping_path = Path::new(&source_path)
        .parent()
        .unwrap()
        .join(source_mapping_url);
    println!("Processing source map {}", source_mapping_path.to_string_lossy());
    if !source_mapping_path.exists() {
        return None;
    }
    let source_map: SourceMap = match util::deserialize_object(&source_mapping_path) {
        Ok(source_map) => source_map,
        Err(err) => {
            eprintln!("Couldn't deserialize source map for {}", source_mapping_path.to_string_lossy());
            eprintln!("{}", err);
            return None;
        }
    };
    // Map generated-code lines back to the original source files.
    let references = process_references(settings, &source_map);
    let file_refs = references.iter().map(|s| s.file_path.clone()).collect();
    let line_refs = calculate_executable_line_mappings(&source_map, references);
    let mut file_coverage =
        calculate_line_coverage(data.ranges, line_refs, file_refs, data.text.as_str());
    // Optionally keep only the lines an external LCOV report also lists,
    // so both tools agree on what counts as executable.
    if let Some(ref reify_against_lcov) = settings.reify_against_lcov {
        eprintln!("Reifying against LCOV file");
        let mut file_hash_map: HashMap<_, _> = file_coverage
            .into_iter()
            .map(|v| (v.path.clone(), v))
            .collect();
        for line_data in lcov::LcovFilesLines::new(&util::fast_read(reify_against_lcov).unwrap()) {
            // `if let` instead of `Option::map` for a side-effecting branch
            // (clippy: option_map_unit_fn).
            if let Some(our_coverage) = file_hash_map.get_mut(&line_data.file_path) {
                let new_lines: HashSet<_> = line_data.lines.into_iter().collect();
                our_coverage.lines.retain(|v| new_lines.contains(&v.line_number));
            }
        }
        file_coverage = file_hash_map.into_iter().map(|(_k, v)| v).collect();
    }
    Some(file_coverage)
}
pub fn run<P: AsRef<Path>, W: Write>(settings: Settings, json_path: Vec<P>, writer: Option<W>) {
let values = load::load_items(json_path);
let processed: Vec<_> = values
.into_iter()
.map(|value| process_source_map(&settings, value))
.flat_map(|value| value.into_iter())
.flat_map(|value| value.into_iter())
.collect();
let many_coverage = ManyCoverage { files: processed };
if let Some(writer) = writer {
many_coverage.write_xml(writer);
} else {
let stdout = io::stdout();
let handle = stdout.lock();
many_coverage.write_xml(handle);
}
} |
use log::Level;
use memflow::connector::ConnectorArgs;
use memflow_win32::*;
use memflow_coredump::create_connector;
/// Opens a raw Windows coredump, locates the kernel inside it, and prints
/// how many EPROCESS entries were found.
fn main() {
    // Verbose logging so connector/kernel initialization steps are visible.
    simple_logger::init_with_level(Level::Debug).unwrap();
    // Back the memflow connector with the local coredump file.
    let coredump = create_connector(&ConnectorArgs::with_default("./coredump.raw")).unwrap();
    // Build the Win32 kernel introspection handle on top of the connector.
    let mut kernel = Kernel::builder(coredump).build().unwrap();
    // Walk the process list and report its size.
    let processes = kernel.eprocess_list().unwrap();
    println!("eprocess_list.len() = {}", processes.len());
}
|
//2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder.
//
//What is the smallest positive number that is evenly divisible by all of the numbers from 1 to 20?
// find the smallest number that is divisible by every number up to bound
/// Returns the least common multiple of all integers in `1..=bound`.
///
/// Instead of brute-forcing candidates (the original approach walks roughly
/// 232 million outer iterations for `bound = 20`), fold
/// `lcm(a, b) = a / gcd(a, b) * b` over the range — a handful of arithmetic
/// operations per element.
///
/// # Panics
/// Panics when `bound == 0`, matching the original contract.
fn solve(bound: u64) -> u64 {
    if bound == 0 {
        panic!();
    }
    // Euclid's algorithm for the greatest common divisor.
    fn gcd(a: u64, b: u64) -> u64 {
        if b == 0 { a } else { gcd(b, a % b) }
    }
    // Dividing before multiplying keeps intermediate values small.
    (1..=bound).fold(1, |lcm, n| lcm / gcd(lcm, n) * n)
}
/// Prints the Project Euler 5 answer for the 1..=20 range.
fn main() {
    // `{}` formats the u64 directly; the intermediate `.to_string()` was
    // a needless allocation.
    println!("{}", solve(20));
}
#[cfg(test)]
mod tests {
    use super::solve;

    // bound = 0 violates the precondition and must panic.
    #[test]
    #[should_panic]
    fn least_common_multiple_0() {
        solve(0);
    }

    #[test]
    fn least_common_multiple_1() {
        assert_eq!(solve(1), 1);
    }

    // Value stated in the problem description.
    #[test]
    fn least_common_multiple_10() {
        assert_eq!(solve(10), 2520);
    }

    #[test]
    fn least_common_multiple_20() {
        assert_eq!(solve(20), 232792560);
    }
}
|
use bestbuy::{client::BestbuyClient, order::*};
use serde::Serialize;
use serde_json;
use std::env::var;
use std::io::stdout;
/// Builds a `BestbuyClient` authenticated with the `TOKEN` environment
/// variable.
///
/// # Panics
/// Panics with a descriptive message when `TOKEN` is unset or not valid
/// Unicode (the original bare `unwrap` gave no hint which variable failed).
pub fn get_client() -> BestbuyClient {
    let token = var("TOKEN").expect("TOKEN environment variable must be set");
    BestbuyClient::new(&token)
}
/// Serializes `v` as pretty-printed JSON to standard output.
pub fn dump_json<T: Serialize>(v: T) {
    let target = stdout();
    serde_json::to_writer_pretty(target, &v).unwrap()
}
/// Prints a human-readable summary of `order`: id, state, then one
/// tab-separated row per order line.
pub fn inspect_order(order: Order) {
    println!("id: {}", order.order_id);
    println!("status: {:?}", order.order_state);
    println!("lines:\n");
    for item in order.order_lines {
        // Row layout: line id, state, SKU, unit price, quantity.
        let row = format!(
            "\t#{}\t{:?}\t{}\t{}\t{}",
            item.order_line_id, item.order_line_state, item.offer_sku, item.price_unit, item.quantity
        );
        println!("{}", row);
    }
}
|
pub mod token;
pub mod lexer;
pub mod evaluator;
use std::fs::File;
use std::io::prelude::*;
use std::env;
extern crate colored;
use colored::*;
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() == 2 {
let mut f = File::open(args[1].to_string()).expect("file not found");
let mut contents = String::new();
f.read_to_string(&mut contents)
.expect("something went wrong reading the file");
let program = lexer::lex(&mut contents);
let stack = evaluator::eval(program, vec![], vec![]);
if stack.len() != 0 {
println!("{}\n{}\n{:?}", "Warning!".yellow().bold(), "The stack was not empty after evaluation:".yellow(), stack);
}
} else {
println!("No input file given");
}
} |
/// Pointer structure for the interrupt descriptor table.
/// The u16 + u32 packed layout matches the 6-byte operand of the x86 `lidt`
/// instruction (limit then base) — NOTE(review): confirm at the load site.
/// `repr(packed)` removes padding so the in-memory layout is exactly 6 bytes.
#[repr(packed)]
pub struct IdtDescriptor {
    /// Table size field (presumably the IDT limit, i.e. size in bytes minus
    /// one, per standard lidt semantics — TODO confirm).
    pub size: u16,
    /// 32-bit address of the table itself.
    pub ptr: u32
}
/// A single 8-byte interrupt descriptor table entry.
/// Field order (low offset, selector, zero byte, attributes, high offset)
/// matches the 32-bit x86 gate descriptor layout — NOTE(review): confirm
/// against the code that populates these entries.
/// `repr(packed)` keeps the fields contiguous with no padding.
#[repr(packed)]
pub struct IdtEntry {
    /// Low 16 bits of the handler address.
    pub offsetl: u16,
    /// Code segment selector for the handler.
    pub selector: u16,
    /// Must-be-zero byte of the descriptor.
    pub zero: u8,
    /// Type/attribute bits (present flag, DPL, gate type).
    pub attribute: u8,
    /// High 16 bits of the handler address.
    pub offseth: u16
}
|
// NOTE(review): svd2rust-generated MMIO layout for the MDIOS peripheral.
// Field order and offsets mirror the hardware register map; do not reorder
// or edit by hand — regenerate from the SVD instead.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - MDIOS configuration register"]
    pub cr: CR,
    #[doc = "0x04 - MDIOS write flag register"]
    pub wrfr: WRFR,
    #[doc = "0x08 - MDIOS clear write flag register"]
    pub cwrfr: CWRFR,
    #[doc = "0x0c - MDIOS read flag register"]
    pub rdfr: RDFR,
    #[doc = "0x10 - MDIOS clear read flag register"]
    pub crdfr: CRDFR,
    #[doc = "0x14 - MDIOS status register"]
    pub sr: SR,
    #[doc = "0x18 - MDIOS clear flag register"]
    pub clrfr: CLRFR,
    #[doc = "0x1c - MDIOS input data register 0"]
    pub dinr0: DINR0,
    #[doc = "0x20 - MDIOS input data register 1"]
    pub dinr1: DINR1,
    #[doc = "0x24 - MDIOS input data register 2"]
    pub dinr2: DINR2,
    #[doc = "0x28 - MDIOS input data register 3"]
    pub dinr3: DINR3,
    #[doc = "0x2c - MDIOS input data register 4"]
    pub dinr4: DINR4,
    #[doc = "0x30 - MDIOS input data register 5"]
    pub dinr5: DINR5,
    #[doc = "0x34 - MDIOS input data register 6"]
    pub dinr6: DINR6,
    #[doc = "0x38 - MDIOS input data register 7"]
    pub dinr7: DINR7,
    #[doc = "0x3c - MDIOS input data register 8"]
    pub dinr8: DINR8,
    #[doc = "0x40 - MDIOS input data register 9"]
    pub dinr9: DINR9,
    #[doc = "0x44 - MDIOS input data register 10"]
    pub dinr10: DINR10,
    #[doc = "0x48 - MDIOS input data register 11"]
    pub dinr11: DINR11,
    #[doc = "0x4c - MDIOS input data register 12"]
    pub dinr12: DINR12,
    #[doc = "0x50 - MDIOS input data register 13"]
    pub dinr13: DINR13,
    #[doc = "0x54 - MDIOS input data register 14"]
    pub dinr14: DINR14,
    #[doc = "0x58 - MDIOS input data register 15"]
    pub dinr15: DINR15,
    #[doc = "0x5c - MDIOS input data register 16"]
    pub dinr16: DINR16,
    #[doc = "0x60 - MDIOS input data register 17"]
    pub dinr17: DINR17,
    #[doc = "0x64 - MDIOS input data register 18"]
    pub dinr18: DINR18,
    #[doc = "0x68 - MDIOS input data register 19"]
    pub dinr19: DINR19,
    #[doc = "0x6c - MDIOS input data register 20"]
    pub dinr20: DINR20,
    #[doc = "0x70 - MDIOS input data register 21"]
    pub dinr21: DINR21,
    #[doc = "0x74 - MDIOS input data register 22"]
    pub dinr22: DINR22,
    #[doc = "0x78 - MDIOS input data register 23"]
    pub dinr23: DINR23,
    #[doc = "0x7c - MDIOS input data register 24"]
    pub dinr24: DINR24,
    #[doc = "0x80 - MDIOS input data register 25"]
    pub dinr25: DINR25,
    #[doc = "0x84 - MDIOS input data register 26"]
    pub dinr26: DINR26,
    #[doc = "0x88 - MDIOS input data register 27"]
    pub dinr27: DINR27,
    #[doc = "0x8c - MDIOS input data register 28"]
    pub dinr28: DINR28,
    #[doc = "0x90 - MDIOS input data register 29"]
    pub dinr29: DINR29,
    #[doc = "0x94 - MDIOS input data register 30"]
    pub dinr30: DINR30,
    #[doc = "0x98 - MDIOS input data register 31"]
    pub dinr31: DINR31,
    #[doc = "0x9c - MDIOS output data register 0"]
    pub doutr0: DOUTR0,
    #[doc = "0xa0 - MDIOS output data register 1"]
    pub doutr1: DOUTR1,
    #[doc = "0xa4 - MDIOS output data register 2"]
    pub doutr2: DOUTR2,
    #[doc = "0xa8 - MDIOS output data register 3"]
    pub doutr3: DOUTR3,
    #[doc = "0xac - MDIOS output data register 4"]
    pub doutr4: DOUTR4,
    #[doc = "0xb0 - MDIOS output data register 5"]
    pub doutr5: DOUTR5,
    #[doc = "0xb4 - MDIOS output data register 6"]
    pub doutr6: DOUTR6,
    #[doc = "0xb8 - MDIOS output data register 7"]
    pub doutr7: DOUTR7,
    #[doc = "0xbc - MDIOS output data register 8"]
    pub doutr8: DOUTR8,
    #[doc = "0xc0 - MDIOS output data register 9"]
    pub doutr9: DOUTR9,
    #[doc = "0xc4 - MDIOS output data register 10"]
    pub doutr10: DOUTR10,
    #[doc = "0xc8 - MDIOS output data register 11"]
    pub doutr11: DOUTR11,
    #[doc = "0xcc - MDIOS output data register 12"]
    pub doutr12: DOUTR12,
    #[doc = "0xd0 - MDIOS output data register 13"]
    pub doutr13: DOUTR13,
    #[doc = "0xd4 - MDIOS output data register 14"]
    pub doutr14: DOUTR14,
    #[doc = "0xd8 - MDIOS output data register 15"]
    pub doutr15: DOUTR15,
    #[doc = "0xdc - MDIOS output data register 16"]
    pub doutr16: DOUTR16,
    #[doc = "0xe0 - MDIOS output data register 17"]
    pub doutr17: DOUTR17,
    #[doc = "0xe4 - MDIOS output data register 18"]
    pub doutr18: DOUTR18,
    #[doc = "0xe8 - MDIOS output data register 19"]
    pub doutr19: DOUTR19,
    #[doc = "0xec - MDIOS output data register 20"]
    pub doutr20: DOUTR20,
    #[doc = "0xf0 - MDIOS output data register 21"]
    pub doutr21: DOUTR21,
    #[doc = "0xf4 - MDIOS output data register 22"]
    pub doutr22: DOUTR22,
    #[doc = "0xf8 - MDIOS output data register 23"]
    pub doutr23: DOUTR23,
    #[doc = "0xfc - MDIOS output data register 24"]
    pub doutr24: DOUTR24,
    #[doc = "0x100 - MDIOS output data register 25"]
    pub doutr25: DOUTR25,
    #[doc = "0x104 - MDIOS output data register 26"]
    pub doutr26: DOUTR26,
    #[doc = "0x108 - MDIOS output data register 27"]
    pub doutr27: DOUTR27,
    #[doc = "0x10c - MDIOS output data register 28"]
    pub doutr28: DOUTR28,
    #[doc = "0x110 - MDIOS output data register 29"]
    pub doutr29: DOUTR29,
    #[doc = "0x114 - MDIOS output data register 30"]
    pub doutr30: DOUTR30,
    #[doc = "0x118 - MDIOS output data register 31"]
    pub doutr31: DOUTR31,
}
// NOTE(review): svd2rust-generated register accessor aliases and submodule
// declarations for the MDIOS peripheral; regenerate from the SVD rather than
// editing by hand.
#[doc = "CR (rw) register accessor: MDIOS configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "MDIOS configuration register"]
pub mod cr;
#[doc = "WRFR (r) register accessor: MDIOS write flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wrfr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`wrfr`]
module"]
pub type WRFR = crate::Reg<wrfr::WRFR_SPEC>;
#[doc = "MDIOS write flag register"]
pub mod wrfr;
#[doc = "CWRFR (rw) register accessor: MDIOS clear write flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cwrfr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cwrfr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cwrfr`]
module"]
pub type CWRFR = crate::Reg<cwrfr::CWRFR_SPEC>;
#[doc = "MDIOS clear write flag register"]
pub mod cwrfr;
#[doc = "RDFR (r) register accessor: MDIOS read flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdfr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdfr`]
module"]
pub type RDFR = crate::Reg<rdfr::RDFR_SPEC>;
#[doc = "MDIOS read flag register"]
pub mod rdfr;
#[doc = "CRDFR (rw) register accessor: MDIOS clear read flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crdfr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crdfr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`crdfr`]
module"]
pub type CRDFR = crate::Reg<crdfr::CRDFR_SPEC>;
#[doc = "MDIOS clear read flag register"]
pub mod crdfr;
#[doc = "SR (r) register accessor: MDIOS status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sr`]
module"]
pub type SR = crate::Reg<sr::SR_SPEC>;
#[doc = "MDIOS status register"]
pub mod sr;
#[doc = "CLRFR (rw) register accessor: MDIOS clear flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`clrfr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`clrfr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`clrfr`]
module"]
pub type CLRFR = crate::Reg<clrfr::CLRFR_SPEC>;
#[doc = "MDIOS clear flag register"]
pub mod clrfr;
#[doc = "DINR0 (r) register accessor: MDIOS input data register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr0::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr0`]
module"]
pub type DINR0 = crate::Reg<dinr0::DINR0_SPEC>;
#[doc = "MDIOS input data register 0"]
pub mod dinr0;
#[doc = "DINR1 (r) register accessor: MDIOS input data register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr1::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr1`]
module"]
pub type DINR1 = crate::Reg<dinr1::DINR1_SPEC>;
#[doc = "MDIOS input data register 1"]
pub mod dinr1;
#[doc = "DINR2 (r) register accessor: MDIOS input data register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr2::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr2`]
module"]
pub type DINR2 = crate::Reg<dinr2::DINR2_SPEC>;
#[doc = "MDIOS input data register 2"]
pub mod dinr2;
#[doc = "DINR3 (r) register accessor: MDIOS input data register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr3::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr3`]
module"]
pub type DINR3 = crate::Reg<dinr3::DINR3_SPEC>;
#[doc = "MDIOS input data register 3"]
pub mod dinr3;
#[doc = "DINR4 (r) register accessor: MDIOS input data register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr4::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr4`]
module"]
pub type DINR4 = crate::Reg<dinr4::DINR4_SPEC>;
#[doc = "MDIOS input data register 4"]
pub mod dinr4;
#[doc = "DINR5 (r) register accessor: MDIOS input data register 5\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr5::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr5`]
module"]
pub type DINR5 = crate::Reg<dinr5::DINR5_SPEC>;
#[doc = "MDIOS input data register 5"]
pub mod dinr5;
#[doc = "DINR6 (r) register accessor: MDIOS input data register 6\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr6::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr6`]
module"]
pub type DINR6 = crate::Reg<dinr6::DINR6_SPEC>;
#[doc = "MDIOS input data register 6"]
pub mod dinr6;
#[doc = "DINR7 (r) register accessor: MDIOS input data register 7\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr7::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr7`]
module"]
pub type DINR7 = crate::Reg<dinr7::DINR7_SPEC>;
#[doc = "MDIOS input data register 7"]
pub mod dinr7;
#[doc = "DINR8 (r) register accessor: MDIOS input data register 8\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr8::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr8`]
module"]
pub type DINR8 = crate::Reg<dinr8::DINR8_SPEC>;
#[doc = "MDIOS input data register 8"]
pub mod dinr8;
#[doc = "DINR9 (r) register accessor: MDIOS input data register 9\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr9::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr9`]
module"]
pub type DINR9 = crate::Reg<dinr9::DINR9_SPEC>;
#[doc = "MDIOS input data register 9"]
pub mod dinr9;
#[doc = "DINR10 (r) register accessor: MDIOS input data register 10\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr10::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr10`]
module"]
pub type DINR10 = crate::Reg<dinr10::DINR10_SPEC>;
#[doc = "MDIOS input data register 10"]
pub mod dinr10;
#[doc = "DINR11 (r) register accessor: MDIOS input data register 11\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr11::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr11`]
module"]
pub type DINR11 = crate::Reg<dinr11::DINR11_SPEC>;
#[doc = "MDIOS input data register 11"]
pub mod dinr11;
#[doc = "DINR12 (r) register accessor: MDIOS input data register 12\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr12::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr12`]
module"]
pub type DINR12 = crate::Reg<dinr12::DINR12_SPEC>;
#[doc = "MDIOS input data register 12"]
pub mod dinr12;
#[doc = "DINR13 (r) register accessor: MDIOS input data register 13\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr13::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr13`]
module"]
pub type DINR13 = crate::Reg<dinr13::DINR13_SPEC>;
#[doc = "MDIOS input data register 13"]
pub mod dinr13;
#[doc = "DINR14 (r) register accessor: MDIOS input data register 14\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr14::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr14`]
module"]
pub type DINR14 = crate::Reg<dinr14::DINR14_SPEC>;
#[doc = "MDIOS input data register 14"]
pub mod dinr14;
#[doc = "DINR15 (r) register accessor: MDIOS input data register 15\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr15::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr15`]
module"]
pub type DINR15 = crate::Reg<dinr15::DINR15_SPEC>;
#[doc = "MDIOS input data register 15"]
pub mod dinr15;
#[doc = "DINR16 (r) register accessor: MDIOS input data register 16\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr16::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr16`]
module"]
pub type DINR16 = crate::Reg<dinr16::DINR16_SPEC>;
#[doc = "MDIOS input data register 16"]
pub mod dinr16;
#[doc = "DINR17 (r) register accessor: MDIOS input data register 17\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr17::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr17`]
module"]
pub type DINR17 = crate::Reg<dinr17::DINR17_SPEC>;
#[doc = "MDIOS input data register 17"]
pub mod dinr17;
#[doc = "DINR18 (r) register accessor: MDIOS input data register 18\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr18::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr18`]
module"]
pub type DINR18 = crate::Reg<dinr18::DINR18_SPEC>;
#[doc = "MDIOS input data register 18"]
pub mod dinr18;
#[doc = "DINR19 (r) register accessor: MDIOS input data register 19\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr19::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr19`]
module"]
pub type DINR19 = crate::Reg<dinr19::DINR19_SPEC>;
#[doc = "MDIOS input data register 19"]
pub mod dinr19;
#[doc = "DINR20 (r) register accessor: MDIOS input data register 20\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr20::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr20`]
module"]
pub type DINR20 = crate::Reg<dinr20::DINR20_SPEC>;
#[doc = "MDIOS input data register 20"]
pub mod dinr20;
#[doc = "DINR21 (r) register accessor: MDIOS input data register 21\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr21::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr21`]
module"]
pub type DINR21 = crate::Reg<dinr21::DINR21_SPEC>;
#[doc = "MDIOS input data register 21"]
pub mod dinr21;
#[doc = "DINR22 (r) register accessor: MDIOS input data register 22\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr22::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr22`]
module"]
pub type DINR22 = crate::Reg<dinr22::DINR22_SPEC>;
#[doc = "MDIOS input data register 22"]
pub mod dinr22;
#[doc = "DINR23 (r) register accessor: MDIOS input data register 23\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr23::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr23`]
module"]
pub type DINR23 = crate::Reg<dinr23::DINR23_SPEC>;
#[doc = "MDIOS input data register 23"]
pub mod dinr23;
#[doc = "DINR24 (r) register accessor: MDIOS input data register 24\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr24::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr24`]
module"]
pub type DINR24 = crate::Reg<dinr24::DINR24_SPEC>;
#[doc = "MDIOS input data register 24"]
pub mod dinr24;
#[doc = "DINR25 (r) register accessor: MDIOS input data register 25\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr25::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr25`]
module"]
pub type DINR25 = crate::Reg<dinr25::DINR25_SPEC>;
#[doc = "MDIOS input data register 25"]
pub mod dinr25;
#[doc = "DINR26 (r) register accessor: MDIOS input data register 26\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr26::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr26`]
module"]
pub type DINR26 = crate::Reg<dinr26::DINR26_SPEC>;
#[doc = "MDIOS input data register 26"]
pub mod dinr26;
#[doc = "DINR27 (r) register accessor: MDIOS input data register 27\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr27::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr27`]
module"]
pub type DINR27 = crate::Reg<dinr27::DINR27_SPEC>;
#[doc = "MDIOS input data register 27"]
pub mod dinr27;
#[doc = "DINR28 (r) register accessor: MDIOS input data register 28\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr28::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr28`]
module"]
pub type DINR28 = crate::Reg<dinr28::DINR28_SPEC>;
#[doc = "MDIOS input data register 28"]
pub mod dinr28;
#[doc = "DINR29 (r) register accessor: MDIOS input data register 29\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr29::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr29`]
module"]
pub type DINR29 = crate::Reg<dinr29::DINR29_SPEC>;
#[doc = "MDIOS input data register 29"]
pub mod dinr29;
#[doc = "DINR30 (r) register accessor: MDIOS input data register 30\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr30::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr30`]
module"]
pub type DINR30 = crate::Reg<dinr30::DINR30_SPEC>;
#[doc = "MDIOS input data register 30"]
pub mod dinr30;
#[doc = "DINR31 (r) register accessor: MDIOS input data register 31\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr31::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dinr31`]
module"]
pub type DINR31 = crate::Reg<dinr31::DINR31_SPEC>;
#[doc = "MDIOS input data register 31"]
pub mod dinr31;
#[doc = "DOUTR0 (rw) register accessor: MDIOS output data register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr0::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr0::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr0`]
module"]
pub type DOUTR0 = crate::Reg<doutr0::DOUTR0_SPEC>;
#[doc = "MDIOS output data register 0"]
pub mod doutr0;
#[doc = "DOUTR1 (rw) register accessor: MDIOS output data register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr1`]
module"]
pub type DOUTR1 = crate::Reg<doutr1::DOUTR1_SPEC>;
#[doc = "MDIOS output data register 1"]
pub mod doutr1;
#[doc = "DOUTR2 (rw) register accessor: MDIOS output data register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr2`]
module"]
pub type DOUTR2 = crate::Reg<doutr2::DOUTR2_SPEC>;
#[doc = "MDIOS output data register 2"]
pub mod doutr2;
#[doc = "DOUTR3 (rw) register accessor: MDIOS output data register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr3::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr3::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr3`]
module"]
pub type DOUTR3 = crate::Reg<doutr3::DOUTR3_SPEC>;
#[doc = "MDIOS output data register 3"]
pub mod doutr3;
#[doc = "DOUTR4 (rw) register accessor: MDIOS output data register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr4::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr4::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr4`]
module"]
pub type DOUTR4 = crate::Reg<doutr4::DOUTR4_SPEC>;
#[doc = "MDIOS output data register 4"]
pub mod doutr4;
#[doc = "DOUTR5 (rw) register accessor: MDIOS output data register 5\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr5::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr5::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr5`]
module"]
pub type DOUTR5 = crate::Reg<doutr5::DOUTR5_SPEC>;
#[doc = "MDIOS output data register 5"]
pub mod doutr5;
#[doc = "DOUTR6 (rw) register accessor: MDIOS output data register 6\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr6::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr6::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr6`]
module"]
pub type DOUTR6 = crate::Reg<doutr6::DOUTR6_SPEC>;
#[doc = "MDIOS output data register 6"]
pub mod doutr6;
#[doc = "DOUTR7 (rw) register accessor: MDIOS output data register 7\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr7::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr7::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr7`]
module"]
pub type DOUTR7 = crate::Reg<doutr7::DOUTR7_SPEC>;
#[doc = "MDIOS output data register 7"]
pub mod doutr7;
#[doc = "DOUTR8 (rw) register accessor: MDIOS output data register 8\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr8::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr8::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr8`]
module"]
pub type DOUTR8 = crate::Reg<doutr8::DOUTR8_SPEC>;
#[doc = "MDIOS output data register 8"]
pub mod doutr8;
#[doc = "DOUTR9 (rw) register accessor: MDIOS output data register 9\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr9::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr9::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr9`]
module"]
pub type DOUTR9 = crate::Reg<doutr9::DOUTR9_SPEC>;
#[doc = "MDIOS output data register 9"]
pub mod doutr9;
#[doc = "DOUTR10 (rw) register accessor: MDIOS output data register 10\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr10::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr10::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr10`]
module"]
pub type DOUTR10 = crate::Reg<doutr10::DOUTR10_SPEC>;
#[doc = "MDIOS output data register 10"]
pub mod doutr10;
#[doc = "DOUTR11 (rw) register accessor: MDIOS output data register 11\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr11::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr11::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr11`]
module"]
pub type DOUTR11 = crate::Reg<doutr11::DOUTR11_SPEC>;
#[doc = "MDIOS output data register 11"]
pub mod doutr11;
#[doc = "DOUTR12 (rw) register accessor: MDIOS output data register 12\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr12::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr12::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr12`]
module"]
pub type DOUTR12 = crate::Reg<doutr12::DOUTR12_SPEC>;
#[doc = "MDIOS output data register 12"]
pub mod doutr12;
#[doc = "DOUTR13 (rw) register accessor: MDIOS output data register 13\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr13::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr13::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr13`]
module"]
pub type DOUTR13 = crate::Reg<doutr13::DOUTR13_SPEC>;
#[doc = "MDIOS output data register 13"]
pub mod doutr13;
#[doc = "DOUTR14 (rw) register accessor: MDIOS output data register 14\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr14::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr14::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr14`]
module"]
pub type DOUTR14 = crate::Reg<doutr14::DOUTR14_SPEC>;
#[doc = "MDIOS output data register 14"]
pub mod doutr14;
#[doc = "DOUTR15 (rw) register accessor: MDIOS output data register 15\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr15::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr15::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr15`]
module"]
pub type DOUTR15 = crate::Reg<doutr15::DOUTR15_SPEC>;
#[doc = "MDIOS output data register 15"]
pub mod doutr15;
#[doc = "DOUTR16 (rw) register accessor: MDIOS output data register 16\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr16::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr16::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr16`]
module"]
pub type DOUTR16 = crate::Reg<doutr16::DOUTR16_SPEC>;
#[doc = "MDIOS output data register 16"]
pub mod doutr16;
#[doc = "DOUTR17 (rw) register accessor: MDIOS output data register 17\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr17::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr17::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr17`]
module"]
pub type DOUTR17 = crate::Reg<doutr17::DOUTR17_SPEC>;
#[doc = "MDIOS output data register 17"]
pub mod doutr17;
#[doc = "DOUTR18 (rw) register accessor: MDIOS output data register 18\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr18::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr18::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr18`]
module"]
pub type DOUTR18 = crate::Reg<doutr18::DOUTR18_SPEC>;
#[doc = "MDIOS output data register 18"]
pub mod doutr18;
#[doc = "DOUTR19 (rw) register accessor: MDIOS output data register 19\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr19::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr19::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr19`]
module"]
pub type DOUTR19 = crate::Reg<doutr19::DOUTR19_SPEC>;
#[doc = "MDIOS output data register 19"]
pub mod doutr19;
#[doc = "DOUTR20 (rw) register accessor: MDIOS output data register 20\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr20::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr20::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr20`]
module"]
pub type DOUTR20 = crate::Reg<doutr20::DOUTR20_SPEC>;
#[doc = "MDIOS output data register 20"]
pub mod doutr20;
#[doc = "DOUTR21 (rw) register accessor: MDIOS output data register 21\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr21::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr21::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr21`]
module"]
pub type DOUTR21 = crate::Reg<doutr21::DOUTR21_SPEC>;
#[doc = "MDIOS output data register 21"]
pub mod doutr21;
#[doc = "DOUTR22 (rw) register accessor: MDIOS output data register 22\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr22::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr22::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr22`]
module"]
pub type DOUTR22 = crate::Reg<doutr22::DOUTR22_SPEC>;
#[doc = "MDIOS output data register 22"]
pub mod doutr22;
#[doc = "DOUTR23 (rw) register accessor: MDIOS output data register 23\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr23::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr23::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr23`]
module"]
pub type DOUTR23 = crate::Reg<doutr23::DOUTR23_SPEC>;
#[doc = "MDIOS output data register 23"]
pub mod doutr23;
#[doc = "DOUTR24 (rw) register accessor: MDIOS output data register 24\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr24::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr24::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr24`]
module"]
pub type DOUTR24 = crate::Reg<doutr24::DOUTR24_SPEC>;
#[doc = "MDIOS output data register 24"]
pub mod doutr24;
#[doc = "DOUTR25 (rw) register accessor: MDIOS output data register 25\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr25::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr25::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr25`]
module"]
pub type DOUTR25 = crate::Reg<doutr25::DOUTR25_SPEC>;
#[doc = "MDIOS output data register 25"]
pub mod doutr25;
#[doc = "DOUTR26 (rw) register accessor: MDIOS output data register 26\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr26::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr26::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr26`]
module"]
pub type DOUTR26 = crate::Reg<doutr26::DOUTR26_SPEC>;
#[doc = "MDIOS output data register 26"]
pub mod doutr26;
#[doc = "DOUTR27 (rw) register accessor: MDIOS output data register 27\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr27::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr27::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr27`]
module"]
pub type DOUTR27 = crate::Reg<doutr27::DOUTR27_SPEC>;
#[doc = "MDIOS output data register 27"]
pub mod doutr27;
#[doc = "DOUTR28 (rw) register accessor: MDIOS output data register 28\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr28::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr28::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr28`]
module"]
pub type DOUTR28 = crate::Reg<doutr28::DOUTR28_SPEC>;
#[doc = "MDIOS output data register 28"]
pub mod doutr28;
#[doc = "DOUTR29 (rw) register accessor: MDIOS output data register 29\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr29::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr29::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr29`]
module"]
pub type DOUTR29 = crate::Reg<doutr29::DOUTR29_SPEC>;
#[doc = "MDIOS output data register 29"]
pub mod doutr29;
#[doc = "DOUTR30 (rw) register accessor: MDIOS output data register 30\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr30::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr30::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr30`]
module"]
pub type DOUTR30 = crate::Reg<doutr30::DOUTR30_SPEC>;
#[doc = "MDIOS output data register 30"]
pub mod doutr30;
#[doc = "DOUTR31 (rw) register accessor: MDIOS output data register 31\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr31::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr31::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`doutr31`]
module"]
pub type DOUTR31 = crate::Reg<doutr31::DOUTR31_SPEC>;
#[doc = "MDIOS output data register 31"]
pub mod doutr31;
|
use crate::queue::*;
use crate::responses::*;
use azure_core::errors::AzureError;
use azure_core::prelude::*;
use hyper::StatusCode;
use std::borrow::Cow;
use std::convert::TryInto;
use std::time::Duration;
/// Builder for the Azure Storage Queue "Put Message" operation: posts
/// `message_body` to the queue identified by `queue_client` (see `execute`).
#[derive(Debug, Clone)]
pub struct PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    // Client carrying the storage account and queue name to post to.
    queue_client: &'a QueueClient<C>,
    // Message payload; `Cow` avoids an allocation when the caller passes `&str`.
    message_body: Cow<'a, str>,
    // Optional delay before the enqueued message becomes visible to consumers.
    visibility_timeout: Option<Duration>,
    // Message time-to-live in seconds (default set in `new`).
    message_ttl_seconds: u64,
    // Optional server-side operation timeout, in seconds.
    timeout: Option<u64>,
    // Optional client-generated id sent as a header for request correlation.
    client_request_id: Option<&'a str>,
}
impl<'a, C> PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Creates a builder for posting `message_body` through `queue_client`.
    ///
    /// Defaults: no visibility timeout, no operation timeout, no client
    /// request id, and a message TTL of 25200 seconds.
    pub(crate) fn new<MB: Into<Cow<'a, str>>>(
        queue_client: &'a QueueClient<C>,
        message_body: MB,
    ) -> Self {
        let message_body = message_body.into();
        Self {
            queue_client,
            message_body,
            visibility_timeout: None,
            message_ttl_seconds: 25200,
            timeout: None,
            client_request_id: None,
        }
    }
}
// Mandatory accessor traits: these getters feed the URL/header construction
// performed in `execute` via the `append_to_url`/`add_header` helpers.
impl<'a, C> MessageBodyRequired for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    /// The raw message payload to enqueue.
    fn message_body(&self) -> &str {
        self.message_body.as_ref()
    }
}
impl<'a, C> VisibilityTimeoutOption for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Optional delay before the message becomes visible to consumers.
    fn visibility_timeout(&self) -> Option<Duration> {
        self.visibility_timeout
    }
}
impl<'a, C> MessageTTLRequired for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Message time-to-live, in seconds.
    fn message_ttl_seconds(&self) -> u64 {
        self.message_ttl_seconds
    }
}
impl<'a, C> TimeoutOption for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Optional server-side operation timeout, in seconds.
    fn timeout(&self) -> Option<u64> {
        self.timeout
    }
}
impl<'a, C> ClientRequestIdOption<'a> for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Optional client-generated correlation id.
    fn client_request_id(&self) -> Option<&'a str> {
        self.client_request_id
    }
}
// Fluent setters. Each consumes the builder and returns it with one field
// changed; struct-update syntax (`..self`) replaces the original field-by-field
// rebuilds, so adding a new field to the builder can no longer be silently
// dropped by one of these methods.
impl<'a, C> VisibilityTimeoutSupport for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    /// Sets how long the message stays invisible after being enqueued.
    fn with_visibility_timeout(self, visibility_timeout: Duration) -> Self::O {
        PutMessageBuilder {
            visibility_timeout: Some(visibility_timeout),
            ..self
        }
    }
}
impl<'a, C> MessageTTLSupport for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    /// Sets the message time-to-live, in seconds.
    fn with_message_ttl_seconds(self, message_ttl_seconds: u64) -> Self::O {
        PutMessageBuilder {
            message_ttl_seconds,
            ..self
        }
    }
}
impl<'a, C> TimeoutSupport for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    /// Sets the server-side operation timeout, in seconds.
    fn with_timeout(self, timeout: u64) -> Self::O {
        PutMessageBuilder {
            timeout: Some(timeout),
            ..self
        }
    }
}
impl<'a, C> ClientRequestIdSupport<'a> for PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    /// Sets the client-generated correlation id header value.
    fn with_client_request_id(self, client_request_id: &'a str) -> Self::O {
        PutMessageBuilder {
            client_request_id: Some(client_request_id),
            ..self
        }
    }
}
// Methods callable regardless of which optional fields have been set.
impl<'a, C> PutMessageBuilder<'a, C>
where
    C: Client + Clone,
{
    /// The queue client this builder will post through.
    pub fn queue_client(&self) -> &'a QueueClient<C> {
        self.queue_client
    }
    /// Performs the Put Message call: POSTs the XML-wrapped body to
    /// `<queue_uri>/<queue_name>/messages` and parses the service response.
    ///
    /// # Errors
    /// Returns `AzureError` on URL construction failure, transport failure,
    /// a non-201 status, or a malformed response body.
    pub async fn execute(self) -> Result<PutMessageResponse, AzureError> {
        let mut url = url::Url::parse(&format!(
            "{}/{}/messages",
            self.queue_client.storage_client().queue_uri(),
            self.queue_client.queue_name()
        ))?;
        // Append the optional query parameters (TTL, visibility, timeout).
        MessageTTLRequired::append_to_url(&self, &mut url);
        VisibilityTimeoutOption::append_to_url(&self, &mut url);
        TimeoutOption::append_to_url(&self, &mut url);
        debug!("url == {:?}", url);
        // since the format is fixed we just decorate the message with the tags.
        // This could be made optional in the future and/or more
        // stringent.
        let message = format!(
            "<QueueMessage><MessageText>{}</MessageText></QueueMessage>",
            self.message_body()
        );
        debug!("message about to be posted == {}", message);
        let perform_request_response = self.queue_client.storage_client().perform_request(
            url.as_str(),
            &http::Method::POST,
            &|mut request| {
                // Attach the optional x-ms-client-request-id header.
                request = ClientRequestIdOption::add_header(&self, request);
                request
            },
            Some(message.as_bytes()),
        )?;
        // The service replies 201 Created on success.
        let (headers, body) = perform_request_response
            .check_status_extract_headers_and_body(StatusCode::CREATED)
            .await?;
        (&headers, &body as &[u8]).try_into()
    }
}
|
use std::borrow::Cow;
use crate::item::Item;
use crate::item::RenderContext;
use crate::util;
use eyre::{eyre, Result};
/// Site-generation item that compiles the SCSS entry point into plain CSS.
pub struct Sass;

impl Item for Sass {
    /// Compiles `css/main.scss` and writes the result to `css/main.css`
    /// inside the configured output directory.
    fn render(&self, ctx: &RenderContext) -> Result<()> {
        let compiled = grass::from_path("css/main.scss", &grass::Options::default())
            .map_err(|err| eyre!("Sass error: {}", err))?;
        let target = ctx.output_dir.join("css/main.css");
        util::write_to_file(&target, &compiled)
    }

    /// Stable identifier for this item.
    fn id(&self) -> Cow<str> {
        Cow::from("sass")
    }
}
|
#[macro_use]
extern crate log;
use env_logger::Env;
extern crate clap;
use clap::{App, Arg};
#[macro_use]
extern crate serde;
use std::fs;
use std::net::TcpListener;
use std::sync::Arc;
use std::thread;
use crate::server_state::ServerState;
mod commands;
mod conductor;
mod csv_utils;
mod database_state;
mod handler;
mod server_state;
mod sql_parser;
/// Re-export Storage manager here for this crate to use. This allows us to change
/// the storage manager by changing one use statement.
pub use memstore::storage_manager::StorageManager;
/// Server configuration, either deserialized from a JSON config file or
/// assembled from the individual command-line arguments in `main`.
#[derive(Deserialize, Debug)]
struct ServerConfig {
    // IP address the server binds to.
    host: String,
    // TCP port, kept as a string so it can be spliced into the bind address.
    port: String,
    // Directory where the database is persisted (default "persist/db/").
    db_path: String,
    // Directory passed as the second ServerState path (default "persist/table/");
    // presumably heap-file/table storage — the CLI help text is "????", TODO confirm.
    hf_path: String,
}
/// Entry point for server.
///
/// Waits for user connections and creates a new thread for each connection.
fn main() {
    // Configure log environment (level `debug` unless RUST_LOG overrides it).
    env_logger::from_env(Env::default().default_filter_or("debug")).init();
    let matches = App::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .author(env!("CARGO_PKG_AUTHORS"))
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .arg(
            Arg::with_name("config")
                .short("c")
                .long("config")
                .value_name("FILE")
                .help("Sets a custom config file")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("host")
                .short("h")
                .long("host")
                .value_name("host")
                .default_value("127.0.0.1")
                .help("Server IP address")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("port")
                .short("p")
                .long("port")
                .value_name("port")
                .default_value("3333")
                .help("Server port number")
                .takes_value(true),
        )
        .arg(
            // NOTE(review): clap v2 `short` keeps only the first character, so
            // this registers `-d`, not `-db` — confirm this is intended.
            Arg::with_name("db_path")
                .short("db")
                .long("db_path")
                .value_name("db_path")
                .default_value("persist/db/")
                .help("Path where DB is stored")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("hf_path")
                .long("hf_path")
                .value_name("hf_path")
                .default_value("persist/table/")
                .help("????")
                .takes_value(true),
        )
        .get_matches();
    // A JSON config file, when provided, takes precedence over the flags.
    let config = if let Some(config_path) = matches.value_of("config") {
        let contents = fs::read_to_string(config_path).expect("unable to read config file");
        serde_json::from_str(&contents).expect("unable to parse config file as JSON")
    } else {
        // These flags all declare defaults, so `unwrap` cannot fail here.
        ServerConfig {
            host: matches.value_of("host").unwrap().to_string(),
            port: matches.value_of("port").unwrap().to_string(),
            db_path: matches.value_of("db_path").unwrap().to_string(),
            hf_path: matches.value_of("hf_path").unwrap().to_string(),
        }
    };
    info!("Starting crustydb... {:?}", config);
    let server_state = Arc::new(ServerState::new(config.db_path, config.hf_path).unwrap());
    // Build "host:port" in one shot instead of pushing pieces onto a String.
    let bind_addr = format!("{}:{}", config.host, config.port);
    let listener = TcpListener::bind(&bind_addr).unwrap();
    // Accept connections and process them on independent threads.
    info!(
        "Server listening with host {} on port {}",
        config.host, config.port
    );
    for stream in listener.incoming() {
        match stream {
            Ok(stream) => {
                debug!("New connection: {}", stream.peer_addr().unwrap());
                let server_state = Arc::clone(&server_state);
                let _handler = thread::spawn(move || {
                    // Connection succeeded.
                    handler::handle_client_request(stream, server_state);
                });
            }
            Err(e) => {
                // Connection failed.
                error!("Error: {}", e);
            }
        }
    }
    // Close the socket server.
    drop(listener);
}
|
//! `Tensor` is the equivalent of Tensorflow Tensor.
use ndarray::prelude::*;
use std::fmt;
#[cfg(feature = "serialize")]
use serde::ser::{Serialize, Serializer};
/// Element types a tensor can hold, mirroring TensorFlow's protobuf
/// `DataType` (see `from_pb`/`to_pb`).
#[derive(Debug, Copy, Clone, PartialEq)]
#[cfg_attr(feature = "serialize", derive(Serialize))]
pub enum DataType {
    U8,
    I8,
    I32,
    F32,
    F64,
    String,
}
impl DataType {
    /// Maps a protobuf `DataType` onto ours, erroring for unsupported kinds.
    pub fn from_pb(t: &::tfpb::types::DataType) -> ::Result<DataType> {
        use tfpb::types::DataType as Tfpb;
        match *t {
            Tfpb::DT_UINT8 => Ok(DataType::U8),
            Tfpb::DT_INT8 => Ok(DataType::I8),
            Tfpb::DT_INT32 => Ok(DataType::I32),
            Tfpb::DT_FLOAT => Ok(DataType::F32),
            Tfpb::DT_DOUBLE => Ok(DataType::F64),
            Tfpb::DT_STRING => Ok(DataType::String),
            _ => Err(format!("Unknown DataType {:?}", t))?,
        }
    }
    /// Maps our type tag back onto the protobuf `DataType`.
    pub fn to_pb(&self) -> ::tfpb::types::DataType {
        use tfpb::types::DataType as Tfpb;
        match *self {
            DataType::U8 => Tfpb::DT_UINT8,
            DataType::I8 => Tfpb::DT_INT8,
            DataType::I32 => Tfpb::DT_INT32,
            DataType::F32 => Tfpb::DT_FLOAT,
            DataType::F64 => Tfpb::DT_DOUBLE,
            DataType::String => Tfpb::DT_STRING,
        }
    }
}
/// Types usable as tensor elements, with conversions to and from `Tensor`.
/// Implemented for f64/f32/i32/u8/i8 by the `tensor!` macro below.
pub trait Datum:
    Copy
    + Clone
    + Send
    + Sync
    + fmt::Debug
    + Default
    + 'static
    + ::num_traits::Zero
    + ::num_traits::One
    + ::ndarray::LinalgScalar
    + ::std::ops::AddAssign
    + ::std::ops::MulAssign
    + ::std::ops::DivAssign
    + ::std::ops::SubAssign
    + ::std::ops::RemAssign
{
    /// Human-readable element type name (e.g. "f32").
    fn name() -> &'static str;
    /// The matching `DataType` tag.
    fn datatype() -> DataType;
    /// Consumes a tensor, returning its array; errors on a variant mismatch.
    fn tensor_into_array(m: Tensor) -> ::Result<ArrayD<Self>>;
    /// Borrows a tensor's array as a view; errors on a variant mismatch.
    fn tensor_to_view(m: &Tensor) -> ::Result<ArrayViewD<Self>>;
    /// Wraps an owned array into the matching `Tensor` variant.
    fn array_into_tensor(m: ArrayD<Self>) -> Tensor;
}
/// Dynamically-typed n-dimensional array: one variant per supported element
/// type, each holding a dynamic-rank ndarray.
#[derive(Clone, PartialEq)]
pub enum Tensor {
    F32(ArrayD<f32>),
    F64(ArrayD<f64>),
    I32(ArrayD<i32>),
    I8(ArrayD<i8>),
    U8(ArrayD<u8>),
    // NOTE(review): string tensors are stored as raw i8 storage here, not as
    // actual strings — confirm this representation is intended.
    String(ArrayD<i8>),
}
impl Tensor {
    /// Builds a `Tensor` from a TensorFlow `TensorProto`.
    ///
    /// Dense payloads may arrive either as packed bytes in `tensor_content`
    /// or in the typed `int_val`/`float_val` fields; both are handled.
    /// Only DT_FLOAT and DT_INT32 are implemented so far.
    pub fn from_pb(t: &::tfpb::tensor::TensorProto) -> ::Result<Tensor> {
        use tfpb::types::DataType::*;
        let dtype = t.get_dtype();
        let shape = t.get_tensor_shape();
        let dims = shape
            .get_dim()
            .iter()
            .map(|d| d.size as usize)
            .collect::<Vec<_>>();
        let rank = dims.len();
        let content = t.get_tensor_content();
        // Prefer the packed byte buffer when present, else the typed value list.
        let mat: Tensor = if content.len() != 0 {
            match dtype {
                DT_FLOAT => Self::from_content::<f32, u8>(dims, content)?.into(),
                DT_INT32 => Self::from_content::<i32, u8>(dims, content)?.into(),
                _ => unimplemented!("missing type"),
            }
        } else {
            match dtype {
                DT_INT32 => Self::from_content::<i32, i32>(dims, t.get_int_val())?.into(),
                DT_FLOAT => Self::from_content::<f32, f32>(dims, t.get_float_val())?.into(),
                _ => unimplemented!("missing type"),
            }
        };
        // Sanity check: the produced tensor must keep the proto's rank.
        assert_eq!(rank, mat.shape().len());
        Ok(mat)
    }
    /// Reinterprets `content` (`&[V]`) as a `&[T]` and copies the elements
    /// into an array of shape `dims`.
    ///
    /// NOTE(review): the pointer cast assumes `content` is adequately aligned
    /// for `T` and matches the host's endianness — confirm for inputs coming
    /// from other platforms.
    pub fn from_content<T: Copy, V: Copy>(dims: Vec<usize>, content: &[V]) -> ::Result<ArrayD<T>> {
        // The element count is rescaled by size_of::<V>()/size_of::<T>() so the
        // byte span covered is unchanged; data is copied out immediately below.
        let value: &[T] = unsafe {
            ::std::slice::from_raw_parts(
                content.as_ptr() as _,
                content.len() * ::std::mem::size_of::<V>() / ::std::mem::size_of::<T>(),
            )
        };
        Ok(Array1::from_iter(value.iter().cloned())
            .into_shape(dims)?
            .into_dyn())
    }
    /// Serializes this tensor into a TensorFlow `TensorProto`.
    /// Only the F32/F64/I32 variants are implemented.
    pub fn to_pb(&self) -> ::Result<::tfpb::tensor::TensorProto> {
        let mut shape = ::tfpb::tensor_shape::TensorShapeProto::new();
        let dims = self.shape()
            .iter()
            .map(|d| {
                let mut dim = ::tfpb::tensor_shape::TensorShapeProto_Dim::new();
                dim.size = *d as _;
                dim
            })
            .collect();
        shape.set_dim(::protobuf::RepeatedField::from_vec(dims));
        let mut tensor = ::tfpb::tensor::TensorProto::new();
        tensor.set_tensor_shape(shape);
        // Values go into the typed `*_val` field matching the variant.
        match self {
            &Tensor::F32(ref it) => {
                tensor.set_dtype(DataType::F32.to_pb());
                tensor.set_float_val(it.iter().cloned().collect());
            }
            &Tensor::F64(ref it) => {
                tensor.set_dtype(DataType::F64.to_pb());
                tensor.set_double_val(it.iter().cloned().collect());
            }
            &Tensor::I32(ref it) => {
                tensor.set_dtype(DataType::I32.to_pb());
                tensor.set_int_val(it.iter().cloned().collect());
            }
            _ => unimplemented!("missing type"),
        }
        Ok(tensor)
    }
    /// Shape of the underlying array, one extent per dimension.
    /// Panics (`unimplemented!`) for the String variant.
    pub fn shape(&self) -> &[usize] {
        match self {
            &Tensor::F64(ref it) => it.shape(),
            &Tensor::F32(ref it) => it.shape(),
            &Tensor::I32(ref it) => it.shape(),
            &Tensor::I8(ref it) => it.shape(),
            &Tensor::U8(ref it) => it.shape(),
            _ => unimplemented!("missing type"),
        }
    }
    /// Element type tag for this tensor's variant.
    /// Panics (`unimplemented!`) for the String variant.
    pub fn datatype(&self) -> DataType {
        match self {
            &Tensor::F64(_) => DataType::F64,
            &Tensor::F32(_) => DataType::F32,
            &Tensor::I32(_) => DataType::I32,
            &Tensor::I8(_) => DataType::I8,
            &Tensor::U8(_) => DataType::U8,
            _ => unimplemented!("missing type"),
        }
    }
    /// Compact human-readable rendering: scalars and tensors of up to 8
    /// elements print their values, larger ones only shape and dtype.
    pub fn partial_dump(&self, _single_line: bool) -> ::Result<String> {
        if self.shape().len() == 0 {
            // Rank 0: print the single scalar value.
            Ok(match self {
                &Tensor::I32(ref a) => format!(
                    "Scalar {:?} {:?}",
                    self.datatype(),
                    a.as_slice().unwrap()[0]
                ),
                &Tensor::F32(ref a) => format!(
                    "Scalar {:?} {:?}",
                    self.datatype(),
                    a.as_slice().unwrap()[0]
                ),
                &Tensor::U8(ref a) => format!(
                    "Scalar {:?} {:?}",
                    self.datatype(),
                    a.as_slice().unwrap()[0]
                ),
                _ => unimplemented!("missing type"),
            })
        } else if self.shape().iter().product::<usize>() > 8 {
            // Too many elements: summarize with shape and dtype only.
            Ok(format!("shape:{:?} {:?}", self.shape(), self.datatype()))
        } else {
            // Small tensor: print all values, flattened onto one line.
            Ok(match self {
                &Tensor::I32(ref a) => format!("{:?} {:?}", self.datatype(), a).replace("\n", " "),
                &Tensor::F32(ref a) => format!("{:?} {:?}", self.datatype(), a).replace("\n", " "),
                &Tensor::U8(ref a) => format!("{:?} {:?}", self.datatype(), a).replace("\n", " "),
                _ => unimplemented!("missing type"),
            })
        }
    }
    /// Converts I32 tensors to F32 (clones F32 as-is); used by `close_enough`.
    fn to_f32(&self) -> Tensor {
        match self {
            &Tensor::I32(ref data) => Tensor::F32(data.map(|&a| a as f32)),
            &Tensor::F32(_) => self.clone(),
            _ => unimplemented!("missing type"),
        }
    }
    /// Approximate equality: shapes match and every element pair differs by
    /// at most a margin derived from `self`'s spread (stddev/10, floored by
    /// |mean|/10000).
    pub fn close_enough(&self, other: &Self) -> bool {
        let ma = self.to_f32().take_f32s().unwrap();
        let mb = other.to_f32().take_f32s().unwrap();
        let avg = ma.iter().map(|&a| a.abs()).sum::<f32>() / ma.len() as f32;
        let dev = (ma.iter().map(|&a| (a - avg).powi(2)).sum::<f32>() / ma.len() as f32).sqrt();
        let margin = (dev / 10.0).max(avg.abs() / 10_000.0);
        ma.shape() == mb.shape()
            && mb.iter()
                .zip(ma.iter())
                .all(|(&a, &b)| (b - a).abs() <= margin)
    }
}
impl fmt::Debug for Tensor {
    /// Renders via the compact `partial_dump`, falling back to "Error" when
    /// the dump itself fails.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let content = self
            .partial_dump(true)
            .unwrap_or_else(|_| "Error".to_string());
        write!(formatter, "Tensor {}", content)
    }
}
/// Fallible conversion from `T`, returning `None` on a mismatch.
pub trait CastFrom<T>
where
    Self: Sized,
{
    fn cast_from(value: T) -> Option<Self>;
}
/// Reciprocal of [`CastFrom`]; provided for free by the blanket impl below.
pub trait CastInto<U> {
    fn cast_into(self) -> Option<U>;
}
// Blanket impl: any `U: CastFrom<T>` automatically gives `T: CastInto<U>`,
// mirroring the std `From`/`Into` relationship.
impl<T, U> CastInto<U> for T
where
    U: CastFrom<T>,
{
    fn cast_into(self) -> Option<U> {
        U::cast_from(self)
    }
}
#[cfg(feature = "serialize")]
impl Serialize for Tensor {
    /// Serializes as a `(type-name, shape, flat-values)` triple.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Shared body for every variant: stringified element type, the shape,
        // and the elements flattened in iteration order.
        macro_rules! serialize_inner {
            ($type:ident, $m:ident) => {{
                let data = (
                    stringify!($type),
                    self.shape(),
                    $m.iter().cloned().collect::<Vec<_>>(),
                );
                data.serialize(serializer)
            }};
        };
        use Tensor::*;
        match self {
            F32(m) => serialize_inner!(f32, m),
            F64(m) => serialize_inner!(f64, m),
            I32(m) => serialize_inner!(i32, m),
            I8(m) => serialize_inner!(i8, m),
            U8(m) => serialize_inner!(u8, m),
            // String tensors are tagged "str" but carry the raw i8 storage.
            String(m) => serialize_inner!(str, m),
        }
    }
}
// Generates, per element type: `From<Array<_>>` for Tensor, borrowing/consuming
// accessors (`$as`/`$take`), a shape+slice constructor (`$make`), a
// `CastFrom<Tensor>` impl, and the `Datum` impl wiring it all together.
macro_rules! tensor {
    ($t:ident, $v:ident, $as:ident, $take:ident, $make:ident) => {
        impl<D: ::ndarray::Dimension> From<Array<$t, D>> for Tensor {
            // Wrap any-dimensional array into the matching dynamic-rank variant.
            fn from(it: Array<$t, D>) -> Tensor {
                Tensor::$v(it.into_dyn())
            }
        }
        impl Tensor {
            /// Borrow the inner array if this tensor holds the right variant.
            pub fn $as(&self) -> Option<&ArrayD<$t>> {
                if let &Tensor::$v(ref it) = self {
                    Some(it)
                } else {
                    None
                }
            }
            /// Consume the tensor, returning the inner array on a variant match.
            pub fn $take(self) -> Option<ArrayD<$t>> {
                if let Tensor::$v(it) = self {
                    Some(it)
                } else {
                    None
                }
            }
            /// Build a tensor from a shape and a flat slice of values.
            pub fn $make(shape: &[usize], values: &[$t]) -> ::Result<Tensor> {
                Ok(Array::from_shape_vec(shape, values.to_vec())?.into())
            }
        }
        impl CastFrom<Tensor> for ArrayD<$t> {
            // `None` when the tensor holds a different element type.
            fn cast_from(mat: Tensor) -> Option<ArrayD<$t>> {
                if let Tensor::$v(it) = mat {
                    Some(it)
                } else {
                    None
                }
            }
        }
        impl Datum for $t {
            fn name() -> &'static str {
                stringify!($t)
            }
            fn datatype() -> DataType {
                DataType::$v
            }
            fn tensor_into_array(m: Tensor) -> ::Result<ArrayD<Self>> {
                m.$take().ok_or("unmatched data type".into())
            }
            fn tensor_to_view(m: &Tensor) -> ::Result<ArrayViewD<Self>> {
                m.$as()
                    .map(|m| m.view())
                    .ok_or("unmatched data type".into())
            }
            fn array_into_tensor(m: ArrayD<Self>) -> Tensor {
                Tensor::from(m)
            }
        }
    };
}
// Instantiate the conversion/accessor machinery for every numeric element type
// (the String variant intentionally gets none of these).
tensor!(f64, F64, as_f64s, take_f64s, f64s);
tensor!(f32, F32, as_f32s, take_f32s, f32s);
tensor!(i32, I32, as_i32s, take_i32s, i32s);
tensor!(u8, U8, as_u8s, take_u8s, u8s);
tensor!(i8, I8, as_i8s, take_i8s, i8s);
/// Applies `$return` (an expression over the bound `$array`) to whichever
/// variant `$tensor` holds, rewrapping the result in the same variant.
#[macro_export]
macro_rules! map_tensor {
    ($tensor:expr, | $array:ident | $return:expr) => {{
        use Tensor::*;
        match $tensor {
            F64($array) => F64($return),
            F32($array) => F32($return),
            I32($array) => I32($return),
            I8($array) => I8($return),
            U8($array) => U8($return),
            String($array) => String($return),
        }
    }};
}
|
use super::{api::*, util::*};
use crate::{Error, PrefixContext};
use std::borrow::Cow;
/// Run code and detect undefined behavior using Miri
#[poise::command(
    prefix_command,
    track_edits,
    broadcast_typing,
    explanation_fn = "miri_help"
)]
pub async fn miri(
    ctx: PrefixContext<'_>,
    flags: poise::KeyValueArgs,
    code: poise::CodeBlock,
) -> Result<(), Error> {
    // Wrap bare snippets in `fn main`, discarding the result expression.
    let code = &maybe_wrap(&code.code, ResultHandling::Discard);
    let (flags, flag_parse_errors) = parse_flags(flags);

    // Submit the code to the official playground's Miri endpoint.
    let mut result: PlayResult = ctx
        .data
        .http
        .post("https://play.rust-lang.org/miri")
        .json(&MiriRequest {
            code,
            edition: flags.edition,
        })
        .send()
        .await?
        .json()
        .await?;

    // Keep only the lines between the cargo "Running" banner and the final
    // "error: aborting" marker, dropping build boilerplate.
    result.stderr = extract_relevant_lines(
        &result.stderr,
        &["Running `/playground"],
        &["error: aborting"],
    )
    .to_owned();

    send_reply(ctx, result, code, &flags, &flag_parse_errors).await
}
/// Builds the `?help miri` text from the shared help template.
pub fn miri_help() -> String {
    generic_help(GenericHelp {
        command: "miri",
        desc: "Execute this program in the Miri interpreter to detect certain cases of undefined \
               behavior (like out-of-bounds memory access)",
        mode_and_channel: false,
        // Playgrounds sends miri warnings/errors and output in the same field so we can't filter
        // warnings out
        warn: false,
        example_code: "code",
    })
}
/// Expand macros to their raw desugared form
#[poise::command(
    prefix_command,
    broadcast_typing,
    track_edits,
    explanation_fn = "expand_help"
)]
pub async fn expand(
    ctx: PrefixContext<'_>,
    flags: poise::KeyValueArgs,
    code: poise::CodeBlock,
) -> Result<(), Error> {
    let code = maybe_wrap(&code.code, ResultHandling::None);
    // `maybe_wrap` only allocates (returns Cow::Owned) when it had to add an
    // `fn main` wrapper, so ownership doubles as the "was wrapped" flag.
    let was_fn_main_wrapped = matches!(code, Cow::Owned(_));
    let (flags, flag_parse_errors) = parse_flags(flags);

    // Ask the playground's macro-expansion endpoint for the desugared source.
    let mut result: PlayResult = ctx
        .data
        .http
        .post("https://play.rust-lang.org/macro-expansion")
        .json(&MacroExpansionRequest {
            code: &code,
            edition: flags.edition,
        })
        .send()
        .await?
        .json()
        .await?;

    // Drop cargo boilerplate from stderr, keeping the interesting middle.
    result.stderr = extract_relevant_lines(
        &result.stderr,
        &["Finished ", "Compiling playground"],
        &["error: aborting"],
    )
    .to_owned();

    if result.success {
        // Best effort: pretty-print the expansion; on failure keep raw output.
        match apply_online_rustfmt(ctx, &result.stdout, flags.edition).await {
            Ok(PlayResult { success: true, stdout, .. }) => result.stdout = stdout,
            Ok(PlayResult { success: false, stderr, .. }) => log::warn!("Huh, rustfmt failed even though this code successfully passed through macro expansion before: {}", stderr),
            Err(e) => log::warn!("Couldn't run rustfmt: {}", e),
        }
    }
    if was_fn_main_wrapped {
        result.stdout = strip_fn_main_boilerplate_from_formatted(&result.stdout);
    }

    send_reply(ctx, result, &code, &flags, &flag_parse_errors).await
}
/// Builds the `?help expand` text from the shared help template.
pub fn expand_help() -> String {
    generic_help(GenericHelp {
        command: "expand",
        desc: "Expand macros to their raw desugared form",
        mode_and_channel: false,
        warn: false,
        example_code: "code",
    })
}
/// Catch common mistakes using the Clippy linter
#[poise::command(
    prefix_command,
    broadcast_typing,
    track_edits,
    explanation_fn = "clippy_help"
)]
pub async fn clippy(
    ctx: PrefixContext<'_>,
    flags: poise::KeyValueArgs,
    code: poise::CodeBlock,
) -> Result<(), Error> {
    let code = &maybe_wrap(&code.code, ResultHandling::Discard);
    let (flags, flag_parse_errors) = parse_flags(flags);

    // Lint as a binary when the snippet supplies its own entry point,
    // otherwise as a library so missing `fn main` is not an error.
    let crate_type = if code.contains("fn main") {
        CrateType::Binary
    } else {
        CrateType::Library
    };
    let request = ClippyRequest {
        code,
        edition: flags.edition,
        crate_type,
    };

    // Run the snippet through the playground's clippy endpoint.
    let response = ctx
        .data
        .http
        .post("https://play.rust-lang.org/clippy")
        .json(&request)
        .send()
        .await?;
    let mut result: PlayResult = response.json().await?;

    // Strip cargo boilerplate, keeping only lint diagnostics.
    result.stderr = extract_relevant_lines(
        &result.stderr,
        &["Checking playground", "Running `/playground"],
        &[
            "error: aborting",
            "1 warning emitted",
            "warnings emitted",
            "Finished ",
        ],
    )
    .to_owned();

    send_reply(ctx, result, code, &flags, &flag_parse_errors).await
}
/// Builds the `?help clippy` text from the shared help template.
pub fn clippy_help() -> String {
    generic_help(GenericHelp {
        command: "clippy",
        desc: "Catch common mistakes and improve the code using the Clippy linter",
        mode_and_channel: false,
        warn: false,
        example_code: "code",
    })
}
/// Format code using rustfmt
#[poise::command(
    prefix_command,
    broadcast_typing,
    track_edits,
    explanation_fn = "fmt_help"
)]
pub async fn fmt(
    ctx: PrefixContext<'_>,
    flags: poise::KeyValueArgs,
    code: poise::CodeBlock,
) -> Result<(), Error> {
    let (flags, flag_parse_errors) = parse_flags(flags);
    let code = &maybe_wrap(&code.code, ResultHandling::None);
    // An Owned cow means maybe_wrap injected an `fn main` wrapper that must be
    // stripped from the formatted output again.
    let was_fn_main_wrapped = match code {
        Cow::Owned(_) => true,
        Cow::Borrowed(_) => false,
    };

    let mut formatted = apply_online_rustfmt(ctx, code, flags.edition).await?;
    if was_fn_main_wrapped {
        formatted.stdout = strip_fn_main_boilerplate_from_formatted(&formatted.stdout);
    }

    send_reply(ctx, formatted, code, &flags, &flag_parse_errors).await
}
/// Builds the `?help fmt` text from the shared help template.
pub fn fmt_help() -> String {
    generic_help(GenericHelp {
        command: "fmt",
        desc: "Format code using rustfmt",
        mode_and_channel: false,
        warn: false,
        example_code: "code",
    })
}
|
mod ui;
use std::{fs, path::{Path, PathBuf}};
use std::sync::{Arc, Mutex};
use console::Term;
use dialoguer::Select;
use dialoguer::theme::ColorfulTheme;
use clap::{App, Arg, ArgMatches};
use notto::{Notto, io::browser::NottoPath};
use notto::models::note::Note;
use notto::errors::NottoError;
use notto::finder::FindCondition;
use notto::finder::NoteFindMessage;
/// CLI entry point: declares the `new`, `open` and `find` subcommands and
/// dispatches to the matching handler.
fn main() {
    let matches = App::new("notto")
        .author("Nico")
        .subcommand(App::new("new")
            .about("Creates a new note, you can add the note name in the note hierarchy. Examples:\n`notto new`\n`notto new meeting_minutes`\n`notto new work/resources`")
            .arg(Arg::new("name")
                .about("Name of the note file, no need to add extension, you can use the note/subnote/notename to nest notes")
                .index(1))
            .arg(Arg::new("journal")
                .about("Add an entry under a Y/M/D directory structure")
                .short('j')
                .long("journal")
                .required(false)
                .takes_value(false))
        )
        .subcommand(App::new("open")
            .about("Opens a note"))
        .subcommand(App::new("find")
            .about("Finds a note")
            .arg(Arg::new("text")
                .about("Text to find in the note")
                .index(1)
                .required(true)))
        .get_matches();

    // `open`/`find` return Results that are intentionally discarded here;
    // errors surface only via their own printing/interaction.
    match matches.subcommand() {
        Some(("new", matches)) => new(matches),
        Some(("open", matches)) => { open(matches); },
        Some(("find", matches)) => { find(matches); },
        Some(_) => {}
        None => {}
    };
}
/// Handles `notto new`: creates a note (or, with `-j`, a journal entry) and
/// prints the saved path or the error.
fn new(matches: &ArgMatches) {
    // Report initialization failures instead of panicking (was `.unwrap()`),
    // matching the error-reporting style of the other messages here.
    let notto = match Notto::new() {
        Ok(notto) => notto,
        Err(e) => {
            println!("Error creating note: {}", e);
            return;
        }
    };
    let note_name = matches.value_of("name");

    // `--journal` files the entry under a Y/M/D directory structure.
    let result = if matches.is_present("journal") {
        notto.create_journal_entry(note_name)
    } else {
        notto.create_or_open_note_at(note_name)
    };
    match result {
        Ok(path) => println!("Saved note at {}", path.to_string_lossy()),
        Err(e) => println!("Error creating note: {}", e),
    }
}
/// Handles `notto open`: interactively browse from the root of the note tree
/// and open the selected note.
fn open(matches: &ArgMatches) -> Result<(), NottoError> {
    let notto = Notto::new()?;
    // Fixed: `&notto` had been corrupted to `¬to` (the `&not` sequence was
    // mis-decoded as the `¬` character), which does not compile.
    if let Some(note_path) = display_selection_for_path(&notto, &NottoPath::new())? {
        notto.open_by_path(note_path)?;
    }
    Ok(())
}
/// Handles `notto find`: searches notes containing the given text and shows a
/// selection prompt for the results.
fn find(matches: &ArgMatches) -> Result<(), NottoError> {
    let notto = Notto::new()?;
    let find_text = matches.value_of("text");
    if let Some(text) = find_text {
        let mut conditions = vec![];
        conditions.push(FindCondition::Text(text.to_string()));
        // `find` streams results back over a channel.
        let rx = notto.find(conditions)?;
        let theme = ColorfulTheme::default();
        let mut selection = Select::with_theme(&theme);
        // NOTE(review): this looks unfinished — only one message is polled
        // (`try_recv`, non-blocking) and the result is never added to the
        // selection (the `selection.item(...)` call is commented out), so the
        // prompt is shown with no items.
        match rx.try_recv() {
            Ok(msg) => {
                if let NoteFindMessage::Result(note_result) = msg {
                    /*selection.item(PathEntry {
                        name: note_result.note.get_title(),
                        path: note_result.path,
                        is_dir: false});*/
                }
            },
            Err(e) => {}
        }
        selection.interact_on_opt(&Term::stderr())?;
    }
    Ok(())
}
/// Shows a selection prompt for the entries under `path`; recurses into
/// directories and returns the chosen note's path (or `None` if dismissed).
fn display_selection_for_path(notto: &Notto, path: &NottoPath) -> Result<Option<NottoPath>, NottoError> {
    let prompt: String = path.into();
    let items = notto.browse(path)?;

    let choice = Select::with_theme(&ColorfulTheme::default())
        .with_prompt(prompt)
        .items(&items)
        .default(0)
        .interact_on_opt(&Term::stderr())?;

    // Dismissed prompt or out-of-range index both mean "nothing chosen".
    let picked = match choice.and_then(|i| items.get(i)) {
        Some(item) => item,
        None => return Ok(None),
    };

    if picked.is_dir() {
        // Descend one level and prompt again.
        display_selection_for_path(notto, &picked.path)
    } else {
        Ok(Some(picked.path.clone()))
    }
}
|
use tiny_http::{Server, Response};
/// Minimal HTTP server: answers every request on port 8080 with "hello world".
fn main() {
    let server = Server::http("0.0.0.0:8080").expect("failed to bind 0.0.0.0:8080");
    for request in server.incoming_requests() {
        println!("received request! : {:?}", request.url());
        let response = Response::from_string("hello world");
        // A failed respond (e.g. the client hung up mid-reply) used to
        // `unwrap()` and crash the whole server; log and keep serving instead.
        if let Err(e) = request.respond(response) {
            eprintln!("failed to respond: {}", e);
        }
    }
}
|
use std::time::Duration;
/// A single movement direction for the bot.
#[derive(Clone, Copy, Debug)]
pub enum Move {
    Up, Down, Left, Right
}

/// One game setting received from the engine; each variant carries its
/// parsed payload (durations, ids, field dimensions, player names).
#[derive(Clone, Debug)]
pub enum Setting {
    Timebank(Duration),
    TimePerMove(Duration),
    BotName(String),
    BotId(u32),
    FieldWidth(u32),
    FieldHeight(u32),
    PlayerNames { player1: String, player2: String },
}
|
use super::*;
use crate::helpers::construction::constraints::create_constraint_pipeline_with_module;
use crate::helpers::models::domain::create_empty_solution_context;
use crate::helpers::models::problem::test_single_with_id;
use crate::helpers::models::solution::*;
// Parameterized test: each case feeds (value, max_cost, expected) into
// `can_estimate_value_impl` below.
parameterized_test! {can_estimate_value, (value, max_cost, expected), {
    can_estimate_value_impl(value, max_cost, expected);
}}

can_estimate_value! {
    case_01: (100., 1000., -10.),
    case_02: (50., 1000., -5.),
    case_03: (50., 100., -0.5),
    case_04: (100., 0., -10.),
    case_05: (50., 0., -5.),
}
/// Shared body for the cases above: builds a maximizing TotalValue constraint
/// whose value function always returns `value`, seeds the route state with
/// `max_cost` under TOTAL_VALUE_KEY, and checks the soft-route estimate.
fn can_estimate_value_impl(value: f64, max_cost: f64, expected: f64) {
    let (constraint, _) = TotalValue::maximize(1000., 0.1, Arc::new(move |_| value));
    let constraint = create_constraint_pipeline_with_module(constraint);
    let mut route_ctx = create_empty_route_ctx();
    route_ctx.state_mut().put_route_state(TOTAL_VALUE_KEY, max_cost);
    let solution_ctx = create_empty_solution_context();
    let result = constraint.evaluate_soft_route(&solution_ctx, &route_ctx, &Job::Single(test_single_with_id("job")));
    assert_eq!(result, expected);
}
|
#![allow(non_snake_case)]
#![allow(unused_doc_comments)]
//! # consul-rust
//! ```rust
//! #![allow(unused_must_use)]
//! use consul::Client;
//! use consul::Config;
//! use consul::catalog::Catalog;
//!
//! fn main(){
//! let mut config = Config::new().unwrap();
//! config.address = String::from("http://localhost:8500");
//! let client = Client::new(config);
//! let nodes = client.nodes(None);
//! nodes.map(|(nodes, _)|{
//! println!("nodes: {:?}", nodes);
//! for node in nodes.iter() {
//! println!("node {}: {:?}", node.ID, client.node(node.ID.as_str(), None));
//! }
//! });
//!
//! let res = client.services(None);
//! res.map(|(m, _)|{
//! println!("services: {:?}", m);
//! for key in m.keys() {
//! let service = client.service(key.as_str(), None);
//! println!("service {}: {:?}", key, service);
//! }
//! });
//! }
//! ```
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate serde_derive;
pub mod agent;
pub mod catalog;
pub mod connect_ca;
pub mod errors;
pub mod health;
pub mod kv;
pub mod session;
mod request;
use std::time::Duration;
use reqwest::ClientBuilder;
use reqwest::Client as HttpClient;
use errors::{Result, ResultExt};
/// Consul client: a thin holder of the connection [`Config`] used by the
/// per-endpoint trait impls (catalog, health, kv, ...).
#[derive(Clone, Debug)]
pub struct Client {
    config: Config,
}

impl Client {
    /// Creates a client from an existing configuration.
    pub fn new(config: Config) -> Self {
        // Field-init shorthand (was the redundant `config: config`).
        Client { config }
    }
}
/// Connection settings shared by all Consul API calls.
#[derive(Clone, Debug)]
pub struct Config {
    /// Base URL of the Consul agent, e.g. "http://localhost:8500".
    pub address: String,
    /// Optional datacenter to scope requests to.
    pub datacenter: Option<String>,
    /// Reusable reqwest HTTP client.
    pub http_client: HttpClient,
    /// Optional wait time for blocking queries.
    pub wait_time: Option<Duration>,
}
impl Config {
    /// Builds a default configuration pointing at a local Consul agent
    /// (`http://localhost:8500`), with a freshly constructed HTTP client.
    pub fn new() -> Result<Config> {
        let http_client = ClientBuilder::new()
            .build()
            .chain_err(|| "Failed to build reqwest client")?;
        Ok(Config {
            address: String::from("http://localhost:8500"),
            datacenter: None,
            http_client,
            wait_time: None,
        })
    }
}
/// Per-request options for read (query) endpoints.
#[derive(Clone, Debug, Default)]
pub struct QueryOptions {
    /// Overrides the configured datacenter for this request.
    pub datacenter: Option<String>,
    /// Index for blocking queries (wait for changes past this index).
    pub wait_index: Option<u64>,
    /// Maximum time to block for a blocking query.
    pub wait_time: Option<Duration>,
}

/// Metadata returned alongside query responses.
#[derive(Clone, Debug)]
pub struct QueryMeta {
    /// Consul's last index, usable as `wait_index` in a follow-up query.
    pub last_index: Option<u64>,
    /// How long the request took.
    pub request_time: Duration,
}

/// Per-request options for write endpoints.
#[derive(Clone, Debug, Default)]
pub struct WriteOptions {
    /// Overrides the configured datacenter for this request.
    pub datacenter: Option<String>,
}

/// Metadata returned alongside write responses.
#[derive(Clone, Debug)]
pub struct WriteMeta {
    /// How long the request took.
    pub request_time: Duration,
}
|
#![cfg_attr(feature="cargo-clippy", allow(let_unit_value))]
extern crate actix;
extern crate futures;
extern crate tokio_core;
use std::time::Duration;
use futures::{future, Future};
use tokio_core::reactor::Timeout;
use actix::prelude::*;
use actix::msgs::SystemExit;
/// Which timer scenario a `MyActor` instance exercises.
#[derive(Debug, PartialEq)]
enum Op {
    Cancel,
    Timeout,
    TimeoutStop,
    RunAfter,
    RunAfterStop,
}

// Test actor; the scenario is selected at construction time.
struct MyActor{op: Op}
impl Actor for MyActor {
    type Context = actix::Context<Self>;

    /// On start, drive the scenario selected by `self.op`.
    fn started(&mut self, ctx: &mut Context<MyActor>) {
        match self.op {
            Op::Cancel => {
                // Schedule a delayed message and cancel it immediately;
                // the handler must never fire for this scenario.
                let handle = ctx.notify_later(TimeoutMessage, Duration::new(0, 100));
                ctx.cancel_future(handle);
            },
            Op::Timeout => {
                // The only scenario in which TimeoutMessage is expected to arrive.
                ctx.notify_later(TimeoutMessage, Duration::new(0, 1000));
            },
            Op::TimeoutStop => {
                // Stopping the actor should drop the pending notification.
                ctx.notify_later(TimeoutMessage, Duration::new(0, 100_000));
                ctx.stop();
            },
            Op::RunAfter => {
                // run_later closure ends the test by shutting the system down.
                ctx.run_later(Duration::new(0, 100), |_, _| {
                    Arbiter::system().send(SystemExit(0));
                });
            }
            Op::RunAfterStop => {
                // The closure would panic; stopping the actor must cancel it.
                ctx.run_later(Duration::new(1, 0), |_, _| {
                    panic!("error");
                });
                ctx.stop();
            }
        }
    }

    /// Any actor stop also ends the test by exiting the system.
    fn stopped(&mut self, _: &mut Context<MyActor>) {
        Arbiter::system().send(SystemExit(0));
    }
}
// Marker message delivered by the delayed notifications above.
struct TimeoutMessage;

impl ResponseType for TimeoutMessage {
    type Item = ();
    type Error = ();
}
impl Handler<TimeoutMessage> for MyActor {
    type Result = ();

    /// Receives the delayed message; only the `Timeout` scenario expects it.
    fn handle(&mut self, _: TimeoutMessage, _: &mut Self::Context) {
        // Idiomatic replacement for `if cond { assert!(false, ...) }`:
        // assert the expected condition directly with the same message.
        assert!(self.op == Op::Timeout, "should not happen {:?}", self.op);
        Arbiter::system().send(SystemExit(0));
    }
}
// Delayed notification fires and is handled (Op::Timeout is the allowed case).
#[test]
fn test_add_timeout() {
    let sys = System::new("test");
    let _addr: Address<_> = MyActor{op: Op::Timeout}.start();
    sys.run();
}

// A cancelled notification must never fire; a separate reactor timeout
// shuts the system down so the test terminates.
#[test]
fn test_add_timeout_cancel() {
    let sys = System::new("test");
    let _addr: Address<_> = MyActor{op: Op::Cancel}.start();
    Arbiter::handle().spawn(
        Timeout::new(Duration::new(0, 1000), Arbiter::handle()).unwrap()
            .then(|_| {
                Arbiter::system().send(SystemExit(0));
                future::result(Ok(()))
            })
    );
    sys.run();
}

// Stopping the actor drops its pending notification (stopped() exits).
#[test]
fn test_add_timeout_stop() {
    let sys = System::new("test");
    let _addr: Address<_> = MyActor{op: Op::TimeoutStop}.start();
    sys.run();
}

// run_later closure executes and exits the system.
#[test]
fn test_run_after() {
    let sys = System::new("test");
    let _addr: Address<_> = MyActor{op: Op::RunAfter}.start();
    sys.run();
}

// Stopping the actor cancels the scheduled (panicking) closure.
#[test]
fn test_run_after_stop() {
    let sys = System::new("test");
    let _addr: Address<_> = MyActor{op: Op::RunAfterStop}.start();
    sys.run();
}
|
use embedded_graphics_core::pixelcolor::raw::RawU2;
use embedded_graphics_core::prelude::*;
use crate::epd::*;
/// Pixel value for the tri-color e-paper display.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub enum EpdColor {
    White,
    Black,
    Red,
}

impl EpdColor {
    /// Bit for the black plane: 0 when the pixel is black, 1 otherwise.
    fn black_bit(&self) -> u8 {
        if *self == EpdColor::Black { 0 } else { 1 }
    }

    /// Bit for the red plane: 0 when the pixel is red, 1 otherwise.
    fn red_bit(&self) -> u8 {
        if *self == EpdColor::Red { 0 } else { 1 }
    }
}
// Two raw bits are enough for the three colors (see `From<RawU2>` below).
impl PixelColor for EpdColor {
    type Raw = RawU2;
}
impl From<RawU2> for EpdColor {
    /// Decodes the 2-bit raw value; `0b11` is deliberately unassigned and
    /// panics to surface corrupted data.
    fn from(data: RawU2) -> Self {
        match data.into_inner() {
            0b00 => Self::White,
            0b01 => Self::Black,
            0b10 => Self::Red,
            0b11 => panic!("unused RawU2 value 0b11"),
            // RawU2 only carries two bits, so no other values can occur.
            _ => unreachable!(),
        }
    }
}
impl<SPI, RST, BUSY, ECS, DC> OriginDimensions for Epd<SPI, RST, BUSY, ECS, DC> {
    /// The panel is a fixed 152x152 pixels.
    fn size(&self) -> Size {
        Size::new(152, 152)
    }
}
impl<SPI, RST, BUSY, ECS, DC> DrawTarget for Epd<SPI, RST, BUSY, ECS, DC> {
    type Color = EpdColor;
    type Error = core::convert::Infallible;

    /// Writes pixels into the black and red framebuffers (one bit per plane,
    /// MSB-first within each byte, row-major over the 152x152 panel).
    fn draw_iter<I>(&mut self, pixels: I) -> Result<(), Self::Error>
    where
        I: IntoIterator<Item = Pixel<Self::Color>>,
    {
        for Pixel(coord, color) in pixels.into_iter() {
            let Point { x, y } = coord;
            // The DrawTarget contract requires out-of-bounds pixels to be
            // ignored; previously a negative or too-large coordinate produced
            // a bogus index (panic or silent corruption of another pixel).
            if x < 0 || x >= 152 || y < 0 || y >= 152 {
                continue;
            }
            let position = y * 152 + x;
            let index = (position / 8) as usize;
            let bit = 7 - position % 8;
            // Clear then set the plane bit for this pixel in both buffers.
            self.framebuffer_black[index] &= !(1 << bit);
            self.framebuffer_black[index] |= color.black_bit() << bit;
            self.framebuffer_red[index] &= !(1 << bit);
            self.framebuffer_red[index] |= color.red_bit() << bit;
        }
        Ok(())
    }
}
|
use std::collections::{HashMap, VecDeque};
use std::vec::IntoIter;
use either::Either;
pub use parser::parse;
mod parser;
// Right-hand side of an assignment: either a variable name or a literal.
type VarOrNum = Either<String, i32>;

/// One statement of the toy process language interpreted by `Os`.
#[derive(Clone, Debug)]
pub enum Statement {
    /// `var = rhs`
    Assignment(String, VarOrNum),
    /// `var += rhs`
    AddAssignment(String, VarOrNum),
    /// Print a variable's value prefixed by the pid.
    Print(String),
    /// Acquire the single global lock (blocks if held).
    Lock,
    /// Release the global lock / wake one blocked process.
    UnLock,
    /// Give up the remainder of the time slice.
    Yield,
    /// Terminates the process; also separates processes in the input list.
    End,
}
/// A single simulated process: its remaining statements, its private variable
/// environment, and the time left in its current slice.
pub struct Process {
    pid: usize,
    statements: IntoIter<Statement>,
    environment: HashMap<String, i32>,
    time_slice: usize,
}
impl Process {
    /// Creates a process with an empty environment and a full time slice.
    fn new(pid: usize, statements: IntoIter<Statement>, time_slice: usize) -> Self {
        Self {
            pid,
            statements,
            environment: Default::default(),
            time_slice,
        }
    }

    /// Pops the next statement, or `None` when the program is exhausted.
    fn next_statement(&mut self) -> Option<Statement> {
        self.statements.next()
    }

    /// Current value of a variable; unset variables read as 0.
    fn lookup(&self, var: &str) -> i32 {
        self.environment.get(var).copied().unwrap_or(0)
    }

    /// Evaluates a right-hand side: a literal, or a variable lookup.
    fn resolve(&self, value: &VarOrNum) -> i32 {
        match value {
            Either::Left(name) => self.lookup(name),
            Either::Right(n) => *n,
        }
    }

    /// `var = right`
    fn assign(&mut self, var: String, right: VarOrNum) {
        let val = self.resolve(&right);
        self.environment.insert(var, val);
    }

    /// `var += right`
    fn add_assign(&mut self, var: String, right: VarOrNum) {
        let val = self.lookup(&var) + self.resolve(&right);
        self.environment.insert(var, val);
    }

    /// Prints `pid: value` for the given variable.
    fn print(&self, var: String) {
        println!("{}: {}", self.pid, self.lookup(&var));
    }
}
/// The round-robin scheduler: ready and blocked queues plus a single global
/// lock, with per-statement-kind execution costs.
pub struct Os {
    /// Cost of each statement kind, indexed by `statement_execution_time`.
    stmt_exec_time_cfg: [usize; 7],
    /// Time budget a process gets each time it is scheduled.
    time_slice: usize,
    ready_queue: VecDeque<Process>,
    blocking_queue: VecDeque<Process>,
    /// Whether the single global lock is currently held.
    is_locked: bool,
}
impl Os {
    /// Builds the OS: the flat statement list is split into one process per
    /// `End`-terminated segment, with pids starting at 1.
    pub fn new(
        stmt_exec_time_cfg: [usize; 7],
        time_slice: usize,
        statements: Vec<Statement>,
    ) -> Self {
        let ready_queue = statements
            .split(|s| matches!(s, Statement::End))
            .map(|s| s.to_vec().into_iter())
            .enumerate()
            .map(|(pid, statements)| Process::new(pid + 1, statements, time_slice))
            .collect();
        Self {
            stmt_exec_time_cfg,
            time_slice,
            ready_queue,
            blocking_queue: Default::default(),
            is_locked: false,
        }
    }

    /// Runs all processes to completion with round-robin scheduling:
    /// each statement costs time from the current slice; an exhausted slice
    /// sends the process to the back of the ready queue.
    pub fn run(mut self) {
        while let Some(mut running_process) = self.ready_queue.pop_front() {
            while let Some(stmt) = running_process.next_statement() {
                running_process.time_slice = running_process
                    .time_slice
                    .saturating_sub(self.statement_execution_time(&stmt));
                match stmt {
                    Statement::Assignment(var, right) => running_process.assign(var, right),
                    Statement::AddAssignment(var, right) => running_process.add_assign(var, right),
                    Statement::Print(var) => running_process.print(var),
                    Statement::Lock => {
                        if self.is_locked {
                            // Lock is held: block with a fresh slice for later.
                            running_process.time_slice = self.time_slice;
                            self.blocking_queue.push_back(running_process);
                            break;
                        } else {
                            self.is_locked = true;
                        }
                    }
                    Statement::UnLock => {
                        if let Some(p) = self.blocking_queue.pop_front() {
                            // Hand the lock directly to the first waiter (its
                            // Lock statement was already consumed when it
                            // blocked), so `is_locked` stays true.
                            self.ready_queue.push_front(p);
                        } else {
                            // BUGFIX: release the lock when nobody is waiting.
                            // Previously `is_locked` was never cleared, so any
                            // later Lock would block forever.
                            self.is_locked = false;
                        }
                    }
                    Statement::Yield => {
                        // Give up the rest of the slice; preempted below.
                        running_process.time_slice = 0;
                    }
                    Statement::End => break,
                }
                if running_process.time_slice == 0 {
                    running_process.time_slice = self.time_slice;
                    self.ready_queue.push_back(running_process);
                    break;
                }
            }
        }
    }

    /// Cost of one statement kind, from `stmt_exec_time_cfg` by position.
    fn statement_execution_time(&self, statement: &Statement) -> usize {
        match statement {
            Statement::Assignment(_, _) => self.stmt_exec_time_cfg[0],
            Statement::AddAssignment(_, _) => self.stmt_exec_time_cfg[1],
            Statement::Print(_) => self.stmt_exec_time_cfg[2],
            Statement::Lock => self.stmt_exec_time_cfg[3],
            Statement::UnLock => self.stmt_exec_time_cfg[4],
            Statement::Yield => self.stmt_exec_time_cfg[5],
            Statement::End => self.stmt_exec_time_cfg[6],
        }
    }
}
|
// Builds a `syn::Error` spanned on `$span`; the second arm accepts
// `format!`-style arguments and delegates to the first.
macro_rules! format_err {
    ($span:expr, $msg:expr $(,)?) => {
        syn::Error::new_spanned(&$span as &dyn quote::ToTokens, &$msg as &dyn core::fmt::Display)
    };
    ($span:expr, $($tt:tt)*) => {
        format_err!($span, format!($($tt)*))
    };
}

// Early-returns `Err(format_err!(...))` from the enclosing function.
macro_rules! bail {
    ($($tt:tt)*) => {
        return Err(format_err!($($tt)*))
    };
}
|
use crate::errors::ConnectorXPythonError;
use arrow2::{
array::ArrayRef,
chunk::Chunk,
datatypes::{Field, Schema},
ffi,
};
use connectorx::source_router::SourceConn;
use connectorx::{prelude::*, sql::CXQuery};
use fehler::throws;
use libc::uintptr_t;
use pyo3::prelude::*;
use pyo3::{PyAny, Python};
use std::sync::Arc;
/// Runs the queries through connectorx's arrow2 destination and returns the
/// record batches to Python as raw Arrow C-interface pointer pairs
/// (see `to_ptrs`).
#[throws(ConnectorXPythonError)]
pub fn write_arrow<'a>(
    py: Python<'a>,
    source_conn: &SourceConn,
    origin_query: Option<String>,
    queries: &[CXQuery<String>],
) -> &'a PyAny {
    let destination = get_arrow2(source_conn, origin_query, queries)?;
    let (rbs, schema) = destination.arrow()?;
    let ptrs = to_ptrs(rbs, schema);
    // Hand the (names, pointers) tuple to Python as a plain object.
    let obj: PyObject = ptrs.into_py(py);
    obj.into_ref(py)
}
/// Converts record batches into `(column names, per-batch (array, schema)
/// raw-pointer pairs)` for transfer across the FFI boundary.
///
/// NOTE(review): the `Box::into_raw` pointers are intentionally not freed
/// here — ownership appears to transfer to the Python consumer via the Arrow
/// C data interface; confirm the receiving side releases them.
fn to_ptrs(
    rbs: Vec<Chunk<ArrayRef>>,
    schema: Arc<Schema>,
) -> (Vec<String>, Vec<Vec<(uintptr_t, uintptr_t)>>) {
    if rbs.is_empty() {
        return (vec![], vec![]);
    }
    let mut result = vec![];
    let names = schema.fields.iter().map(|f| f.name.clone()).collect();
    for rb in rbs {
        let mut cols = vec![];
        for array in rb.columns() {
            // Heap-allocate the C-ABI structs and leak them as raw pointers.
            let array_ptr = Box::new(ffi::ArrowArray::empty());
            let schema_ptr = Box::new(ffi::ArrowSchema::empty());
            let array_ptr = Box::into_raw(array_ptr);
            let schema_ptr = Box::into_raw(schema_ptr);
            // SAFETY: both pointers come from Box::into_raw just above, so
            // they are valid, aligned and non-null; the export functions
            // initialize the pointed-to structs in place.
            unsafe {
                ffi::export_field_to_c(
                    &Field::new("", array.data_type().clone(), true),
                    schema_ptr,
                );
                ffi::export_array_to_c(array.clone(), array_ptr);
            };
            cols.push((array_ptr as uintptr_t, schema_ptr as uintptr_t));
        }
        result.push(cols);
    }
    (names, result)
}
|
//! Infrastructure for tracking explanations of differential dataflow computations.
#[allow(unused_variables)]
extern crate fnv;
extern crate rand;
extern crate time;
extern crate timely;
extern crate timely_sort;
extern crate graph_map;
extern crate differential_dataflow;
use std::rc::Rc;
use std::hash::Hash;
use timely::progress::Timestamp;
use timely::dataflow::*;
use timely::dataflow::scopes::Child;
use timely::dataflow::operators::*;
use timely::dataflow::operators::feedback::Handle;
use timely::progress::timestamp::RootTimestamp;
use timely::progress::nested::product::Product;
use timely_sort::Unsigned;
use differential_dataflow::{Data, Collection, Delta};
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
/// A explanation-tracking collection.
///
/// A `Variable` represents a differential dataflow collection, but also two additional collections corresponding to
///
/// * Those elements required as part of explaining some outputs, and
/// * Those elements currently reproduced using explanatory inputs.
///
/// A `Variable` supports many of the same operations that a `Collection` supports, which perform additional work to
/// maintain the explanation dataflow infrastructure. Several methods are currently macros, because I haven't yet
/// sorted out how best to write their type signatures (e.g. `group` and `min` need to be generic over timestamps in
/// an odd, probably HKT, sort of way).
pub struct Variable<'a, G, K, V, Gp>
where
    G: Scope,
    K: Data+Default,
    V: Data+Default,
    Gp: Scope<Timestamp=Product<Product<RootTimestamp, u32>, u32>>,
    G::Timestamp: Ord+Hash {
    /// The collection itself.
    pub stream: Collection<G, (K, V)>,
    /// A collection of elements produced by explanatory inputs.
    pub working: Collection<G, (K, V)>,
    /// A collection of elements required for explanation, as
    /// `(key, value, timestamp, u32)` tuples in the explanation scope.
    /// NOTE(review): the trailing `u32` appears to be an explanation round /
    /// query id — confirm against the provenance scope's usage.
    pub depends: MonotonicVariable<'a, Gp, (K, V, G::Timestamp, u32)>,
}
impl<'a,
    G: Scope,
    K: Data+Default,
    V: Data+Default,
    Gp: Scope<Timestamp=Product<Product<RootTimestamp, u32>, u32>>>
    Variable<'a, G, K, V, Gp> where G::Timestamp: Ord+Hash {
    /// Constructs a new `Variable` from collections and the explanation-tracking scope.
    ///
    /// `source` is the actual collection, `working` the collection reproduced
    /// from explanatory inputs, and `prov` the provenance scope hosting the
    /// `depends` requirements collection.
    pub fn new(
        source: Collection<G, (K, V)>,
        working: Collection<G, (K, V)>,
        prov: &mut Child<'a, Gp, u32>) -> Variable<'a, G, K, V, Gp> {
        Variable {
            stream: source,
            // Field-init shorthand (was the redundant `working: working`).
            working,
            depends: MonotonicVariable::new(prov),
        }
    }
}
/// Pairs every record of a collection with the timestamp at which it occurs,
/// emitting `(record, time)` with unit weight (original weights are dropped
/// after consolidation).
#[macro_export]
macro_rules! lift {
    ($stream:expr) => {{
        Collection::new(
            $stream.consolidate()
                .inner
                .unary_stream(timely::dataflow::channels::pact::Pipeline, "lifting", |input, output| {
                    while let Some((time, data)) = input.next() {
                        let mut session = output.session(&time);
                        for &(ref datum, _weight) in data.iter() {
                            session.give(((datum.clone(), time.time()), 1));
                        }
                    }
                })
        )
    }}
}
impl<'a, G, K, V, Gp> Variable<'a, G, K, V, Gp> where
    G: Scope,
    K: Data+Default,
    V: Data+Default,
    Gp: Scope<Timestamp=Product<Product<RootTimestamp, u32>, u32>>,
    G::Timestamp: Ord+Hash+Lattice {

    /// Joins two collections using an unsigned key.
    ///
    /// Joins both the actual and working streams; each joined requirement is
    /// split back into per-input requirements (one per join side).
    pub fn join_u<V2>(&mut self, other: &mut Variable<'a, G, K, V2, Gp>) -> Variable<'a, G, K, (V, V2), Gp>
    where K : Unsigned, V2: Unsigned+Default+Data {
        let result = Variable::new(
            self.stream.join_u(&other.stream).map(|(x,y,z)| (x,(y,z))),
            self.working.join_u(&other.working).map(|(x,y,z)| (x,(y,z))),
            &mut self.depends.scope()
        );
        // add each component of joined results to the requirements of each input
        self.depends.add(&result.depends.stream.map(|(x,(y,_),t,q)| (x,y,t,q)));
        other.depends.add(&result.depends.stream.map(|(x,(_,z),t,q)| (x,z,t,q)));
        result
    }

    /// Maps elements of one collection to another using an invertible function (and its inverse).
    ///
    /// The inverse maps requirements on the output back to requirements on
    /// the input — hence the function must actually be a bijection.
    pub fn map_inverse<K2: Data+Default,
                       V2: Data+Default,
                       F1: Fn((K,V))->(K2,V2)+'static,
                       F2: Fn((K2,V2))->(K,V)+'static>(&mut self, logic: F1, inverse: F2) ->
        Variable<'a, G, K2, V2, Gp>
    {
        // The forward map is cloned (via Rc) for the two independent streams.
        let forward = Rc::new(logic);
        let clone1 = forward.clone();
        let clone2 = forward.clone();
        let result = Variable::new(
            self.stream.map(move |x| clone1(x)),
            self.working.map(move |x| clone2(x)),
            &mut self.depends.scope()
        );
        self.depends.add(&result.depends.stream.map(move |(k2,v2,t,u)| {
            let (k, v) = inverse((k2, v2));
            (k, v, t, u)
        }));
        result
    }

    /// Concatenates two collections.
    ///
    /// Requirements on the result are forwarded unchanged to both inputs.
    pub fn concat(&mut self, other: &mut Variable<'a, G, K, V, Gp>) -> Variable<'a, G, K, V, Gp> {
        let result = Variable::new(
            self.stream.concat(&other.stream),
            self.working.concat(&other.working),
            &mut self.depends.scope()
        );
        self.depends.add(&result.depends.stream);
        other.depends.add(&result.depends.stream);
        result
    }

    /// Subtracts `other` from `self` (concatenation with `other` negated).
    pub fn except(&mut self, other: &mut Variable<'a, G, K, V, Gp>) -> Variable<'a, G, K, V, Gp> {
        let result = Variable::new(
            self.stream.concat(&other.stream.negate()),
            self.working.concat(&other.working.negate()),
            &mut self.depends.scope()
        );
        self.depends.add(&result.depends.stream);
        other.depends.add(&result.depends.stream);
        result
    }

    /// Brings a collection from an outer scope into a child scope.
    pub fn enter<'b, T: Timestamp+Data>(&mut self, child: &Child<'b, G, T>) -> Variable<'a, Child<'b,G,T>, K, V, Gp> {
        let result = Variable::new( self.stream.enter(child), self.working.enter(child), &mut self.depends.scope() );
        // Requirements come back at nested timestamps; keep only the outer part.
        self.depends.add(&result.depends.stream.map(|(x,y,t,q)| (x,y,t.outer,q)));
        result
    }

    /// Brings a collection from an outer scope into a child scope, each element at its own timestamp.
    pub fn enter_at<'b, T, F>(&mut self, child: &Child<'b,G, T>, at: F) -> Variable<'a, Child<'b,G,T>, K, V, Gp>
    where T: Timestamp+Data, F: Fn(&((K,V), Delta))->T+'static {
        // `at` is cloned (via Rc) for the two independent streams.
        let at = Rc::new(at);
        let clone1 = at.clone();
        let clone2 = at.clone();
        let result = Variable::new(
            self.stream.enter_at(child, move |x| clone1(x)),
            self.working.enter_at(child, move |x| clone2(x)),
            &mut self.depends.scope()
        );
        self.depends.add(&result.depends.stream.map(|(x,y,t,q)| (x,y,t.outer,q)));
        result
    }

    /// Consolidates both underlying collections; requirements pass through.
    pub fn consolidate(&mut self) -> Self {
        let result = Variable::new(
            self.stream.consolidate(),
            self.working.consolidate(),
            &mut self.depends.scope()
        );
        self.depends.add(&result.depends.stream);
        result
    }
}
/// Computes a per-key minimum over a `Variable`, tracking which inputs are
/// required to explain each reported minimum.
#[macro_export]
macro_rules! min {
    ($var:expr, $logic:expr, $scope:expr) => {{
        // compute the minimums for both the actual and working data collections.
        let min1 = $var.stream.group_u(|_k, s, t| t.push(((*s.next().unwrap().0), 1)));
        let min2 = $var.working.group_u(|_k, s, t| t.push(((*s.next().unwrap().0), 1)));
        // construct a new variable from these minimums.
        let var_min = Variable::new(
            min1.map(|(k,v)| (k,$logic(v))),
            min2.map(|(k,v)| (k,$logic(v))),
            &mut $scope
        );
        // extract minimums and presents them as explainable data, in the explanation scope.
        let temp = lift!(min1.concat(&min2)).leave().enter(&$scope).map(|((x,val),t)| (x,(val,t)));
        // set explanation requirements from requests by
        // (i) joining requests against actual minimums,
        // (ii) filtering records to only those with less or equal time,
        // (iii) filtering records to only those with less or equal value,
        $var.depends.add(
            &temp.join_u(&var_min.depends.stream.map(|(x,l,t,q)| (x,(l,t,q)))) // (i)
                .filter(|&(_,(_,t1),(_,t2,_))| t1 <= t2) // (ii)
                .filter(|&(_,(val,_),(l2,_,_))| $logic(val) <= l2) // (iii)
                .map(|(x,(val,t),(_,_,q))| (x,val,t,q)) // reformatting
        );
        var_min
    }}
}
/// Subtracts `$var2` from `$var1` (via negate + concat) on both the actual
/// and working collections; explanation requirements on the result are
/// forwarded unchanged to both inputs.
///
/// (A previous, time-filtered variant of the dependency propagation was left
/// here as commented-out code; it has been removed — see `leave!` for the
/// join-against-lift pattern it used.)
#[macro_export]
macro_rules! except {
    ($var1:expr, $var2:expr, $scope:expr) => {{
        let result = Variable::new(
            $var1.stream.concat(&$var2.stream.negate()),
            $var1.working.concat(&$var2.working.negate()),
            &mut $scope
        );
        $var1.depends.add(&result.depends.stream);
        $var2.depends.add(&result.depends.stream);
        result
    }}
}
/// Returns a `Variable` to the parent scope. Requirements on the result are
/// joined against the lifted input data to recover the inner timestamps at
/// which the required records actually occur.
#[macro_export]
macro_rules! leave {
    ($var:expr, $scope:expr) => {{
        let result = Variable::new( $var.stream.leave(), $var.working.leave(), &mut $scope );
        $var.depends.add(
            &result.depends.stream
                .map(|(x,y,t,q)| ((x,y),(t,q)))
                .join(&lift!($var.stream.concat(&$var.working)).leave().enter(&$scope))
                .map(|((x,y),(_,q),t)| (x,y,t,q))
        );
        result
    }}
}
/// A collection defined by multiple mutually recursive rules.
pub struct MonotonicVariable<'a, G: Scope, D: Data+Default>
where G::Timestamp: Lattice {
    /// Feedback handle; taken (left `None`) when the cycle is closed on drop.
    pub feedback: Option<Handle<G::Timestamp, u32,(D, i32)>>,
    /// The collection as seen by consumers (the loop's output).
    pub stream: Collection<Child<'a, G, u32>, D>,
    /// The accumulated definition: loop output plus everything `add`ed so far.
    pub current: Collection<Child<'a, G, u32>, D>,
}
impl<'a, G: Scope, D: Data+Default> MonotonicVariable<'a, G, D> where G::Timestamp: Lattice {
    /// Creates a new `Variable` and a `Stream` representing its output, from a supplied `source` stream.
    pub fn new(scope: &mut Child<'a, G, u32>) -> MonotonicVariable<'a, G, D> {
        let (feedback, cycle) = scope.loop_variable(u32::max_value(), 1);
        let cycle = Collection::new(cycle);
        // `stream` and `current` start as the same feedback collection;
        // move `cycle` into the last field instead of cloning it twice.
        let stream = cycle.clone();
        MonotonicVariable { feedback: Some(feedback), stream, current: cycle }
    }

    /// Adds a new source of data to the `Variable`.
    pub fn add(&mut self, source: &Collection<Child<'a, G, u32>, D>) {
        self.current = self.current.concat(source);
    }

    /// Returns the (iterative) scope this variable lives in.
    pub fn scope(&self) -> Child<'a, G, u32> {
        self.current.scope()
    }
}
impl<'a, G: Scope, D: Data+Default> Drop for MonotonicVariable<'a, G, D> where G::Timestamp: Lattice {
    /// Closes the feedback cycle when the variable is dropped: `current` is
    /// made set-like (positive weights clamped to 1, others dropped) and fed
    /// back into the loop, so accumulation stays monotonic.
    fn drop(&mut self) {
        if let Some(feedback) = self.feedback.take() {
            self.current.threshold(|_, w| if w > 0 { 1 } else { 0 })
                .inner
                .connect_loop(feedback);
        }
    }
}
/// Container for feedback edges for a explanation-traced variable.
pub struct VariableFeedback<'a, G, K, V, Gp>
where G: Scope,
    K: Data+Default,
    V: Data+Default,
    Gp: Scope<Timestamp=Product<Product<RootTimestamp, u32>, u32>>,
    G::Timestamp: Ord+Hash {
    // Loop handles for (stream, working); taken once `set` closes the cycles.
    handles: Option<(Handle<G::Timestamp, u32, ((K,V), i32)>,
                     Handle<G::Timestamp, u32, ((K,V), i32)>)>,
    // The variable whose stream/working collections are the loop outputs.
    variable: Variable<'a, Child<'a, G, u32>, K, V, Gp>,
}
impl<'a, G, K, V, Gp> VariableFeedback<'a, G, K, V, Gp>
where G: Scope,
    K: Data+Default,
    V: Data+Default,
    Gp: Scope<Timestamp=Product<Product<RootTimestamp, u32>, u32>>,
    G::Timestamp: Ord+Hash {
    /// Creates loop variables for both the actual and the working collection
    /// and wraps them in a `Variable` tied to the explanation scope.
    pub fn new(scope: &mut Child<'a, G, u32>, explanation_scope: &mut Child<'a, Gp, u32>) -> Self {
        let (handle1, cycle1) = scope.loop_variable(u32::max_value(), 1); let cycle1 = Collection::new(cycle1);
        let (handle2, cycle2) = scope.loop_variable(u32::max_value(), 1); let cycle2 = Collection::new(cycle2);
        VariableFeedback {
            handles: Some((handle1, handle2)),
            variable: Variable::new(cycle1, cycle2, explanation_scope),
        }
    }

    /// Closes the cycles: `source` becomes the definition of this variable.
    /// Requirements are forwarded to `source` shifted one iteration earlier
    /// (`t.inner - 1`), skipping iteration zero; only effective on first call
    /// (the handles are consumed).
    pub fn set(&mut self, source: &mut Variable<'a, Child<'a, G, u32>, K, V, Gp>) {
        if let Some((handle1, handle2)) = self.handles.take() {
            source.stream.inner.connect_loop(handle1);
            source.working.inner.connect_loop(handle2);
            source.depends.add(
                &self.variable.depends.stream
                    .filter(|&(_,_,t,_)| t.inner > 0)
                    .map(|(x,l,t,q)| (x,l,Product::new(t.outer, t.inner - 1),q))
            );
        }
    }
}
// Deref to the wrapped `Variable`, so read-only `Variable` methods can be
// called directly on a `VariableFeedback`.
impl<'a, G, K, V, Gp> ::std::ops::Deref for VariableFeedback<'a, G, K, V, Gp>
where G: Scope,
      K: Data+Default,
      V: Data+Default,
      Gp: Scope<Timestamp=Product<Product<RootTimestamp, u32>, u32>>,
      G::Timestamp: Ord+Hash {
    type Target = Variable<'a, Child<'a, G, u32>, K, V, Gp>;
    fn deref(&self) -> &Self::Target {
        &self.variable
    }
}
// Mutable counterpart of the Deref impl above.
impl<'a, G, K, V, Gp> ::std::ops::DerefMut for VariableFeedback<'a, G, K, V, Gp>
where G: Scope,
      K: Data+Default,
      V: Data+Default,
      Gp: Scope<Timestamp=Product<Product<RootTimestamp, u32>, u32>>,
      G::Timestamp: Ord+Hash {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.variable
    }
}
|
use actix_web::{AsyncResponder, Error, HttpMessage, HttpResponse, HttpRequest};
use futures::Future;
// Registration-style JSON payload for the `info` endpoint.
// NOTE(review): `confirm_password` is deserialized but never checked against
// `password` in this handler — confirm whether validation happens elsewhere.
#[derive(Deserialize,Serialize, Debug)]
struct Info {
    username: String,
    email: String,
    password: String,
    confirm_password: String,
}
/// Deserializes the request body as `Info` and echoes it back as JSON.
pub fn info(req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {
    req.json()
        .from_err()
        .map(|payload: Info| HttpResponse::Ok().json(payload))
        .responder()
}
|
use std::{io, mem::MaybeUninit};
use crate::{
errors::{DriverError, Error, Result},
types::{column::StringPool, StatBuffer, Unmarshal},
};
/// Blocking protocol-read helpers layered over any `io::Read` source.
pub(crate) trait ReadEx {
    /// Fills `rv` completely, erroring if the source is exhausted first.
    fn read_bytes(&mut self, rv: &mut [u8]) -> Result<()>;
    /// Reads one fixed-size scalar via its `StatBuffer`/`Unmarshal` impls.
    fn read_scalar<V>(&mut self) -> Result<V>
    where
        V: Copy + Unmarshal<V> + StatBuffer;
    /// Reads a uvarint-length-prefixed UTF-8 string.
    fn read_string(&mut self) -> Result<String>;
    /// Reads and discards a length-prefixed string.
    fn skip_string(&mut self) -> Result<()>;
    /// Reads an unsigned LEB128-style varint.
    fn read_uvarint(&mut self) -> Result<u64>;
    /// Reads a length-prefixed string directly into `pool`'s storage.
    fn read_str_into_buffer(&mut self, pool: &mut StringPool) -> Result<()>;
}
// Longest string skipped via a stack buffer; longer strings heap-allocate.
const MAX_STACK_BUFFER_LEN: usize = 1024;
impl<T> ReadEx for T
where
    T: io::Read,
{
    /// Fills `rv` completely, looping over short reads.
    ///
    /// A zero-byte read before the buffer is full (EOF) is surfaced as a
    /// `WouldBlock` I/O error rather than returning a partial buffer.
    fn read_bytes(&mut self, rv: &mut [u8]) -> Result<()> {
        let mut i = 0;
        while i < rv.len() {
            let res_nread = {
                let buf = &mut rv[i..];
                self.read(buf)
            };
            match res_nread {
                Ok(0) => {
                    let ret = io::Error::new(io::ErrorKind::WouldBlock, "would block");
                    return Err(ret.into());
                }
                Ok(nread) => i += nread,
                Err(e) => return Err(From::from(e)),
            }
        }
        Ok(())
    }
    /// Reads one scalar by filling its stat buffer and unmarshalling it.
    fn read_scalar<V>(&mut self) -> Result<V>
    where
        V: Copy + Unmarshal<V> + StatBuffer,
    {
        let mut buffer = V::buffer();
        self.read_bytes(buffer.as_mut())?;
        Ok(V::unmarshal(buffer.as_ref()))
    }
    /// Reads a uvarint length prefix, then that many bytes as UTF-8.
    fn read_string(&mut self) -> Result<String> {
        let str_len = self.read_uvarint()? as usize;
        let mut buffer = vec![0_u8; str_len];
        self.read_bytes(buffer.as_mut())?;
        Ok(String::from_utf8(buffer)?)
    }
    /// Reads and discards a length-prefixed string.
    ///
    /// Short strings are consumed through a fixed-size stack buffer to avoid
    /// a heap allocation; longer ones fall back to a `Vec`.
    fn skip_string(&mut self) -> Result<()> {
        let str_len = self.read_uvarint()? as usize;
        if str_len <= MAX_STACK_BUFFER_LEN {
            // Soundness fix: the previous code materialized a `&mut [u8]`
            // over uninitialized `MaybeUninit` stack memory, which is
            // undefined behavior. A zero-initialized array is sound, and
            // zeroing 1 KiB is negligible next to the read itself.
            let mut buffer = [0_u8; MAX_STACK_BUFFER_LEN];
            self.read_bytes(&mut buffer[..str_len])?;
        } else {
            let mut buffer = vec![0_u8; str_len];
            self.read_bytes(buffer.as_mut())?;
        }
        Ok(())
    }
    /// Reads an unsigned LEB128 varint: low 7 bits per byte, MSB set on
    /// continuation bytes. Errors with `Overflow` once the value cannot fit
    /// in 64 bits (more than 10 bytes, or a 10th byte above 1).
    fn read_uvarint(&mut self) -> Result<u64> {
        let mut x = 0_u64;
        let mut s = 0_u32;
        let mut i = 0_usize;
        loop {
            let b: u8 = self.read_scalar()?;
            if b < 0x80 {
                if i > 9 || i == 9 && b > 1 {
                    return Err(Error::Driver(DriverError::Overflow));
                }
                return Ok(x | (u64::from(b) << s));
            }
            x |= u64::from(b & 0x7f) << s;
            s += 7;
            i += 1;
        }
    }
    /// Reads a length-prefixed string into a buffer allocated from `pool`.
    fn read_str_into_buffer(&mut self, pool: &mut StringPool) -> Result<()> {
        let str_len = self.read_uvarint()? as usize;
        let buffer = pool.allocate(str_len);
        self.read_bytes(buffer)?;
        Ok(())
    }
}
#[test]
fn test_read_uvarint() {
    // NOTE(review): `super::ReadEx` resolves only if the parent module
    // re-exports the trait; confirm the module layout.
    use super::ReadEx;
    use std::io::Cursor;
    // 0xC2 = 194 has the continuation bit set, so the varint is two bytes:
    // (194 & 0x7f) | (10 << 7) = 66 + 1280 = 1346.
    let bytes = [194_u8, 10];
    let mut cursor = Cursor::new(bytes);
    let actual = cursor.read_uvarint().unwrap();
    assert_eq!(actual, 1346)
}
|
use amethyst::{
core::timing::Time,
core::transform::Transform,
derive::SystemDesc,
ecs::{Join, Read, ReadStorage, System, SystemData, WriteStorage},
};
use crate::components::ball::{Ball, Dead};
/// Moves live balls each frame according to their velocity.
#[derive(SystemDesc)]
pub struct BallMoveSystem;
/// Name under which the system is registered in the dispatcher.
pub const BALL_MOVE_SYSTEM: &str = "ball_move_system";
impl<'s> System<'s> for BallMoveSystem {
    type SystemData = (
        ReadStorage<'s, Ball>,
        WriteStorage<'s, Transform>,
        ReadStorage<'s, Dead>,
        Read<'s, Time>,
    );

    /// Advances every ball that is not marked `Dead` along its velocity,
    /// with a constant 1.01 speed-up factor applied per axis per frame.
    fn run(&mut self, (balls, mut transforms, deads, time): Self::SystemData) {
        // The frame delta is the same for every ball, so read it once.
        let dt = time.delta_seconds();
        for (ball, transform, ()) in (&balls, &mut transforms, !&deads).join() {
            transform.prepend_translation_x(ball.velocity[0] * 1.01 * dt);
            transform.prepend_translation_y(ball.velocity[1] * 1.01 * dt);
        }
    }
}
|
use chrono::serde::ts_seconds;
use chrono::{DateTime, Utc};
use serde::Serialize;
use serde_json::Value as JsonValue;
use sqlx::postgres::{types::PgRange, PgConnection};
use uuid::Uuid;
use super::{AgentId, ClassType, KeyValueProperties, Object, Time, WrongKind};
#[cfg(test)]
use super::{GenericReadQuery, WebinarType};
/// A webinar class row from the `class` table, shaped for API serialization.
#[derive(Clone, Debug, Serialize, sqlx::FromRow)]
pub struct Webinar {
    id: Uuid,
    // Expected to be `ClassType::Webinar` (the `TryFrom<Object>` conversion
    // enforces it); excluded from serialization, kept for row mapping.
    #[serde(skip)]
    #[allow(dead_code)]
    kind: ClassType,
    scope: String,
    #[serde(with = "super::serde::time")]
    time: Time,
    audience: String,
    // Serialized as Unix seconds.
    #[serde(with = "ts_seconds")]
    created_at: DateTime<Utc>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tags: Option<JsonValue>,
    properties: KeyValueProperties,
    conference_room_id: Uuid,
    event_room_id: Uuid,
    #[serde(skip_serializing_if = "Option::is_none")]
    original_event_room_id: Option<Uuid>,
    #[serde(skip_serializing_if = "Option::is_none")]
    modified_event_room_id: Option<Uuid>,
    preserve_history: bool,
    reserve: Option<i32>,
    room_events_uri: Option<String>,
}
impl std::convert::TryFrom<Object> for Webinar {
    type Error = WrongKind;

    /// Converts a generic class `Object` into a `Webinar`, failing with
    /// `WrongKind` when the object's kind is not `ClassType::Webinar`.
    fn try_from(value: Object) -> Result<Self, Self::Error> {
        if !matches!(value.kind(), ClassType::Webinar) {
            return Err(WrongKind::new(&value, ClassType::Webinar));
        }
        Ok(Self {
            id: value.id,
            kind: value.kind,
            scope: value.scope,
            audience: value.audience,
            time: value.time,
            created_at: value.created_at,
            tags: value.tags,
            properties: value.properties,
            conference_room_id: value.conference_room_id,
            event_room_id: value.event_room_id,
            original_event_room_id: value.original_event_room_id,
            modified_event_room_id: value.modified_event_room_id,
            preserve_history: value.preserve_history,
            reserve: value.reserve,
            room_events_uri: value.room_events_uri,
        })
    }
}
/// Read query specialized to webinars; only compiled for tests.
#[cfg(test)]
pub type WebinarReadQuery = GenericReadQuery<WebinarType>;
/// Builder for inserting a webinar row into the `class` table.
pub struct WebinarInsertQuery {
    scope: String,
    audience: String,
    time: Time,
    tags: Option<JsonValue>,
    properties: Option<KeyValueProperties>,
    // Defaults to `true` (see `new`).
    preserve_history: bool,
    conference_room_id: Uuid,
    event_room_id: Uuid,
    original_event_room_id: Option<Uuid>,
    modified_event_room_id: Option<Uuid>,
    reserve: Option<i32>,
    room_events_uri: Option<String>,
}
impl WebinarInsertQuery {
    /// Creates a query with the required columns; optional columns start as
    /// `None` and `preserve_history` defaults to `true`.
    pub fn new(
        scope: String,
        audience: String,
        time: Time,
        conference_room_id: Uuid,
        event_room_id: Uuid,
    ) -> Self {
        Self {
            scope,
            audience,
            time,
            tags: None,
            properties: None,
            preserve_history: true,
            conference_room_id,
            event_room_id,
            original_event_room_id: None,
            modified_event_room_id: None,
            reserve: None,
            room_events_uri: None,
        }
    }
    /// Builder: attaches arbitrary JSON tags.
    pub fn tags(self, tags: JsonValue) -> Self {
        Self {
            tags: Some(tags),
            ..self
        }
    }
    /// Builder: sets the key/value properties column.
    pub fn properties(self, properties: KeyValueProperties) -> Self {
        Self {
            properties: Some(properties),
            ..self
        }
    }
    /// Builder: records the original event room id.
    pub fn original_event_room_id(self, id: Uuid) -> Self {
        Self {
            original_event_room_id: Some(id),
            ..self
        }
    }
    /// Builder: records the modified event room id.
    pub fn modified_event_room_id(self, id: Uuid) -> Self {
        Self {
            modified_event_room_id: Some(id),
            ..self
        }
    }
    /// Builder (tests only): sets the reserve size.
    #[cfg(test)]
    pub fn reserve(self, reserve: i32) -> Self {
        Self {
            reserve: Some(reserve),
            ..self
        }
    }
    /// Inserts the webinar row and returns the stored row as an `Object`.
    ///
    /// The kind column is fixed to `webinar`; `properties` falls back to its
    /// default when unset.
    pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Object> {
        let time: PgRange<DateTime<Utc>> = self.time.into();
        sqlx::query_as!(
            Object,
            r#"
INSERT INTO class (
scope, audience, time, tags, preserve_history, kind,
conference_room_id, event_room_id,
original_event_room_id, modified_event_room_id, reserve, room_events_uri,
properties
)
VALUES ($1, $2, $3, $4, $5, $6::class_type, $7, $8, $9, $10, $11, $12, $13)
RETURNING
id,
scope,
kind AS "kind!: ClassType",
audience,
time AS "time!: Time",
tags,
properties AS "properties: _",
preserve_history,
created_at,
event_room_id AS "event_room_id!: Uuid",
conference_room_id AS "conference_room_id!: Uuid",
original_event_room_id,
modified_event_room_id,
reserve,
room_events_uri,
host AS "host: AgentId",
timed_out,
original_class_id,
content_id
"#,
            self.scope,
            self.audience,
            time,
            self.tags,
            self.preserve_history,
            ClassType::Webinar as ClassType,
            Some(self.conference_room_id),
            self.event_room_id,
            self.original_event_room_id,
            self.modified_event_room_id,
            self.reserve,
            self.room_events_uri,
            self.properties.unwrap_or_default() as KeyValueProperties,
        )
        .fetch_one(conn)
        .await
    }
}
|
// svd2rust-generated reader/writer plumbing for SPI_CR2
// (TSIZE in bits 0:15, TSER in bits 16:31).
#[doc = "Register `SPI_CR2` reader"]
pub type R = crate::R<SPI_CR2_SPEC>;
#[doc = "Register `SPI_CR2` writer"]
pub type W = crate::W<SPI_CR2_SPEC>;
#[doc = "Field `TSIZE` reader - TSIZE"]
pub type TSIZE_R = crate::FieldReader<u16>;
#[doc = "Field `TSIZE` writer - TSIZE"]
pub type TSIZE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
#[doc = "Field `TSER` reader - TSER"]
pub type TSER_R = crate::FieldReader<u16>;
#[doc = "Field `TSER` writer - TSER"]
pub type TSER_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
impl R {
    #[doc = "Bits 0:15 - TSIZE"]
    #[inline(always)]
    pub fn tsize(&self) -> TSIZE_R {
        // `as u16` truncates to the low 16 bits, so the 0xffff mask is implicit.
        TSIZE_R::new(self.bits as u16)
    }
    #[doc = "Bits 16:31 - TSER"]
    #[inline(always)]
    pub fn tser(&self) -> TSER_R {
        // Shift the upper half down; truncation keeps exactly bits 16:31.
        TSER_R::new((self.bits >> 16) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - TSIZE"]
    #[inline(always)]
    #[must_use]
    pub fn tsize(&mut self) -> TSIZE_W<SPI_CR2_SPEC, 0> {
        TSIZE_W::new(self)
    }
    #[doc = "Bits 16:31 - TSER"]
    #[inline(always)]
    #[must_use]
    pub fn tser(&mut self) -> TSER_W<SPI_CR2_SPEC, 16> {
        TSER_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // svd2rust escape hatch: the caller must supply a value that is valid
    // for every field of this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SPI control register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`spi_cr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`spi_cr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SPI_CR2_SPEC;
// Marker impls: 32-bit register, readable, writable (no write-1/write-0
// side-effect bitmaps), resets to 0.
impl crate::RegisterSpec for SPI_CR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`spi_cr2::R`](R) reader structure"]
impl crate::Readable for SPI_CR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`spi_cr2::W`](W) writer structure"]
impl crate::Writable for SPI_CR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SPI_CR2 to value 0"]
impl crate::Resettable for SPI_CR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::string;
use self::XPathToken::*;
// NOTE(review): `#[deriving(...)]` and `Show` are pre-1.0 Rust syntax; a
// modern toolchain expects `#[derive(...)]` and `Debug`. Left untouched to
// match the era of the surrounding code.
/// Tokens produced by the XPath lexer.
#[deriving(PartialEq,Show,Clone)]
pub enum XPathToken {
    And,
    AtSign,
    CurrentNode,
    Divide,
    DollarSign,
    DoubleColon,
    DoubleSlash,
    Equal,
    GreaterThan,
    GreaterThanOrEqual,
    LeftBracket,
    LeftParen,
    LessThan,
    LessThanOrEqual,
    Literal(string::String),
    MinusSign,
    Multiply,
    NotEqual,
    Number(f64),
    Or,
    ParentNode,
    Pipe,
    PlusSign,
    PrefixedName(string::String, string::String),
    Remainder,
    RightBracket,
    RightParen,
    Slash,
    String(string::String),
    // Specializations
    Axis(string::String),
    Function(string::String),
    NodeTest(string::String),
}
impl XPathToken {
    /// True for tokens that must be followed by a node test
    /// (`@` and the axis separator `::`).
    pub fn precedes_node_test(& self) -> bool {
        match *self {
            AtSign |
            DoubleColon => true,
            _ => false,
        }
    }
    /// True for tokens after which a new expression starts (`(` and `[`).
    pub fn precedes_expression(& self) -> bool {
        match *self {
            LeftParen |
            LeftBracket => true,
            _ => false,
        }
    }
    /// True for binary operator tokens.
    pub fn is_operator(& self) -> bool {
        match *self {
            Slash |
            DoubleSlash |
            PlusSign |
            MinusSign |
            Pipe |
            Equal |
            NotEqual |
            LessThan |
            LessThanOrEqual |
            GreaterThan |
            GreaterThanOrEqual |
            And |
            Or |
            Remainder |
            Divide |
            Multiply => true,
            _ => false,
        }
    }
}
|
use proc_macro::{TokenStream, TokenTree};
/// In-place string append helper used by the code-generating macros below.
trait Concat{
    /// Appends `other` to `self` in place.
    fn concat(&mut self, other: String);
}

impl Concat for String{
    fn concat(&mut self, other: String){
        // `push_str` appends into the existing allocation when capacity
        // allows. The previous `*self = format!("{}{}", self, other)` built
        // a brand-new string on every call, making a long sequence of
        // appends O(n^2) in total copying.
        self.push_str(&other);
    }
}
/// Generates a `Joinable` impl for the tuple of comma-separated type names
/// in `input` (e.g. `impl_joinable!(T0, T1)`); names are upper-cased for the
/// type parameters and lower-cased for the closure bindings.
///
/// The generated `join` zips the component `JoinIter`s and yields
/// `(true, Some(tuple))` only when every component matched, otherwise
/// `(false, None)`.
#[proc_macro]
pub fn impl_joinable(input: TokenStream) -> TokenStream {
    let tokens: Vec<String> = input
        .to_string()
        .split(',')
        .map(|token|{
            token.to_uppercase()
        })
        .collect();
    let mut out_stream = String::new();
    // impl<'w, T0, T1, ...>
    out_stream.concat(format!("impl<'w"));
    for token in tokens.iter(){
        out_stream.concat(format!(",{} ", token));
    }
    out_stream.concat(format!("> Joinable<'w> for ("));
    // Target tuple: (T0, T1, ...)
    for (n, token) in tokens.iter().enumerate(){
        if n == 0{
            out_stream.concat(format!("{}", token));
        }
        else{
            out_stream.concat(format!(",{}", token));
        }
    }
    out_stream.concat(format!(")"));
    // where T0: Joinable<'w> + 'w, ...
    for (n, token) in tokens.iter().enumerate(){
        if n == 0{
            out_stream.concat(format!("where {}: Joinable<'w> + 'w", token));
        }
        else{
            out_stream.concat(format!(",{}: Joinable<'w> + 'w", token));
        }
    }
    // Open impl block
    out_stream.concat(format!("{{\ntype Target = ("));
    for (n, token) in tokens.iter().enumerate(){
        if n == 0{
            out_stream.concat(format!("{}::Target", token));
        }
        else{
            out_stream.concat(format!(",{}::Target", token));
        }
    }
    // Open join block
    // Open JoinIter block
    // Open box block
    out_stream.concat(format!(");
fn join(self) -> JoinIter<'w, Self::Target>{{
JoinIter{{
items: Box::new("));
    // self.0.join().items.zip(self.1.join().items)... — only the index is
    // needed here (the original bound the token as an unused variable,
    // triggering an unused-variable warning; fixed with `_`).
    for (n, _) in tokens.iter().enumerate(){
        if n == 0{
            out_stream.concat(format!("self.{}.join().items", n));
        }
        else{
            out_stream.concat(format!(".zip(self.{}.join().items)", n));
        }
    }
    out_stream.concat(format!(".map(|"));
    // Closure pattern mirroring zip's nested pairs: ((t0,t1),t2)...
    for (n, token) in tokens.iter().enumerate(){
        if n == 0{
            // One opening paren per zip level (unused counter was `par` in
            // the original; fixed with `_`).
            for _ in 0..(tokens.len() - 1){
                out_stream.concat(format!("("));
            }
            out_stream.concat(format!("{}", token.to_lowercase()));
        }
        else{
            out_stream.concat(format!(",{})", token.to_lowercase()));
        }
    }
    out_stream.concat(format!("|{{ if "));
    // All components must have matched: t0.0 && t1.0 && ...
    for (n, token) in tokens.iter().enumerate(){
        if n == 0{
            out_stream.concat(format!("{}.0", token.to_lowercase()));
        }
        else{
            out_stream.concat(format!("&& {}.0", token.to_lowercase()));
        }
    }
    out_stream.concat(format!("{{ return (true, Some(("));
    for (n, token) in tokens.iter().enumerate(){
        if n == 0{
            out_stream.concat(format!("{}.1.unwrap()", token.to_lowercase()));
        }
        else{
            out_stream.concat(format!(", {}.1.unwrap()", token.to_lowercase()));
        }
    }
    out_stream.concat(format!(")));}}else{{return (false, None);}} }})"));
    // Close box block
    // Close JoinIter block
    // Close join block
    out_stream.concat(format!(")\n }}\n }}\n }}"));
    out_stream.parse().unwrap()
}
#[proc_macro]
pub fn impl_joinable_multi(input: TokenStream) -> TokenStream {
let arg = input.to_string().parse::<usize>().unwrap();
let mut out_stream = String::new();
for i in (0..arg).skip(1){
out_stream.concat(format!("impl_joinable!("));
for j in 0..=i{
if j == 0{
out_stream.concat(format!("T{}", j));
}
else{
out_stream.concat(format!(", T{}", j));
}
}
out_stream.concat(format!(");"));
}
out_stream.parse().unwrap()
}
/// Generates a `SystemData` impl for the tuple of the comma-separated type
/// names in `input`: `get_data` fetches each component from the world, and
/// `get_dep_vec` folds the components' dependency vectors with `and`.
#[proc_macro]
pub fn impl_system_data(input: TokenStream) -> TokenStream{
    let tokens: Vec<String> = input
        .to_string()
        .split(',')
        .map(|token|{
            token.to_uppercase()
        })
        .collect();
    let mut out_stream = String::new();
    // impl<'d, T0: SystemData<'d>, ...> SystemData<'d> for (T0, ...)
    out_stream.concat(format!("impl<'d"));
    for token in tokens.iter(){
        out_stream.concat(format!(", {}: SystemData<'d>", token));
    }
    out_stream.concat(format!("> SystemData<'d> for ({}", tokens[0]));
    for token in tokens.iter().skip(1){
        out_stream.concat(format!(", {}", token));
    }
    out_stream.concat(format!("){{ fn get_data<'w: 'd, W: WorldCommon>(world: &'w W) -> Self{{({}::get_data(world)", tokens[0]));
    for token in tokens.iter().skip(1){
        out_stream.concat(format!(", {}::get_data(world)", token));
    }
    out_stream.concat(format!(")}}"));
    out_stream.concat(format!("fn get_dep_vec<'w: 'd, W: WorldCommon>(world: &W) -> DepVec{{ {}::get_dep_vec(world)", tokens[0]));
    for token in tokens.iter().skip(1){
        out_stream.concat(format!(".and(&{}::get_dep_vec(world))", token));
    }
    out_stream.concat(format!("}} }}"));
    out_stream.parse().unwrap()
}
#[proc_macro]
pub fn impl_system_data_multi(input: TokenStream) -> TokenStream {
let arg = input.to_string().parse::<usize>().unwrap();
let mut out_stream = String::new();
for i in (0..arg).skip(1){
out_stream.concat(format!("impl_system_data!("));
for j in 0..=i{
if j == 0{
out_stream.concat(format!("T{}", j));
}
else{
out_stream.concat(format!(", T{}", j));
}
}
out_stream.concat(format!(");"));
}
out_stream.parse().unwrap()
} |
// svd2rust-generated reader/writer plumbing for the DSI WIFCR register
// (write-1-to-clear interrupt flag bits).
#[doc = "Register `WIFCR` reader"]
pub type R = crate::R<WIFCR_SPEC>;
#[doc = "Register `WIFCR` writer"]
pub type W = crate::W<WIFCR_SPEC>;
#[doc = "Field `CTEIF` reader - Clear Tearing Effect Interrupt Flag"]
pub type CTEIF_R = crate::BitReader;
#[doc = "Field `CTEIF` writer - Clear Tearing Effect Interrupt Flag"]
pub type CTEIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CERIF` reader - Clear End of Refresh Interrupt Flag"]
pub type CERIF_R = crate::BitReader;
#[doc = "Field `CERIF` writer - Clear End of Refresh Interrupt Flag"]
pub type CERIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CPLLLIF` reader - Clear PLL Lock Interrupt Flag"]
pub type CPLLLIF_R = crate::BitReader;
#[doc = "Field `CPLLLIF` writer - Clear PLL Lock Interrupt Flag"]
pub type CPLLLIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CPLLUIF` reader - Clear PLL Unlock Interrupt Flag"]
pub type CPLLUIF_R = crate::BitReader;
#[doc = "Field `CPLLUIF` writer - Clear PLL Unlock Interrupt Flag"]
pub type CPLLUIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CRRIF` reader - Clear Regulator Ready Interrupt Flag"]
pub type CRRIF_R = crate::BitReader;
#[doc = "Field `CRRIF` writer - Clear Regulator Ready Interrupt Flag"]
pub type CRRIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - Clear Tearing Effect Interrupt Flag"]
    #[inline(always)]
    pub fn cteif(&self) -> CTEIF_R {
        // After masking to a single bit, the value is either 0 or 1, so
        // comparing against 1 is equivalent to the `!= 0` form.
        CTEIF_R::new((self.bits & 1) == 1)
    }
    #[doc = "Bit 1 - Clear End of Refresh Interrupt Flag"]
    #[inline(always)]
    pub fn cerif(&self) -> CERIF_R {
        CERIF_R::new(((self.bits >> 1) & 1) == 1)
    }
    #[doc = "Bit 9 - Clear PLL Lock Interrupt Flag"]
    #[inline(always)]
    pub fn cplllif(&self) -> CPLLLIF_R {
        CPLLLIF_R::new(((self.bits >> 9) & 1) == 1)
    }
    #[doc = "Bit 10 - Clear PLL Unlock Interrupt Flag"]
    #[inline(always)]
    pub fn cplluif(&self) -> CPLLUIF_R {
        CPLLUIF_R::new(((self.bits >> 10) & 1) == 1)
    }
    #[doc = "Bit 13 - Clear Regulator Ready Interrupt Flag"]
    #[inline(always)]
    pub fn crrif(&self) -> CRRIF_R {
        CRRIF_R::new(((self.bits >> 13) & 1) == 1)
    }
}
impl W {
    #[doc = "Bit 0 - Clear Tearing Effect Interrupt Flag"]
    #[inline(always)]
    #[must_use]
    pub fn cteif(&mut self) -> CTEIF_W<WIFCR_SPEC, 0> {
        CTEIF_W::new(self)
    }
    #[doc = "Bit 1 - Clear End of Refresh Interrupt Flag"]
    #[inline(always)]
    #[must_use]
    pub fn cerif(&mut self) -> CERIF_W<WIFCR_SPEC, 1> {
        CERIF_W::new(self)
    }
    #[doc = "Bit 9 - Clear PLL Lock Interrupt Flag"]
    #[inline(always)]
    #[must_use]
    pub fn cplllif(&mut self) -> CPLLLIF_W<WIFCR_SPEC, 9> {
        CPLLLIF_W::new(self)
    }
    #[doc = "Bit 10 - Clear PLL Unlock Interrupt Flag"]
    #[inline(always)]
    #[must_use]
    pub fn cplluif(&mut self) -> CPLLUIF_W<WIFCR_SPEC, 10> {
        CPLLUIF_W::new(self)
    }
    #[doc = "Bit 13 - Clear Regulator Ready Interrupt Flag"]
    #[inline(always)]
    #[must_use]
    pub fn crrif(&mut self) -> CRRIF_W<WIFCR_SPEC, 13> {
        CRRIF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // svd2rust escape hatch: the caller must supply a value that is valid
    // for every field of this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DSI Wrapper Interrupt Flag Clear Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wifcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wifcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct WIFCR_SPEC;
// Marker impls: 32-bit register, readable, writable (no write-1/write-0
// side-effect bitmaps), resets to 0.
impl crate::RegisterSpec for WIFCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`wifcr::R`](R) reader structure"]
impl crate::Readable for WIFCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`wifcr::W`](W) writer structure"]
impl crate::Writable for WIFCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets WIFCR to value 0"]
impl crate::Resettable for WIFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate md5;
extern crate walkdir;
extern crate colored;
extern crate boyer_moore;
mod signature;
mod scanner;
mod virus_database;
use virus_database::VirusDatabase;
use scanner::Scanner;
use std::io;
use std::io::Write;
use std::fs::metadata;
/// Interactive REPL: read a line, dispatch on the first word.
fn main() {
    loop {
        println!("Input your command");
        print!(">");
        io::stdout().flush().expect("Error flushing output stream");

        let mut line = String::new();
        io::stdin().read_line(&mut line).expect("Could not read user input.");
        // Drop the trailing newline left by read_line.
        line.pop();

        let args: Vec<&str> = line.split_whitespace().collect();
        if args.is_empty() {
            continue;
        }
        match args[0] {
            "add" => add_signature(args),
            "scan" => perform_scan(args),
            "help" => show_help(),
            "exit" => ::std::process::exit(0),
            _ => println!("Command not recognised."),
        }
    }
}
/// Scans a file or directory (`parameters[1]`) against the virus signature
/// database at `parameters[2]`.
fn perform_scan(parameters: Vec<&str>) {
    if parameters.len() < 3 {
        println!("Not enough arguments");
        // Bug fix: the original ended this branch with a bare `()` (a no-op
        // expression, not a return), so execution fell through and panicked
        // indexing `parameters[1]`. Return early instead.
        return;
    }
    // NOTE(review): `unwrap` panics on a missing/unreadable path — a
    // user-facing error message would be friendlier; confirm intended UX.
    let metadata = metadata(parameters[1]).unwrap();
    let db = VirusDatabase::new(parameters[2]).expect("Could not open database file");
    let scanner = Scanner::new(db);
    if metadata.is_file() {
        scanner.scan_file(parameters[1]).expect("Failed to scan file");
    } else if metadata.is_dir() {
        scanner.scan_system(parameters[1]);
    }
}
/// Generates a signature from the file at `parameters[1]`, attaches the
/// description `parameters[2]`, and stores it in the database file at
/// `parameters[3]`.
fn add_signature(parameters: Vec<&str>) {
    if parameters.len() < 4 {
        println!("Not enough arguments!");
        // Bug fix: the original ended this branch with a bare `()` (a no-op
        // expression), falling through to index `parameters[1..=3]` and
        // panicking. Return early instead.
        return;
    }
    let mut db = VirusDatabase::new(parameters[3]).expect("Could not open database file");
    let mut sig = signature::generate_signature(parameters[1]);
    sig.set_description(parameters[2].to_string());
    db.add_signature(sig).expect("Error adding signature");
}
/// Prints the list of supported commands. The continuation lines are part
/// of the string literal, so their formatting is emitted verbatim.
fn show_help() {
    println!("->scan <dir/file> <database>
->add <file> <database>
->help
->exit");
}
|
use cosmwasm_std::{
entry_point, from_binary, to_binary, Addr, BankMsg, Binary, Deps, DepsMut, Env, MessageInfo,
Order, Response, StdResult, SubMsg, Timestamp, WasmMsg,
};
use crate::balance::GenericBalance;
use crate::error::ContractError;
use crate::msg::{AllLocksResponse, ExecuteMsg, InstantiateMsg, LockInfo, QueryMsg, ReceiveMsg};
use crate::state::{Lock, State, LOCKS, STATE};
use cw2::set_contract_version;
use cw20::{Balance, Cw20Coin, Cw20CoinVerified, Cw20ExecuteMsg, Cw20ReceiveMsg};
// version info for migration info
// Recorded via `cw2::set_contract_version` during instantiation.
const CONTRACT_NAME: &str = "crates.io:cw-lockbox";
const CONTRACT_VERSION: &str = env!("CARGO_PKG_VERSION");
// Note, you can use StdResult in some functions where you do not
// make use of the custom errors
/// Stores the contract version and the initial `State`: the configured
/// `max_lock_time` and the instantiating sender as owner.
#[entry_point]
pub fn instantiate(
    deps: DepsMut,
    _env: Env,
    info: MessageInfo,
    msg: InstantiateMsg,
) -> Result<Response, ContractError> {
    set_contract_version(deps.storage, CONTRACT_NAME, CONTRACT_VERSION)?;
    let state = State {
        max_lock_time: msg.max_lock_time,
        owner: info.sender,
    };
    STATE.save(deps.storage, &state)?;
    Ok(Response::default())
}
// And declare a custom Error variant for the ones where you will want to make use of it
/// Mutating entry point: dispatches each `ExecuteMsg` variant to its
/// handler. Native funds arrive via `info.funds`; cw20 funds arrive via the
/// `Receive` variant (see `try_receive`).
#[entry_point]
pub fn execute(
    deps: DepsMut,
    env: Env,
    info: MessageInfo,
    msg: ExecuteMsg,
) -> Result<Response, ContractError> {
    match msg {
        ExecuteMsg::Lock { id, expire } => try_lock(
            deps,
            env,
            Balance::from(info.funds),
            &info.sender,
            id,
            expire,
        ),
        ExecuteMsg::IncreaseLock { id } => {
            try_increase_lock(deps, env, Balance::from(info.funds), &info.sender, id)
        }
        ExecuteMsg::Unlock { id } => try_unlock(deps, env, info, id),
        ExecuteMsg::Receive(msg) => try_receive(deps, env, info, msg),
    }
}
/// Creates a new lock `(sender, id)` holding `balance` until `expire`.
///
/// Fails when the balance is empty, when `expire` is not strictly in the
/// future, when the lock duration reaches the configured `max_lock_time`,
/// or when the `(sender, id)` pair already exists.
pub fn try_lock(
    deps: DepsMut,
    env: Env,
    balance: Balance,
    sender: &Addr,
    id: String,
    expire: Timestamp,
) -> Result<Response, ContractError> {
    if balance.is_empty() {
        return Err(ContractError::EmptyBalance {});
    }
    let current_time = env.block.time;
    // Expiry must be strictly after the current block time.
    if current_time.ge(&expire) {
        return Err(ContractError::LowExpired {});
    }
    let state = STATE.load(deps.storage)?;
    // Lock duration in seconds; must stay below the configured maximum.
    let diff = expire.minus_seconds(current_time.seconds());
    if diff.seconds().ge(&state.max_lock_time) {
        return Err(ContractError::HighExpired {});
    }
    let lock = Lock {
        create: env.block.time,
        expire,
        funds: balance.into(),
    };
    let key = (sender, id.to_owned());
    // try to store it, fail if the id was already in use
    LOCKS.update(deps.storage, key, |existing| match existing {
        None => Ok(lock),
        Some(_) => Err(ContractError::AlreadyInUse {}),
    })?;
    let res = Response::new()
        .add_attribute("action", "lock")
        .add_attribute("from", sender)
        .add_attribute("id", id);
    Ok(res)
}
/// Adds `balance` to an existing, not-yet-expired lock `(sender, id)`.
pub fn try_increase_lock(
    deps: DepsMut,
    env: Env,
    balance: Balance,
    sender: &Addr,
    id: String,
) -> Result<Response, ContractError> {
    if balance.is_empty() {
        return Err(ContractError::EmptyBalance {});
    }
    let key = (sender, id.to_owned());
    // Load fails with a std NotFound error when the lock does not exist.
    let mut lock = LOCKS.load(deps.storage, key.clone())?;
    // Funds cannot be added after the lock has expired.
    if env.block.time.gt(&lock.expire) {
        return Err(ContractError::LockExpired {});
    }
    lock.funds.add_tokens(balance);
    LOCKS.save(deps.storage, key, &lock)?;
    let res = Response::new()
        .add_attribute("action", "increase_lock")
        .add_attribute("from", sender)
        .add_attribute("id", id);
    Ok(res)
}
/// Releases the funds of lock `(info.sender, id)` once it has expired
/// (strictly after `expire`), sending them back to the sender and removing
/// the lock entry.
pub fn try_unlock(
    deps: DepsMut,
    env: Env,
    info: MessageInfo,
    id: String,
) -> Result<Response, ContractError> {
    let key = (&info.sender, id);
    let lock = LOCKS.load(deps.storage, key.clone())?;
    // At or before the expiry instant the funds are still locked.
    if env.block.time.le(&lock.expire) {
        return Err(ContractError::LockNotExpired {});
    }
    // unlock all tokens
    let messages = send_tokens(&info.sender, &lock.funds)?;
    // remove lock
    LOCKS.remove(deps.storage, key);
    let res = Response::new()
        .add_attribute("action", "unlock")
        .add_attribute("from", info.sender)
        .add_submessages(messages);
    Ok(res)
}
/// cw20 receive hook: `info.sender` is the token contract, the originating
/// user is `wrapper.sender` (validated here), and the embedded message
/// selects between creating a lock and topping one up.
pub fn try_receive(
    deps: DepsMut,
    env: Env,
    info: MessageInfo,
    wrapper: Cw20ReceiveMsg,
) -> Result<Response, ContractError> {
    let msg: ReceiveMsg = from_binary(&wrapper.msg)?;
    // The funds are cw20 tokens of the calling token contract.
    let balance = Balance::Cw20(Cw20CoinVerified {
        address: info.sender,
        amount: wrapper.amount,
    });
    let api = deps.api;
    let sender = &api.addr_validate(&wrapper.sender)?;
    match msg {
        ReceiveMsg::Lock { id, expire } => try_lock(deps, env, balance, sender, id, expire),
        ReceiveMsg::IncreaseLock { id } => try_increase_lock(deps, env, balance, sender, id),
    }
}
/// Builds the transfer submessages that pay `balance` out to `to`: native
/// coins become a single `BankMsg::Send`, and each cw20 entry becomes a
/// `Cw20ExecuteMsg::Transfer` wrapped in a `WasmMsg::Execute`.
fn send_tokens(to: &Addr, balance: &GenericBalance) -> StdResult<Vec<SubMsg>> {
    let native_balance = &balance.native;
    let mut msgs: Vec<SubMsg> = if native_balance.is_empty() {
        vec![]
    } else {
        vec![SubMsg::new(BankMsg::Send {
            to_address: to.into(),
            amount: native_balance.to_vec(),
        })]
    };
    let cw20_balance = &balance.cw20;
    let cw20_msgs: StdResult<Vec<_>> = cw20_balance
        .iter()
        .map(|c| {
            let msg = Cw20ExecuteMsg::Transfer {
                recipient: to.into(),
                amount: c.amount,
            };
            let exec = WasmMsg::Execute {
                contract_addr: c.address.to_string(),
                msg: to_binary(&msg)?,
                funds: vec![],
            };
            Ok(SubMsg::new(exec))
        })
        .collect();
    msgs.append(&mut cw20_msgs?);
    Ok(msgs)
}
/// Read-only entry point: routes each `QueryMsg` variant to its handler and
/// serializes the result to binary.
#[entry_point]
pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {
    match msg {
        QueryMsg::Lock { address, id } => {
            let lock = query_lock(deps, address, id)?;
            to_binary(&lock)
        }
        QueryMsg::AllLocks { address } => {
            let locks = query_locks(deps, address)?;
            to_binary(&locks)
        }
    }
}
/// Looks up a single lock by owner address and id.
fn query_lock(deps: Deps, address: String, id: String) -> StdResult<LockInfo> {
    let owner = deps.api.addr_validate(&address)?;
    let lock = LOCKS.load(deps.storage, (&owner, id.clone()))?;
    to_lock_info(lock, id)
}
/// Lists every lock id owned by `address`, in ascending key order.
fn query_locks(deps: Deps, address: String) -> StdResult<AllLocksResponse> {
    let owner = deps.api.addr_validate(&address)?;
    let ids = LOCKS
        .prefix(&owner)
        .keys(deps.storage, None, None, Order::Ascending)
        .map(String::from_utf8)
        .collect::<Result<Vec<_>, _>>()?;
    Ok(AllLocksResponse { locks: ids })
}
/// Converts a stored `Lock` into the externally visible `LockInfo`,
/// flattening the cw20 balances into plain `Cw20Coin` values.
fn to_lock_info(lock: Lock, id: String) -> StdResult<LockInfo> {
    // transform tokens
    let native_balance = lock.funds.native;
    let cw20_balance: StdResult<Vec<_>> = lock
        .funds
        .cw20
        .into_iter()
        .map(|token| {
            Ok(Cw20Coin {
                address: token.address.into(),
                amount: token.amount,
            })
        })
        .collect();
    let lock_info = LockInfo {
        id,
        create: lock.create,
        expire: lock.expire,
        native_balance,
        cw20_balance: cw20_balance?,
    };
    Ok(lock_info)
}
#[cfg(test)]
mod tests {
use super::*;
use cosmwasm_std::testing::{
mock_dependencies, mock_dependencies_with_balance, mock_env, mock_info,
};
use cosmwasm_std::{coins, from_binary, CosmosMsg, StdError, StdResult, SubMsg};
#[test]
fn proper_initialization() {
let mut deps = mock_dependencies();
let msg = InstantiateMsg {
max_lock_time: 3600,
};
let info = mock_info("creator", &coins(1000, "earth"));
// we can just call .unwrap() to assert this was a success
let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();
assert_eq!(0, res.messages.len());
}
#[test]
fn lock() {
let mut deps = mock_dependencies();
let msg = InstantiateMsg {
max_lock_time: 3600,
};
let info = mock_info("creator", &[]);
let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();
// empty funds
let info = mock_info("anyone", &[]);
let msg = ExecuteMsg::Lock {
id: "1".into(),
expire: Timestamp::from_seconds(10),
};
let res = execute(deps.as_mut(), mock_env(), info, msg);
match res {
Err(ContractError::EmptyBalance {}) => {}
_ => panic!("Must return EmptyBalance error"),
}
// lower expire
let info = mock_info("anyone", &coins(2, "token"));
let msg = ExecuteMsg::Lock {
id: "1".into(),
expire: Timestamp::from_seconds(10),
};
let mut env = mock_env();
env.block.time = Timestamp::from_seconds(100);
let res = execute(deps.as_mut(), env.clone(), info.clone(), msg);
match res {
Err(ContractError::LowExpired {}) => {}
_ => panic!("Must return LowExpired error"),
}
// high expire
env.block.time = Timestamp::from_seconds(0);
let info = mock_info("anyone", &coins(2, "token"));
let msg = ExecuteMsg::Lock {
id: "1".into(),
expire: Timestamp::from_seconds(4000),
};
let res = execute(deps.as_mut(), env.clone(), info.clone(), msg);
match res {
Err(ContractError::HighExpired {}) => {}
_ => panic!("Must return HighExpired error"),
}
// lock funds 1
let msg = ExecuteMsg::Lock {
id: "1".into(),
expire: Timestamp::from_seconds(200),
};
let _res = execute(deps.as_mut(), env.clone(), info.clone(), msg).unwrap();
// should exists lock
let msg = QueryMsg::Lock {
address: "anyone".into(),
id: "1".into(),
};
let res = query(deps.as_ref(), mock_env(), msg).unwrap();
let value: LockInfo = from_binary(&res).unwrap();
assert_eq!(0, value.create.seconds());
assert_eq!(200, value.expire.seconds());
// try lock same id
let msg = ExecuteMsg::Lock {
id: "1".into(),
expire: Timestamp::from_seconds(200),
};
let res = execute(deps.as_mut(), env.clone(), info.clone(), msg);
match res {
Err(ContractError::AlreadyInUse {}) => {}
_ => panic!("Must return AlreadyInUse error"),
}
// lock funds 2
let msg = ExecuteMsg::Lock {
id: "2".into(),
expire: Timestamp::from_seconds(300),
};
let _res = execute(deps.as_mut(), env, info, msg).unwrap();
// should exists lock
let msg = QueryMsg::Lock {
address: "anyone".into(),
id: "2".into(),
};
let res = query(deps.as_ref(), mock_env(), msg).unwrap();
let value: LockInfo = from_binary(&res).unwrap();
assert_eq!(300, value.expire.seconds());
let res = query(
deps.as_ref(),
mock_env(),
QueryMsg::AllLocks {
address: "anyone".into(),
},
)
.unwrap();
let value: AllLocksResponse = from_binary(&res).unwrap();
assert_eq!(2, value.locks.len())
}
#[test]
fn increase_lock() {
    // Instantiate the contract with a one-hour maximum lock time.
    let mut deps = mock_dependencies();
    let msg = InstantiateMsg {
        max_lock_time: 3600,
    };
    let info = mock_info("creator", &[]);
    let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();
    // Lock funds: "anyone" locks 2 token under id "1" at t=100, expiring at t=200.
    let mut env = mock_env();
    let info = mock_info("anyone", &coins(2, "token"));
    env.block.time = Timestamp::from_seconds(100);
    let msg = ExecuteMsg::Lock {
        id: "1".into(),
        expire: Timestamp::from_seconds(200),
    };
    let _res = execute(deps.as_mut(), env.clone(), info.clone(), msg).unwrap();
    // Try to increase a lock id ("2") that was never created -> NotFound.
    let info = mock_info("anyone", &coins(5, "token"));
    let msg = ExecuteMsg::IncreaseLock { id: "2".into() };
    let res = execute(deps.as_mut(), env.clone(), info.clone(), msg);
    match res {
        Err(ContractError::Std(StdError::NotFound { .. })) => {}
        _ => panic!("Must return StdError::NotFound error"),
    }
    // Try to increase lock "1" after its expiry (t=201 > 200) -> LockExpired.
    let info = mock_info("anyone", &coins(5, "token"));
    let msg = ExecuteMsg::IncreaseLock { id: "1".into() };
    env.block.time = Timestamp::from_seconds(201);
    let res = execute(deps.as_mut(), env.clone(), info.clone(), msg);
    match res {
        Err(ContractError::LockExpired {}) => {}
        _ => panic!("Must return LockExpired error"),
    }
    // Increase the valid lock: rewind block time inside the lock window.
    // Note `info` still carries the 5 token from the attempt above.
    let msg = ExecuteMsg::IncreaseLock { id: "1".into() };
    env.block.time = Timestamp::from_seconds(120);
    let res = execute(deps.as_mut(), env.clone(), info, msg).unwrap();
    assert_eq!(0, res.messages.len());
    // Query the lock: balance should be the original 2 + the added 5 token.
    let msg = QueryMsg::Lock {
        address: "anyone".into(),
        id: "1".into(),
    };
    let res = query(deps.as_ref(), mock_env(), msg).unwrap();
    let value: LockInfo = from_binary(&res).unwrap();
    assert_eq!(coins(7, "token"), value.native_balance);
}
#[test]
fn unlock() {
    // Mock deps whose contract account reports a balance of 2 token.
    let mut deps = mock_dependencies_with_balance(&coins(2, "token"));
    let msg = InstantiateMsg {
        max_lock_time: 3600,
    };
    let info = mock_info("creator", &[]);
    let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();
    // Lock funds: 2 token under id "1" at t=0, expiring at t=400.
    let info = mock_info("anyone", &coins(2, "token"));
    let mut env = mock_env();
    env.block.time = Timestamp::from_seconds(0);
    let msg = ExecuteMsg::Lock {
        id: "1".into(),
        expire: Timestamp::from_seconds(400),
    };
    let _res = execute(deps.as_mut(), env, info, msg).unwrap();
    // Cannot unlock before expiry: at t=100 the lock is still active.
    let auth_info = mock_info("anyone", &[]);
    let msg = ExecuteMsg::Unlock { id: "1".into() };
    let mut env = mock_env();
    env.block.time = Timestamp::from_seconds(100);
    let res = execute(deps.as_mut(), env.clone(), auth_info, msg);
    match res {
        Err(ContractError::LockNotExpired {}) => {}
        _ => panic!("Must return LockNotExpired error"),
    }
    // Unlock after expiry (t=401 > 400) succeeds.
    let auth_info = mock_info("anyone", &[]);
    let msg = ExecuteMsg::Unlock { id: "1".into() };
    env.block.time = Timestamp::from_seconds(401);
    let res = execute(deps.as_mut(), env, auth_info, msg).unwrap();
    assert_eq!(1, res.messages.len());
    // The single response message must refund the 2 token to the locker.
    assert_eq!(
        res.messages[0],
        SubMsg::new(CosmosMsg::Bank(BankMsg::Send {
            to_address: "anyone".into(),
            amount: coins(2, "token")
        }))
    );
    // The lock entry is removed after unlock, so querying it must fail.
    let msg = QueryMsg::Lock {
        address: "anyone".into(),
        id: "1".into(),
    };
    let res = query(deps.as_ref(), mock_env(), msg);
    match res {
        StdResult::Err(StdError::NotFound { .. }) => {}
        _ => panic!("Must return StdError::NotFound error"),
    }
}
}
|
#![cfg(not(any(target_os = "wasi", target_os = "redox")))]
use io_lifetimes::AsFd;
use posish::time::{clock_gettime_dynamic, DynamicClockId, ClockId};
#[test]
fn test_known_clocks() {
    // Realtime and Monotonic are the baseline clocks; both calls must succeed.
    clock_gettime_dynamic(DynamicClockId::Known(ClockId::Realtime)).unwrap();
    clock_gettime_dynamic(DynamicClockId::Known(ClockId::Monotonic)).unwrap();
}
#[test]
fn test_dynamic_clocks() {
    // A plain file descriptor is not a valid dynamic clock source, so this
    // call is expected to fail — hence `unwrap_err`.
    let file = std::fs::File::open("Cargo.toml").unwrap();
    clock_gettime_dynamic(DynamicClockId::Dynamic(file.as_fd())).unwrap_err();
}
#[cfg(any(target_os = "android", target_os = "linux"))]
#[test]
fn test_conditional_clocks() {
    // TAI may be unavailable at runtime, so the result is deliberately
    // ignored; the test only checks the call itself doesn't panic.
    let _ = clock_gettime_dynamic(DynamicClockId::Tai);
}
|
use std::thread;
use std::sync::mpsc;
use std::sync::Arc;
use std::sync::Mutex;
/// A job is any one-shot closure that can be sent to a worker thread.
type Job = Box<dyn FnOnce() + Send + 'static>;

/// Messages the pool sends to its workers over the shared channel.
enum Message {
    /// A closure the receiving worker should run.
    NewJob(Job),
    /// Ask the receiving worker to exit its loop.
    Terminate,
}

/// A fixed-size pool of worker threads fed jobs over an mpsc channel.
pub struct ThreadPool {
    workers: Vec<Worker>,
    sender: mpsc::Sender<Message>,
}

impl ThreadPool {
    /// Create a new ThreadPool.
    ///
    /// The size is the number of threads in the pool.
    ///
    /// # Panics
    ///
    /// The `new` function will panic if the size is zero.
    pub fn new(size: usize) -> ThreadPool {
        assert!(size > 0);
        let (tx, rx) = mpsc::channel();
        // The receiver is shared by all workers; the Mutex serialises `recv`.
        let rx = Arc::new(Mutex::new(rx));
        let workers = (1..=size)
            .map(|idx| Worker::new(idx, Arc::clone(&rx)))
            .collect();
        ThreadPool { workers, sender: tx }
    }

    /// Queue `f` to run on the next free worker thread.
    pub fn execute<F>(&self, f: F)
    where
        F: FnOnce() + Send + 'static,
    {
        self.sender.send(Message::NewJob(Box::new(f))).unwrap();
        println!("Request sent to Worker channel");
    }

    /// Ask every worker to stop, then join all worker threads.
    ///
    /// Idempotent: a second call (or the implicit call from `Drop`) finds the
    /// worker handles already taken and does nothing further.
    pub fn terminate(&mut self) {
        println!("Sending terminate message to all workers.");
        // Send as many Terminate messages as there are workers. Errors are
        // ignored: if the workers already exited (previous terminate call),
        // the receiver is gone and there is nothing left to stop.
        for _ in &self.workers {
            let _ = self.sender.send(Message::Terminate);
        }
        println!("Shutting down all workers.");
        for worker in &mut self.workers {
            println!("Shutting down worker {}", worker.id);
            // `take` leaves None behind so a repeated call skips the join.
            if let Some(thread) = worker.thread.take() {
                thread.join().unwrap();
            }
        }
    }
}

impl Drop for ThreadPool {
    /// Shut the pool down cleanly before the channel is torn down.
    ///
    /// Without this, dropping the pool closed the sender and every worker
    /// panicked on `recv().unwrap()` while still running detached.
    fn drop(&mut self) {
        self.terminate();
        println!("Dropping off..bye bye!")
    }
}

/// One pool thread plus its id; `thread` becomes None once joined.
struct Worker {
    id: usize,
    thread: Option<thread::JoinHandle<()>>,
}

impl Worker {
    /// Spawn a thread that loops pulling messages until Terminate arrives.
    fn new(id: usize, receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Worker {
        let thread = thread::spawn(move || loop {
            // Hold the lock only long enough to receive one message, so
            // other workers can pull jobs while this one executes.
            let message = receiver.lock().unwrap().recv().unwrap();
            match message {
                Message::NewJob(job) => {
                    println!("Worker {} received new job; executing.", id);
                    job();
                }
                Message::Terminate => {
                    println!("Worker {} was told to TERMINATE.", id);
                    break;
                }
            }
        });
        Worker { id, thread: Some(thread) }
    }
}
|
/// When to use Box<T>
// 1. When you have a type whose size can’t be known at
// compile time and you want to use a value of that type
// in a context that requires an exact size
// 2. When you have a large amount of data and you want
// to transfer ownership but ensure the data won’t be
// copied when you do so
// 3. When you want to own a value and you care only that
// it’s a type that implements a particular trait rather
// than being of a specific type
/// At compile time, Rust needs to know how much space a type takes up
// -> recursive type
use std::ops::Deref;
mod mock;
// https://doc.rust-lang.org/rust-by-example/custom_types/enum/testcase_linked_list.html
// Demonstrates that `*` behaves identically on a plain reference, a Box<T>,
// and our own MyBox<T> once MyBox implements Deref.
fn deref_use() {
    let x = 5;

    // A plain reference to x.
    let x_ref = &x;
    assert_eq!(x, *x_ref);

    // A Box pointing at a copy of x, rather than a reference to it.
    let boxed = Box::new(x);
    assert_eq!(x, *boxed);

    // MyBox works with `*` because it implements Deref; behind the scenes
    // Rust rewrites `*my_box` as `*(my_box.deref())` — the explicit `*`
    // is applied exactly once per `*` in the source.
    let my_box = MyBox::new(x);
    assert_eq!(5, *my_box);
}
// 2. create my Box(T)
// Tuple struct wrapping a single value (unlike the real Box<T>, the value
// lives inline, not on the heap — this demo only needs the Deref behavior).
struct MyBox<T>(T);
impl<T> MyBox<T> {
    /// Wrap `x` in a MyBox.
    fn new(x: T) -> MyBox<T> {
        MyBox(x)
    }
}
// Treating a type like a reference by implementing the `Deref` trait.
impl<T> Deref for MyBox<T> {
    type Target = T;
    /// Return a reference to the wrapped value so `*my_box` compiles.
    fn deref(&self) -> &T {
        &self.0
    }
}
// 3. `Drop` trait
// Specify the code to run when a value goes out of scope by implementing
// the Drop trait. The Drop trait requires you to implement one method named
// drop that takes a mutable reference to self.
#[derive(Debug)]
struct CustomSmartPointer {
    // Label printed by the Drop impl when this value is freed.
    data: String,
}
impl Drop for CustomSmartPointer {
    // destructor
    /// Runs automatically when the value goes out of scope (or early via
    /// `std::mem::drop`); announces which instance is being freed.
    fn drop(&mut self) {
        println!("Dropping with data `{}`!", self.data);
    }
}
// 4. The reference counted smart pointer
// To enable multiple ownership, Rust has a type called Rc<T>
// Note that Rc<T> is only for use in single-threaded scenarios
#[derive(Debug)]
enum List {
    // Rc<List> (instead of the earlier Box<List>) lets several lists share
    // the same tail node.
    Cons(i32, Rc<List>), // Box<List>
    Nil,
}
use crate::List::{Cons, Nil};
use std::rc::Rc;
// Via immutable references, Rc<T> allows you to share data
// between multiple parts of your program for reading only
// Shows how the Rc<T> strong count rises as lists b and c share a's tail,
// and falls again when one of the sharers goes out of scope.
fn use_rc_create_cons() {
    let a = Rc::new(Cons(5, Rc::new(Cons(10, Rc::new(Nil)))));
    println!("count after creating a = {}", Rc::strong_count(&a));

    // Rc::clone only bumps the reference count — no deep copy of the list.
    let b = Cons(3, Rc::clone(&a));
    println!("count after creating b = {}", Rc::strong_count(&a));

    {
        // When `_c` leaves this scope, Rc's Drop impl decrements the count
        // automatically.
        let _c = Cons(4, Rc::clone(&a));
        println!("count after creating c = {}", Rc::strong_count(&a));
    }
    println!("count after drop c = {}", Rc::strong_count(&a));
    println!("b = {:?}\n", b);
}
// 5. `RefCell<T>` and interior mutability pattern
/// Interior mutability is a design pattern in Rust
/// that allows you to mutate data even when there
/// are immutable references to that data
// About Borrowing rules:
/// a. At any given time, you can have either (but not both of)
/// one mutable reference or any number of immutable references.
/// b. References must always be valid.
/// With references and Box<T>, the borrowing rules’ invariants
/// are enforced at compile time. With RefCell<T>, these invariants
/// are enforced at runtime. With references, if you break these
/// rules, you’ll get a compiler error. With RefCell<T>, if
/// you break these rules, your program will panic and exit.
/// Similar to Rc<T>, RefCell<T> is only for use in single-threaded
/// scenarios and will give you a compile-time error if you try using
/// it in a multithreaded context.
/// Here is a recap of the reasons to choose Box<T>, Rc<T>, or RefCell<T>:
/// a. Rc<T> enables multiple owners of the same data; Box<T> and RefCell<T> have single owners.
/// b. Box<T> allows immutable or mutable borrows checked at compile time;
/// Rc<T> allows only immutable borrows checked at compile time; RefCell<T>
/// allows immutable or mutable borrows checked at runtime.
/// c. Because RefCell<T> allows mutable borrows checked at runtime,
/// you can mutate the value inside the RefCell<T> even when the RefCell<T> is immutable.
// Mutating the value inside an immutable value is the interior mutability pattern.
#[allow(dead_code)]
fn interior_mutability() {
    // A plain immutable binding: taking `&mut` of it is rejected at compile
    // time, which is exactly the rule RefCell<T> moves to runtime.
    let _x = 5;
    // !! cannot borrow immutable local variable `x` as mutable
    // let y = &mut x;
}
/// Having multiple owners of mutable data by combining Rc<T> and RefCell<T>
/// A common way to use RefCell<T> is in combination with Rc<T>. Recall that
/// Rc<T> lets you have multiple owners of some data, but it only gives
/// immutable access to that data. If you have an Rc<T> that holds a RefCell<T>,
/// you can get a value that can have multiple owners and that you can mutate!
#[derive(Debug)]
enum MutList {
    // Each node holds Rc<RefCell<i32>>: Rc for shared ownership of the
    // value, RefCell for mutating it through any of the owners.
    MutCons(Rc<RefCell<i32>>, Rc<MutList>),
    Nil
}
use crate::MutList::{MutCons, Nil as Null};
use std::cell::RefCell;
// Rc<RefCell<i32>> combines shared ownership (Rc) with interior mutability
// (RefCell): all three lists observe the mutation made through `shared`.
fn multi_owners_mutable_data() {
    let shared = Rc::new(RefCell::new(5));

    let a = Rc::new(MutCons(Rc::clone(&shared), Rc::new(Null)));
    let b = MutCons(Rc::new(RefCell::new(6)), Rc::clone(&a));
    let c = MutCons(Rc::new(RefCell::new(7)), Rc::clone(&a));

    // borrow_mut hands back a RefMut<i32> smart pointer; dereference it to
    // update the value every owner sees.
    *shared.borrow_mut() += 10;

    println!("a after = {:?}", a);
    println!("b after = {:?}", b);
    println!("c after = {:?}", c);
    println!("\n")
}
/// 6. Reference cycles can leak memory
mod reference;
use reference::cycle_reference::CycList::{ Cons as CycleCons, Nil as CycleNil };
// Builds the reference cycle a -> b -> a. Both strong counts end at 2, so
// neither Rc ever reaches 0 and the nodes are leaked when main exits.
fn cycle_reference() {
    let a = Rc::new(CycleCons(5, RefCell::new(Rc::new(CycleNil))));
    println!("a initial rc count = {}", Rc::strong_count(&a));
    println!("a next item: {:?}", a.tail());

    let b = Rc::new(CycleCons(10, RefCell::new(Rc::clone(&a))));
    println!("a rc count after b creation = {}", Rc::strong_count(&a));
    println!("b initial rc count = {}", Rc::strong_count(&b));
    println!("b next item = {:?}", b.tail());

    // Point a's tail at b, closing the cycle.
    if let Some(link) = a.tail() {
        *link.borrow_mut() = Rc::clone(&b);
    }

    println!("b rc count after changing a = {}", Rc::strong_count(&b));
    println!("a rc count after changing a = {}", Rc::strong_count(&a));
    println!("\n");
    // Walking the list now would recurse forever:
    // !!thread 'main' has overflowed its stack
    // println!("a next item = {:?}", a.tail());
}
fn main() {
    // 2. Deref and the `*` operator.
    deref_use();

    // 3. Drop trait: `drop(a)` frees `a` early; `b` lives to the end of main.
    let a = CustomSmartPointer { data: String::from("my stuff") };
    let b = CustomSmartPointer { data: String::from("other stuff") };
    // std::mem::drop (in the prelude) takes ownership, so the destructor
    // runs here — and ownership guarantees it cannot run a second time.
    drop(a);
    println!("CustomSmartPointer created {:?}", b);

    // 4. Rc<T>: shared ownership through reference counting.
    println!("## Rc<T> multiple reference");
    use_rc_create_cons();

    // 5. Rc<RefCell<T>>: shared AND mutable.
    println!("## multi owners mutable data");
    multi_owners_mutable_data();

    // 6. A reference cycle that leaks memory.
    println!("## cycle reference");
    cycle_reference();
}
|
mod seconds;
mod minutes;
mod hours;
mod days_of_month;
mod months;
mod days_of_week;
mod years;
pub use self::seconds::Seconds;
pub use self::minutes::Minutes;
pub use self::hours::Hours;
pub use self::days_of_month::DaysOfMonth;
pub use self::months::Months;
pub use self::days_of_week::DaysOfWeek;
pub use self::years::Years;
use schedule::{Specifier, Ordinal, OrdinalSet, ExpressionError};
use std::borrow::Cow;
use std::iter;
/// Shared behaviour for one field of a cron expression (seconds, minutes,
/// hours, days, months, years): each field declares its valid ordinal range
/// and can expand a parsed `Specifier` into the concrete set of ordinals it
/// matches.
pub trait TimeUnitField
    where Self: Sized
{
    /// Build the field from an explicit set of ordinals.
    fn from_ordinal_set(ordinal_set: OrdinalSet) -> Self;
    /// Human-readable field name, used in error messages.
    fn name() -> Cow<'static, str>;
    /// Smallest valid ordinal for this field (inclusive).
    fn inclusive_min() -> Ordinal;
    /// Largest valid ordinal for this field (inclusive).
    fn inclusive_max() -> Ordinal;
    /// The set of ordinals this field instance matches.
    fn ordinals(&self) -> &OrdinalSet;
    /// Build the field from a single ordinal.
    fn from_ordinal(ordinal: Ordinal) -> Self {
        Self::from_ordinal_set(iter::once(ordinal).collect())
    }
    /// Every ordinal this field could legally contain.
    fn supported_ordinals() -> OrdinalSet {
        // Inclusive range instead of `min..max + 1`: same set, but immune to
        // overflow should inclusive_max() ever be Ordinal::MAX.
        (Self::inclusive_min()..=Self::inclusive_max()).collect()
    }
    /// A field matching all supported ordinals (the `*` specifier).
    fn all() -> Self {
        Self::from_ordinal_set(Self::supported_ordinals())
    }
    /// Map a symbolic name (e.g. "MON") to its ordinal. Fields that support
    /// names override this; the default rejects every name.
    fn ordinal_from_name(name: &str) -> Result<Ordinal, ExpressionError> {
        Err(ExpressionError(format!("The '{}' field does not support using names. '{}' \
                                     specified.",
                                    Self::name(),
                                    name)))
    }
    /// Return `ordinal` unchanged when it lies inside
    /// [inclusive_min, inclusive_max]; otherwise an ExpressionError.
    fn validate_ordinal(ordinal: Ordinal) -> Result<Ordinal, ExpressionError> {
        //println!("validate_ordinal for {} => {}", Self::name(), ordinal);
        match ordinal {
            i if i < Self::inclusive_min() => {
                Err(ExpressionError(format!("{} must be greater than or equal to {}. ('{}' \
                                             specified.)",
                                            Self::name(),
                                            Self::inclusive_min(),
                                            i)))
            }
            i if i > Self::inclusive_max() => {
                Err(ExpressionError(format!("{} must be less than {}. ('{}' specified.)",
                                            Self::name(),
                                            Self::inclusive_max(),
                                            i)))
            }
            i => Ok(i),
        }
    }
    /// Expand a parsed `Specifier` into the set of ordinals it denotes for
    /// this field, validating every bound against the field's range.
    fn ordinals_from_specifier(specifier: &Specifier) -> Result<OrdinalSet, ExpressionError> {
        use self::Specifier::*;
        //println!("ordinals_from_specifier for {} => {:?}", Self::name(), specifier);
        match *specifier {
            All => Ok(Self::supported_ordinals()),
            Point(ordinal) => Ok(iter::once(ordinal).collect()),
            NamedPoint(ref name) => Ok(iter::once(Self::ordinal_from_name(name)?).collect()),
            Period(start, step) => {
                // `Iterator::step_by` panics on a zero step; report it as a
                // normal expression error instead of crashing the caller.
                if step == 0 {
                    return Err(ExpressionError(format!("{} step must be greater than 0.",
                                                       Self::name())));
                }
                let start = Self::validate_ordinal(start)?;
                Ok((start..=Self::inclusive_max()).step_by(step).collect())
            }
            Range(start, end) => {
                match (Self::validate_ordinal(start), Self::validate_ordinal(end)) {
                    (Ok(start), Ok(end)) if start <= end => Ok((start..=end).collect()),
                    _ => {
                        Err(ExpressionError(format!("Invalid range for {}: {}-{}",
                                                    Self::name(),
                                                    start,
                                                    end)))
                    }
                }
            }
            NamedRange(ref start_name, ref end_name) => {
                let start = Self::ordinal_from_name(start_name)?;
                let end = Self::ordinal_from_name(end_name)?;
                match (Self::validate_ordinal(start), Self::validate_ordinal(end)) {
                    (Ok(start), Ok(end)) if start <= end => Ok((start..=end).collect()),
                    _ => {
                        Err(ExpressionError(format!("Invalid named range for {}: {}-{}",
                                                    Self::name(),
                                                    start_name,
                                                    end_name)))
                    }
                }
            }
        }
    }
}
|
use std::fs::File;
use std::io::Read;
pub mod life_105;
pub mod life_106;
pub mod plaintext;
pub mod rle;
/// Describes what type of file it is based on the file extension.
pub enum FileType {
    // `.lif` / `.life` files (Life 1.05 or 1.06 format).
    Life,
    // `.cells` plaintext files.
    PlainText,
    // `.rle` run-length-encoded files.
    RLE,
}
impl FileType {
    /// Parses the file type from filename.
    ///
    /// Matches on the filename suffix ("lif"/"life", "cells", "rle");
    /// returns None for anything unrecognised.
    pub fn from_filename<S: AsRef<str>>(s: &S) -> Option<FileType> {
        let name = s.as_ref();
        if name.ends_with("lif") || name.ends_with("life") {
            return Some(FileType::Life);
        }
        if name.ends_with("cells") {
            return Some(FileType::PlainText);
        }
        if name.ends_with("rle") {
            return Some(FileType::RLE);
        }
        None
    }
}
#[derive(Default)]
pub struct Pattern {
    // Coordinates of every live cell.
    pub cells: Vec<(isize, isize)>,
    // Optional metadata parsed from file headers.
    pub name: Option<String>,
    pub description: Option<String>,
    pub author: Option<String>,
}
impl Pattern {
    /// Read `filename`, detect its format from the extension, and parse it.
    ///
    /// Returns a descriptive `Err(String)` if the file cannot be opened or
    /// read, if the extension is not recognised, or if the contents do not
    /// match the detected format.
    pub fn from_file<S: AsRef<str>>(filename: S) -> Result<Pattern, String> {
        let filename = filename.as_ref();
        // Read file and get rules from them.
        let mut file = match File::open(filename) {
            Ok(f) => f,
            Err(e) => return Err(format!("Could not open file: {}", e)),
        };
        let mut contents = String::new();
        if let Err(e) = file.read_to_string(&mut contents) {
            return Err(format!("Could not read file to string: {}", e));
        }
        // An unknown extension is a caller/input problem, not a bug: return
        // Err instead of panicking (this previously used `expect`, which
        // crashed the process even though the signature promises Result).
        let file_type: FileType = match FileType::from_filename(&filename) {
            Some(t) => t,
            None => return Err(format!("Unrecognised file type: {}", filename)),
        };
        match file_type {
            FileType::Life => {
                if life_106::is_life_106_file(&contents) {
                    life_106::parse_life_106_file(&contents)
                } else if life_105::is_life_105_file(&contents) {
                    life_105::parse_life_105_file(&contents)
                } else {
                    Err(String::from("File was classified as Life but it misses all of the known headers: `#Life 1.06` and `#Life 1.05`."))
                }
            }
            FileType::PlainText => {
                if plaintext::is_plaintext_file(&contents) {
                    plaintext::parse_plaintext_file(&contents)
                } else {
                    Err(String::from("File was classified as a plaintext file (`.cells`) but it doesn't start with `!Name: `."))
                }
            }
            FileType::RLE => rle::parse_rle_file(&contents),
        }
    }
}
|
#[doc = "Register `MPCBB2_VCTR58` reader"]
pub type R = crate::R<MPCBB2_VCTR58_SPEC>;
#[doc = "Register `MPCBB2_VCTR58` writer"]
pub type W = crate::W<MPCBB2_VCTR58_SPEC>;
#[doc = "Field `B1856` reader - B1856"]
pub type B1856_R = crate::BitReader;
#[doc = "Field `B1856` writer - B1856"]
pub type B1856_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1857` reader - B1857"]
pub type B1857_R = crate::BitReader;
#[doc = "Field `B1857` writer - B1857"]
pub type B1857_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1858` reader - B1858"]
pub type B1858_R = crate::BitReader;
#[doc = "Field `B1858` writer - B1858"]
pub type B1858_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1859` reader - B1859"]
pub type B1859_R = crate::BitReader;
#[doc = "Field `B1859` writer - B1859"]
pub type B1859_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1860` reader - B1860"]
pub type B1860_R = crate::BitReader;
#[doc = "Field `B1860` writer - B1860"]
pub type B1860_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1861` reader - B1861"]
pub type B1861_R = crate::BitReader;
#[doc = "Field `B1861` writer - B1861"]
pub type B1861_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1862` reader - B1862"]
pub type B1862_R = crate::BitReader;
#[doc = "Field `B1862` writer - B1862"]
pub type B1862_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1863` reader - B1863"]
pub type B1863_R = crate::BitReader;
#[doc = "Field `B1863` writer - B1863"]
pub type B1863_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1864` reader - B1864"]
pub type B1864_R = crate::BitReader;
#[doc = "Field `B1864` writer - B1864"]
pub type B1864_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1865` reader - B1865"]
pub type B1865_R = crate::BitReader;
#[doc = "Field `B1865` writer - B1865"]
pub type B1865_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1866` reader - B1866"]
pub type B1866_R = crate::BitReader;
#[doc = "Field `B1866` writer - B1866"]
pub type B1866_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1867` reader - B1867"]
pub type B1867_R = crate::BitReader;
#[doc = "Field `B1867` writer - B1867"]
pub type B1867_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1868` reader - B1868"]
pub type B1868_R = crate::BitReader;
#[doc = "Field `B1868` writer - B1868"]
pub type B1868_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1869` reader - B1869"]
pub type B1869_R = crate::BitReader;
#[doc = "Field `B1869` writer - B1869"]
pub type B1869_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1870` reader - B1870"]
pub type B1870_R = crate::BitReader;
#[doc = "Field `B1870` writer - B1870"]
pub type B1870_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1871` reader - B1871"]
pub type B1871_R = crate::BitReader;
#[doc = "Field `B1871` writer - B1871"]
pub type B1871_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1872` reader - B1872"]
pub type B1872_R = crate::BitReader;
#[doc = "Field `B1872` writer - B1872"]
pub type B1872_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1873` reader - B1873"]
pub type B1873_R = crate::BitReader;
#[doc = "Field `B1873` writer - B1873"]
pub type B1873_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1874` reader - B1874"]
pub type B1874_R = crate::BitReader;
#[doc = "Field `B1874` writer - B1874"]
pub type B1874_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1875` reader - B1875"]
pub type B1875_R = crate::BitReader;
#[doc = "Field `B1875` writer - B1875"]
pub type B1875_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1876` reader - B1876"]
pub type B1876_R = crate::BitReader;
#[doc = "Field `B1876` writer - B1876"]
pub type B1876_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1877` reader - B1877"]
pub type B1877_R = crate::BitReader;
#[doc = "Field `B1877` writer - B1877"]
pub type B1877_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1878` reader - B1878"]
pub type B1878_R = crate::BitReader;
#[doc = "Field `B1878` writer - B1878"]
pub type B1878_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1879` reader - B1879"]
pub type B1879_R = crate::BitReader;
#[doc = "Field `B1879` writer - B1879"]
pub type B1879_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1880` reader - B1880"]
pub type B1880_R = crate::BitReader;
#[doc = "Field `B1880` writer - B1880"]
pub type B1880_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1881` reader - B1881"]
pub type B1881_R = crate::BitReader;
#[doc = "Field `B1881` writer - B1881"]
pub type B1881_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1882` reader - B1882"]
pub type B1882_R = crate::BitReader;
#[doc = "Field `B1882` writer - B1882"]
pub type B1882_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1883` reader - B1883"]
pub type B1883_R = crate::BitReader;
#[doc = "Field `B1883` writer - B1883"]
pub type B1883_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1884` reader - B1884"]
pub type B1884_R = crate::BitReader;
#[doc = "Field `B1884` writer - B1884"]
pub type B1884_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1885` reader - B1885"]
pub type B1885_R = crate::BitReader;
#[doc = "Field `B1885` writer - B1885"]
pub type B1885_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1886` reader - B1886"]
pub type B1886_R = crate::BitReader;
#[doc = "Field `B1886` writer - B1886"]
pub type B1886_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B1887` reader - B1887"]
pub type B1887_R = crate::BitReader;
#[doc = "Field `B1887` writer - B1887"]
pub type B1887_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
#[doc = "Bit 0 - B1856"]
#[inline(always)]
pub fn b1856(&self) -> B1856_R {
B1856_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - B1857"]
#[inline(always)]
pub fn b1857(&self) -> B1857_R {
B1857_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 2 - B1858"]
#[inline(always)]
pub fn b1858(&self) -> B1858_R {
B1858_R::new(((self.bits >> 2) & 1) != 0)
}
#[doc = "Bit 3 - B1859"]
#[inline(always)]
pub fn b1859(&self) -> B1859_R {
B1859_R::new(((self.bits >> 3) & 1) != 0)
}
#[doc = "Bit 4 - B1860"]
#[inline(always)]
pub fn b1860(&self) -> B1860_R {
B1860_R::new(((self.bits >> 4) & 1) != 0)
}
#[doc = "Bit 5 - B1861"]
#[inline(always)]
pub fn b1861(&self) -> B1861_R {
B1861_R::new(((self.bits >> 5) & 1) != 0)
}
#[doc = "Bit 6 - B1862"]
#[inline(always)]
pub fn b1862(&self) -> B1862_R {
B1862_R::new(((self.bits >> 6) & 1) != 0)
}
#[doc = "Bit 7 - B1863"]
#[inline(always)]
pub fn b1863(&self) -> B1863_R {
B1863_R::new(((self.bits >> 7) & 1) != 0)
}
#[doc = "Bit 8 - B1864"]
#[inline(always)]
pub fn b1864(&self) -> B1864_R {
B1864_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - B1865"]
#[inline(always)]
pub fn b1865(&self) -> B1865_R {
B1865_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - B1866"]
#[inline(always)]
pub fn b1866(&self) -> B1866_R {
B1866_R::new(((self.bits >> 10) & 1) != 0)
}
#[doc = "Bit 11 - B1867"]
#[inline(always)]
pub fn b1867(&self) -> B1867_R {
B1867_R::new(((self.bits >> 11) & 1) != 0)
}
#[doc = "Bit 12 - B1868"]
#[inline(always)]
pub fn b1868(&self) -> B1868_R {
B1868_R::new(((self.bits >> 12) & 1) != 0)
}
#[doc = "Bit 13 - B1869"]
#[inline(always)]
pub fn b1869(&self) -> B1869_R {
B1869_R::new(((self.bits >> 13) & 1) != 0)
}
#[doc = "Bit 14 - B1870"]
#[inline(always)]
pub fn b1870(&self) -> B1870_R {
B1870_R::new(((self.bits >> 14) & 1) != 0)
}
#[doc = "Bit 15 - B1871"]
#[inline(always)]
pub fn b1871(&self) -> B1871_R {
B1871_R::new(((self.bits >> 15) & 1) != 0)
}
#[doc = "Bit 16 - B1872"]
#[inline(always)]
pub fn b1872(&self) -> B1872_R {
B1872_R::new(((self.bits >> 16) & 1) != 0)
}
#[doc = "Bit 17 - B1873"]
#[inline(always)]
pub fn b1873(&self) -> B1873_R {
B1873_R::new(((self.bits >> 17) & 1) != 0)
}
#[doc = "Bit 18 - B1874"]
#[inline(always)]
pub fn b1874(&self) -> B1874_R {
B1874_R::new(((self.bits >> 18) & 1) != 0)
}
#[doc = "Bit 19 - B1875"]
#[inline(always)]
pub fn b1875(&self) -> B1875_R {
B1875_R::new(((self.bits >> 19) & 1) != 0)
}
#[doc = "Bit 20 - B1876"]
#[inline(always)]
pub fn b1876(&self) -> B1876_R {
B1876_R::new(((self.bits >> 20) & 1) != 0)
}
#[doc = "Bit 21 - B1877"]
#[inline(always)]
pub fn b1877(&self) -> B1877_R {
B1877_R::new(((self.bits >> 21) & 1) != 0)
}
#[doc = "Bit 22 - B1878"]
#[inline(always)]
pub fn b1878(&self) -> B1878_R {
B1878_R::new(((self.bits >> 22) & 1) != 0)
}
#[doc = "Bit 23 - B1879"]
#[inline(always)]
pub fn b1879(&self) -> B1879_R {
B1879_R::new(((self.bits >> 23) & 1) != 0)
}
#[doc = "Bit 24 - B1880"]
#[inline(always)]
pub fn b1880(&self) -> B1880_R {
B1880_R::new(((self.bits >> 24) & 1) != 0)
}
#[doc = "Bit 25 - B1881"]
#[inline(always)]
pub fn b1881(&self) -> B1881_R {
B1881_R::new(((self.bits >> 25) & 1) != 0)
}
#[doc = "Bit 26 - B1882"]
#[inline(always)]
pub fn b1882(&self) -> B1882_R {
B1882_R::new(((self.bits >> 26) & 1) != 0)
}
#[doc = "Bit 27 - B1883"]
#[inline(always)]
pub fn b1883(&self) -> B1883_R {
B1883_R::new(((self.bits >> 27) & 1) != 0)
}
#[doc = "Bit 28 - B1884"]
#[inline(always)]
pub fn b1884(&self) -> B1884_R {
B1884_R::new(((self.bits >> 28) & 1) != 0)
}
#[doc = "Bit 29 - B1885"]
#[inline(always)]
pub fn b1885(&self) -> B1885_R {
B1885_R::new(((self.bits >> 29) & 1) != 0)
}
#[doc = "Bit 30 - B1886"]
#[inline(always)]
pub fn b1886(&self) -> B1886_R {
B1886_R::new(((self.bits >> 30) & 1) != 0)
}
#[doc = "Bit 31 - B1887"]
#[inline(always)]
pub fn b1887(&self) -> B1887_R {
B1887_R::new(((self.bits >> 31) & 1) != 0)
}
}
impl W {
#[doc = "Bit 0 - B1856"]
#[inline(always)]
#[must_use]
pub fn b1856(&mut self) -> B1856_W<MPCBB2_VCTR58_SPEC, 0> {
B1856_W::new(self)
}
#[doc = "Bit 1 - B1857"]
#[inline(always)]
#[must_use]
pub fn b1857(&mut self) -> B1857_W<MPCBB2_VCTR58_SPEC, 1> {
B1857_W::new(self)
}
#[doc = "Bit 2 - B1858"]
#[inline(always)]
#[must_use]
pub fn b1858(&mut self) -> B1858_W<MPCBB2_VCTR58_SPEC, 2> {
B1858_W::new(self)
}
#[doc = "Bit 3 - B1859"]
#[inline(always)]
#[must_use]
pub fn b1859(&mut self) -> B1859_W<MPCBB2_VCTR58_SPEC, 3> {
B1859_W::new(self)
}
#[doc = "Bit 4 - B1860"]
#[inline(always)]
#[must_use]
pub fn b1860(&mut self) -> B1860_W<MPCBB2_VCTR58_SPEC, 4> {
B1860_W::new(self)
}
#[doc = "Bit 5 - B1861"]
#[inline(always)]
#[must_use]
pub fn b1861(&mut self) -> B1861_W<MPCBB2_VCTR58_SPEC, 5> {
B1861_W::new(self)
}
#[doc = "Bit 6 - B1862"]
#[inline(always)]
#[must_use]
pub fn b1862(&mut self) -> B1862_W<MPCBB2_VCTR58_SPEC, 6> {
B1862_W::new(self)
}
#[doc = "Bit 7 - B1863"]
#[inline(always)]
#[must_use]
pub fn b1863(&mut self) -> B1863_W<MPCBB2_VCTR58_SPEC, 7> {
B1863_W::new(self)
}
#[doc = "Bit 8 - B1864"]
#[inline(always)]
#[must_use]
pub fn b1864(&mut self) -> B1864_W<MPCBB2_VCTR58_SPEC, 8> {
B1864_W::new(self)
}
#[doc = "Bit 9 - B1865"]
#[inline(always)]
#[must_use]
pub fn b1865(&mut self) -> B1865_W<MPCBB2_VCTR58_SPEC, 9> {
B1865_W::new(self)
}
#[doc = "Bit 10 - B1866"]
#[inline(always)]
#[must_use]
pub fn b1866(&mut self) -> B1866_W<MPCBB2_VCTR58_SPEC, 10> {
B1866_W::new(self)
}
#[doc = "Bit 11 - B1867"]
#[inline(always)]
#[must_use]
pub fn b1867(&mut self) -> B1867_W<MPCBB2_VCTR58_SPEC, 11> {
B1867_W::new(self)
}
#[doc = "Bit 12 - B1868"]
#[inline(always)]
#[must_use]
pub fn b1868(&mut self) -> B1868_W<MPCBB2_VCTR58_SPEC, 12> {
B1868_W::new(self)
}
#[doc = "Bit 13 - B1869"]
#[inline(always)]
#[must_use]
pub fn b1869(&mut self) -> B1869_W<MPCBB2_VCTR58_SPEC, 13> {
B1869_W::new(self)
}
#[doc = "Bit 14 - B1870"]
#[inline(always)]
#[must_use]
pub fn b1870(&mut self) -> B1870_W<MPCBB2_VCTR58_SPEC, 14> {
B1870_W::new(self)
}
#[doc = "Bit 15 - B1871"]
#[inline(always)]
#[must_use]
pub fn b1871(&mut self) -> B1871_W<MPCBB2_VCTR58_SPEC, 15> {
B1871_W::new(self)
}
#[doc = "Bit 16 - B1872"]
#[inline(always)]
#[must_use]
pub fn b1872(&mut self) -> B1872_W<MPCBB2_VCTR58_SPEC, 16> {
B1872_W::new(self)
}
#[doc = "Bit 17 - B1873"]
#[inline(always)]
#[must_use]
pub fn b1873(&mut self) -> B1873_W<MPCBB2_VCTR58_SPEC, 17> {
B1873_W::new(self)
}
#[doc = "Bit 18 - B1874"]
#[inline(always)]
#[must_use]
pub fn b1874(&mut self) -> B1874_W<MPCBB2_VCTR58_SPEC, 18> {
B1874_W::new(self)
}
#[doc = "Bit 19 - B1875"]
#[inline(always)]
#[must_use]
pub fn b1875(&mut self) -> B1875_W<MPCBB2_VCTR58_SPEC, 19> {
B1875_W::new(self)
}
#[doc = "Bit 20 - B1876"]
#[inline(always)]
#[must_use]
pub fn b1876(&mut self) -> B1876_W<MPCBB2_VCTR58_SPEC, 20> {
B1876_W::new(self)
}
#[doc = "Bit 21 - B1877"]
#[inline(always)]
#[must_use]
pub fn b1877(&mut self) -> B1877_W<MPCBB2_VCTR58_SPEC, 21> {
B1877_W::new(self)
}
#[doc = "Bit 22 - B1878"]
#[inline(always)]
#[must_use]
pub fn b1878(&mut self) -> B1878_W<MPCBB2_VCTR58_SPEC, 22> {
B1878_W::new(self)
}
#[doc = "Bit 23 - B1879"]
#[inline(always)]
#[must_use]
pub fn b1879(&mut self) -> B1879_W<MPCBB2_VCTR58_SPEC, 23> {
B1879_W::new(self)
}
#[doc = "Bit 24 - B1880"]
#[inline(always)]
#[must_use]
pub fn b1880(&mut self) -> B1880_W<MPCBB2_VCTR58_SPEC, 24> {
B1880_W::new(self)
}
#[doc = "Bit 25 - B1881"]
#[inline(always)]
#[must_use]
pub fn b1881(&mut self) -> B1881_W<MPCBB2_VCTR58_SPEC, 25> {
B1881_W::new(self)
}
#[doc = "Bit 26 - B1882"]
#[inline(always)]
#[must_use]
pub fn b1882(&mut self) -> B1882_W<MPCBB2_VCTR58_SPEC, 26> {
B1882_W::new(self)
}
#[doc = "Bit 27 - B1883"]
#[inline(always)]
#[must_use]
pub fn b1883(&mut self) -> B1883_W<MPCBB2_VCTR58_SPEC, 27> {
B1883_W::new(self)
}
#[doc = "Bit 28 - B1884"]
#[inline(always)]
#[must_use]
pub fn b1884(&mut self) -> B1884_W<MPCBB2_VCTR58_SPEC, 28> {
B1884_W::new(self)
}
#[doc = "Bit 29 - B1885"]
#[inline(always)]
#[must_use]
pub fn b1885(&mut self) -> B1885_W<MPCBB2_VCTR58_SPEC, 29> {
B1885_W::new(self)
}
#[doc = "Bit 30 - B1886"]
#[inline(always)]
#[must_use]
pub fn b1886(&mut self) -> B1886_W<MPCBB2_VCTR58_SPEC, 30> {
B1886_W::new(self)
}
#[doc = "Bit 31 - B1887"]
#[inline(always)]
#[must_use]
pub fn b1887(&mut self) -> B1887_W<MPCBB2_VCTR58_SPEC, 31> {
B1887_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "MPCBBx vector register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mpcbb2_vctr58::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mpcbb2_vctr58::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// NOTE(review): this module appears to be svd2rust-generated (see the doc
// attribute above) — prefer regenerating from the SVD over hand edits.
pub struct MPCBB2_VCTR58_SPEC;
impl crate::RegisterSpec for MPCBB2_VCTR58_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`mpcbb2_vctr58::R`](R) reader structure"]
impl crate::Readable for MPCBB2_VCTR58_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mpcbb2_vctr58::W`](W) writer structure"]
impl crate::Writable for MPCBB2_VCTR58_SPEC {
    // No fields are cleared or set by writing 1/0 during a modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MPCBB2_VCTR58 to value 0"]
impl crate::Resettable for MPCBB2_VCTR58_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#[doc = "Register `APB1H_FZ` reader"]
pub type R = crate::R<APB1H_FZ_SPEC>;
#[doc = "Register `APB1H_FZ` writer"]
pub type W = crate::W<APB1H_FZ_SPEC>;
#[doc = "Field `DBG_I2C4_STOP` reader - DBG_I2C4_STOP"]
pub type DBG_I2C4_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I2C4_STOP` writer - DBG_I2C4_STOP"]
pub type DBG_I2C4_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
#[doc = "Bit 1 - DBG_I2C4_STOP"]
#[inline(always)]
pub fn dbg_i2c4_stop(&self) -> DBG_I2C4_STOP_R {
DBG_I2C4_STOP_R::new(((self.bits >> 1) & 1) != 0)
}
}
impl W {
#[doc = "Bit 1 - DBG_I2C4_STOP"]
#[inline(always)]
#[must_use]
pub fn dbg_i2c4_stop(&mut self) -> DBG_I2C4_STOP_W<APB1H_FZ_SPEC, 1> {
DBG_I2C4_STOP_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "APB Low Freeze Register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1h_fz::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1h_fz::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB1H_FZ_SPEC;
impl crate::RegisterSpec for APB1H_FZ_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`apb1h_fz::R`](R) reader structure"]
impl crate::Readable for APB1H_FZ_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb1h_fz::W`](W) writer structure"]
impl crate::Writable for APB1H_FZ_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB1H_FZ to value 0"]
impl crate::Resettable for APB1H_FZ_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use std::{collections::HashMap, io::Write};
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use bytes::Bytes;
use thiserror::Error;
use tokio::sync::oneshot;
use crate::{packet::PacketType, util::SeqGenerator, Error, FileId, SpotifyId};
/// A raw 16-byte AES key used to decrypt an audio file, as delivered by the
/// server in an `AesKey` packet.
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub struct AudioKey(pub [u8; 16]);
/// Failure modes for audio-key retrieval. The `#[error]` strings double as
/// the `Display` implementation via `thiserror`.
#[derive(Debug, Error)]
pub enum AudioKeyError {
    // Server answered with an `AesKeyError` packet.
    #[error("audio key error")]
    AesKey,
    // The oneshot receiver was dropped before a result could be delivered.
    #[error("other end of channel disconnected")]
    Channel,
    // Response packet type was neither `AesKey` nor `AesKeyError`.
    #[error("unexpected packet type {0}")]
    Packet(u8),
    // Response referenced a sequence number with no pending request.
    #[error("sequence {0} not pending")]
    Sequence(u32),
}
impl From<AudioKeyError> for Error {
fn from(err: AudioKeyError) -> Self {
match err {
AudioKeyError::AesKey => Error::unavailable(err),
AudioKeyError::Channel => Error::aborted(err),
AudioKeyError::Sequence(_) => Error::aborted(err),
AudioKeyError::Packet(_) => Error::unimplemented(err),
}
}
}
// `component!` (project macro) generates the `AudioKeyManager` wrapper around
// this locked inner state: a monotonically increasing request sequence number
// and the map of in-flight requests awaiting a key response.
component! {
    AudioKeyManager : AudioKeyManagerInner {
        sequence: SeqGenerator<u32> = SeqGenerator::new(0),
        pending: HashMap<u32, oneshot::Sender<Result<AudioKey, Error>>> = HashMap::new(),
    }
}
impl AudioKeyManager {
    /// Route an incoming key-response packet to the request that is waiting
    /// for it.
    pub(crate) fn dispatch(&self, cmd: PacketType, mut data: Bytes) -> Result<(), Error> {
        // The first four bytes of the payload are the big-endian sequence
        // number that pairs this response with a pending `request`.
        let seq = BigEndian::read_u32(data.split_to(4).as_ref());
        // Remove the waiting sender; an unknown sequence number is an error.
        let sender = self
            .lock(|inner| inner.pending.remove(&seq))
            .ok_or(AudioKeyError::Sequence(seq))?;
        match cmd {
            PacketType::AesKey => {
                let mut key = [0u8; 16];
                // NOTE(review): `copy_from_slice` panics unless the remaining
                // payload is exactly 16 bytes — assumes a well-formed server
                // response; confirm upstream guarantees.
                key.copy_from_slice(data.as_ref());
                sender
                    .send(Ok(AudioKey(key)))
                    .map_err(|_| AudioKeyError::Channel)?
            }
            PacketType::AesKeyError => {
                error!(
                    "error audio key {:x} {:x}",
                    data.as_ref()[0],
                    data.as_ref()[1]
                );
                sender
                    .send(Err(AudioKeyError::AesKey.into()))
                    .map_err(|_| AudioKeyError::Channel)?
            }
            _ => {
                trace!(
                    "Did not expect {:?} AES key packet with data {:#?}",
                    cmd,
                    data
                );
                return Err(AudioKeyError::Packet(cmd as u8).into());
            }
        }
        Ok(())
    }
    /// Request the decryption key for `file` of `track`, awaiting the
    /// server's response.
    pub async fn request(&self, track: SpotifyId, file: FileId) -> Result<AudioKey, Error> {
        let (tx, rx) = oneshot::channel();
        // Register the sender under a fresh sequence number *before* sending,
        // so the response cannot race the registration.
        let seq = self.lock(move |inner| {
            let seq = inner.sequence.get();
            inner.pending.insert(seq, tx);
            seq
        });
        self.send_key_request(seq, track, file)?;
        rx.await?
    }
    /// Serialize and send a `RequestKey` packet.
    /// Payload layout: file id, raw track id, sequence number, trailing zero u16.
    fn send_key_request(&self, seq: u32, track: SpotifyId, file: FileId) -> Result<(), Error> {
        let mut data: Vec<u8> = Vec::new();
        data.write_all(&file.0)?;
        data.write_all(&track.to_raw())?;
        data.write_u32::<BigEndian>(seq)?;
        data.write_u16::<BigEndian>(0x0000)?;
        self.session().send_packet(PacketType::RequestKey, data)
    }
}
|
// This is not a benchmark that is meant to be taken seriously at this time. It
// was written purely to help test an in-development async runtime that this
// database will benefit from.
//
// The problems with the current speed of this database hinge on how
// ACID-compliant you want your data writes to be. As of writing this, there are
// no configuration options to change this, but eventually you will have control
// over whether to flush after every write or to flush periodically. Flushing
// periodically will drastically improve speed, but it potentially will lead to
// lost transactions.
//
// When operating `BonsaiDb` in a local or single-server mode, we must recommend
// flushing on each write -- the default configuration. Comparatively speaking,
// this will hurt performance in many benchmarks, including this one below. The
// purpose of this benchmark is to help test the blocking nature of sled within
// an async interface when properly marking each interaction with sled as
// blocking to the async runtime.
//
// Once clustering is available, it will be recommended to have enough
// redundancy in your architecture to allow running the cluster with periodic
// flushing enabled. Because the quorum will self-correct when an individual
// node loses data, as long as you design with enough redundancy in your
// cluster, the risk of data loss goes down drastically.
//
// TODO Some of this explanation eventually should be moved somewhere more useful
use std::{borrow::Cow, sync::Arc};
use bonsaidb_core::{
connection::Connection,
schema::{Collection, CollectionName, InvalidNameError, Schematic},
test_util::TestDirectory,
Error,
};
use bonsaidb_local::{config::Configuration, Database};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use serde::{Deserialize, Serialize};
/// Test document whose payload size is varied per benchmark case.
#[derive(Serialize, Deserialize, Debug)]
struct ResizableDocument<'a> {
    // `borrow` lets deserialization borrow from the input buffer where possible.
    #[serde(borrow)]
    data: Cow<'a, [u8]>,
}
impl<'a> Collection for ResizableDocument<'a> {
    // Namespaced collection identifier used by BonsaiDb.
    fn collection_name() -> Result<CollectionName, InvalidNameError> {
        CollectionName::new("khonsulabs", "resizable-docs")
    }
    // This benchmark collection defines no views.
    fn define_views(_schema: &mut Schematic) -> Result<(), Error> {
        Ok(())
    }
}
/// Bench helper: insert one document into the collection, panicking on any
/// storage error (acceptable in a benchmark).
async fn save_document(doc: &ResizableDocument<'_>, db: &Database<ResizableDocument<'static>>) {
    db.collection::<ResizableDocument<'static>>()
        .push(doc)
        .await
        .unwrap();
}
/// Benchmark saving documents of increasing sizes (1 KiB through 1 MiB).
fn criterion_benchmark(c: &mut Criterion) {
    static KB: usize = 1024;
    // One shared tokio runtime drives all async database calls.
    let runtime = tokio::runtime::Runtime::new().unwrap();
    // First set of benchmarks tests inserting documents
    let mut group = c.benchmark_group("save_documents");
    for size in [KB, 2 * KB, 8 * KB, 32 * KB, KB * KB].iter() {
        // Payload is `size` bytes of the constant 7.
        let mut data = Vec::with_capacity(*size);
        data.resize_with(*size, || 7u8);
        let doc = Arc::new(ResizableDocument {
            data: Cow::Owned(data),
        });
        let doc = &doc;
        // Report throughput in bytes so sizes are comparable across cases.
        group.throughput(Throughput::Bytes(*size as u64));
        group.bench_with_input(BenchmarkId::from_parameter(*size as u64), size, |b, _| {
            // Fresh database directory per case; dropped when the case ends.
            let path = TestDirectory::new(format!("benches-basics-{}.bonsaidb", size));
            let db = runtime
                .block_on(Database::open_local(&path, Configuration::default()))
                .unwrap();
            b.to_async(&runtime).iter(|| save_document(doc, &db));
        });
    }
    group.finish();
    // TODO bench read performance
    // TODO bench read + write performance (with different numbers of readers/writers)
    // TODO (once supported) bench batch saving
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
#![allow(clippy::extra_unused_type_parameters)]
use dyn_clone::{clone_trait_object, DynClone};
// Compile-time helper: instantiating `assert_clone::<T>()` only compiles when
// `T: Clone`, so each test below is effectively a static assertion.
fn assert_clone<T: Clone>() {}
#[test]
fn test_plain() {
    trait Trait: DynClone {}
    clone_trait_object!(Trait);
    // Boxed trait objects, with every auto-trait combination, must be Clone.
    assert_clone::<Box<dyn Trait>>();
    assert_clone::<Box<dyn Trait + Send>>();
    assert_clone::<Box<dyn Trait + Sync>>();
    assert_clone::<Box<dyn Trait + Send + Sync>>();
}
#[test]
fn test_type_parameter() {
    // Macro must handle a generic trait.
    trait Trait<T>: DynClone {}
    clone_trait_object!(<T> Trait<T>);
    assert_clone::<Box<dyn Trait<u32>>>();
}
#[test]
fn test_generic_bound() {
    // Macro must handle bounds on the generic parameters.
    trait Trait<T: PartialEq<T>, U>: DynClone {}
    clone_trait_object!(<T: PartialEq<T>, U> Trait<T, U>);
    assert_clone::<Box<dyn Trait<u32, ()>>>();
}
#[test]
fn test_where_clause() {
    // Macro must accept a trailing `where` clause.
    trait Trait<T>: DynClone
    where
        T: Clone,
    {
    }
    clone_trait_object!(<T> Trait<T> where T: Clone);
    assert_clone::<Box<dyn Trait<u32>>>();
}
#[test]
fn test_lifetime() {
    // Macro must handle a lifetime parameter (elided at the use site).
    trait Trait<'a>: DynClone {}
    clone_trait_object!(<'a> Trait<'a>);
    assert_clone::<Box<dyn Trait>>();
}
|
extern crate serde;
extern crate serde_json;
use parse_config::Task;
use std::process::Output;
/// Serializable record of one finished task run.
#[derive(Serialize, Clone)]
pub struct TaskOutput {
    // Captured stdout of the task's process.
    pub outcome: String,
    // Process exit code, rendered as a string.
    pub code: String,
    pub name: String,
    pub description: String,
    pub command: String,
}
/// Convenience alias for a list of task results.
pub type Tasks = Vec<TaskOutput>;
/// Top-level wrapper so results serialize as a struct with a `tasks` field.
#[derive(Serialize, Clone)]
pub struct SerializableOutput {
    pub tasks: Vec<TaskOutput>,
}
/// Combine a finished process `Output` with its originating `Task` into a
/// serializable `TaskOutput`.
///
/// Robustness fixes over the previous version: non-UTF-8 stdout no longer
/// panics (invalid bytes are replaced via `from_utf8_lossy`), and a process
/// terminated by a signal (where `ExitStatus::code()` is `None`) is reported
/// instead of panicking.
pub fn build_task_output(output: Output, task: Task) -> TaskOutput {
    let outcome = String::from_utf8_lossy(&output.stdout).into_owned();
    // `code()` is `None` when the process was killed by a signal (Unix).
    let code = output
        .status
        .code()
        .map_or_else(|| String::from("terminated by signal"), |c| c.to_string());
    TaskOutput {
        outcome,
        code,
        name: task.name,
        description: task.description,
        command: task.command,
    }
}
|
use std::{
env,
fs::{self, File, OpenOptions},
io::{self, Cursor, Result, Seek, Write},
path::Path,
};
use zip::{write::FileOptions, CompressionMethod, ZipWriter};
/// CLI entry point: `merge <from_dir> <to_excel_file>`, with defaults
/// supplied by `read_args`.
fn main() -> Result<()> {
    let (from_dir, to_excel_file) = read_args(env::args());
    println!(
        "begin to merge, from_dir: {}, to_excel_file: {}",
        &from_dir, &to_excel_file
    );
    merge_excel(&from_dir, &to_excel_file)?;
    println!("merge done");
    Ok(())
}
/// Zip everything under `from_dir` into a single file at `to_excel_file`
/// (an .xlsx file is a zip archive of XML parts).
fn merge_excel(from_dir: &str, to_excel_file: &str) -> Result<()> {
    // Build the archive entirely in memory, then copy it to the target file.
    let mut zip = ZipWriter::new(Cursor::new(Vec::default()));
    let from_dir = Path::new(from_dir);
    write_dir(from_dir, &mut zip)?;
    let mut buffer = Cursor::new(zip.finish()?.into_inner());
    // Truncate any existing output file before writing.
    let mut to_excel_file = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(to_excel_file)?;
    io::copy(&mut buffer, &mut to_excel_file).map(|_| ())
}
/// Depth-first walk of `dir`: subdirectories recurse, regular files are
/// appended to the archive.
fn write_dir<W: Write + Seek>(dir: &Path, zip: &mut ZipWriter<W>) -> Result<()> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            write_dir(&path, zip)
        } else {
            add_zip_file(&path, zip)
        }?;
    }
    Ok(())
}
/// Append one file to the archive, keyed by its path relative to the source
/// root directory.
fn add_zip_file<W: Write + Seek>(file_path: &Path, zip: &mut ZipWriter<W>) -> Result<()> {
    let mut file = File::open(file_path)?;
    let excel_file_path = {
        // Drop the first path component (the source directory itself) so the
        // archive entry name is relative to the merged root.
        let mut file_path_iter = file_path.iter();
        file_path_iter.next();
        file_path_iter.as_path()
    };
    let options = FileOptions::default().compression_method(CompressionMethod::Deflated);
    zip.start_file(excel_file_path.to_string_lossy(), options)?;
    io::copy(&mut file, zip).map(|_| ())
}
/// Parse positional CLI arguments: `<from_dir> <to_excel_file>`.
///
/// The first item (the program name when called with `env::args()`) is
/// skipped; missing arguments fall back to `"data"` and `"merged.xlsx"`.
/// Generalized to accept any `String` iterator so it can be tested without
/// touching real process arguments; `read_args(env::args())` still works
/// unchanged.
fn read_args(args: impl Iterator<Item = String>) -> (String, String) {
    let mut args = args.skip(1);
    let from_dir = args.next().unwrap_or_else(|| "data".to_string());
    let to_excel_file = args.next().unwrap_or_else(|| "merged.xlsx".to_string());
    (from_dir, to_excel_file)
}
|
use aead::{generic_array::GenericArray, Aead, AeadInPlace, KeyInit, Payload};
use aes::{Aes128, Aes192, Aes256};
use ccm::{
consts::{U10, U11, U12, U13, U14, U16, U4, U6, U7, U8, U9},
Ccm,
};
use hex_literal::hex;
#[test]
fn test_data_len_check() {
    // With a 13-byte nonce, CCM's length field is 2 bytes, so the maximum
    // payload is u16::MAX bytes; one byte more must be rejected.
    let key = hex!("D7828D13B2B0BDC325A76236DF93CC6B");
    let nonce = hex!("2F1DBD38CE3EDA7C23F04DD650");
    type Cipher = Ccm<aes::Aes128, U10, U13>;
    let key = GenericArray::from_slice(&key);
    let nonce = GenericArray::from_slice(&nonce);
    let c = Cipher::new(key);
    // Exactly at the limit: accepted.
    let mut buf1 = [1; core::u16::MAX as usize];
    let res = c.encrypt_in_place_detached(nonce, &[], &mut buf1);
    assert!(res.is_ok());
    // One byte past the limit: rejected.
    let mut buf2 = [1; core::u16::MAX as usize + 1];
    let res = c.encrypt_in_place_detached(nonce, &[], &mut buf2);
    assert!(res.is_err());
}
/// Example test vectors from NIST SP 800-38C
#[test]
#[rustfmt::skip]
fn sp800_38c_examples() {
    // `check!` encrypts and compares against the expected ciphertext, then
    // decrypts the ciphertext and compares against the original plaintext.
    macro_rules! check {
        (
            $key:expr, $m:ty, $n:ty,
            nonce: $nonce:expr, adata: $adata:expr, pt: $pt:expr, ct: $ct:expr,
        ) => {
            let key = GenericArray::from_slice(&$key);
            let c = Ccm::<aes::Aes128, $m, $n>::new(key);
            let nonce = GenericArray::from_slice(&$nonce);
            let res = c.encrypt(nonce, Payload { aad: &$adata, msg: &$pt })
                .unwrap();
            assert_eq!(res, $ct.as_ref());
            let res = c.decrypt(nonce, Payload { aad: &$adata, msg: &$ct })
                .unwrap();
            assert_eq!(res, $pt.as_ref());
        };
    }
    let key = hex!("40414243 44454647 48494a4b 4c4d4e4f");
    check!(
        key, U4, U7,
        nonce: hex!("10111213 141516"),
        adata: hex!("00010203 04050607"),
        pt: hex!("20212223"),
        ct: hex!("7162015b 4dac255d"),
    );
    check!(
        key, U6, U8,
        nonce: hex!("10111213 14151617"),
        adata: hex!("00010203 04050607 08090a0b 0c0d0e0f"),
        pt: hex!("20212223 24252627 28292a2b 2c2d2e2f"),
        ct: hex!("d2a1f0e0 51ea5f62 081a7792 073d593d 1fc64fbf accd"),
    );
    check!(
        key, U8, U12,
        nonce: hex!("10111213 14151617 18191a1b"),
        adata: hex!("00010203 04050607 08090a0b 0c0d0e0f 10111213"),
        pt: hex!("
            20212223 24252627 28292a2b 2c2d2e2f
            30313233 34353637
        "),
        ct: hex!("
            e3b201a9 f5b71a7a 9b1ceaec cd97e70b
            6176aad9 a4428aa5 484392fb c1b09951
        "),
    );
    // Final SP 800-38C example: 64 KiB of generated associated data.
    let adata = (0..524288 / 8).map(|i| i as u8).collect::<Vec<u8>>();
    check!(
        key, U14, U13,
        nonce: hex!("10111213 14151617 18191a1b 1c"),
        adata: adata,
        pt: hex!("
            20212223 24252627 28292a2b 2c2d2e2f
            30313233 34353637 38393a3b 3c3d3e3f
        "),
        ct: hex!("
            69915dad 1e84c637 6a68c296 7e4dab61
            5ae0fd1f aec44cc4 84828529 463ccf72
            b4ac6bec 93e8598e 7f0dadbc ea5b
        "),
    );
}
// Test vectors from https://tools.ietf.org/html/rfc3610
// Naming scheme: <source>_ccm_<cipher>_<tag len>_<nonce len>.
aead::new_test!(rfc3610_ccm_aes128_8_13, "rfc3610_ccm_aes128_8_13", Ccm<Aes128, U8, U13>);
aead::new_test!(rfc3610_ccm_aes128_10_13, "rfc3610_ccm_aes128_10_13", Ccm<Aes128, U10, U13>);
// Test vectors from CAVP:
// https://csrc.nist.gov/Projects/cryptographic-algorithm-validation-program/CAVP-TESTING-BLOCK-CIPHER-MODES
aead::new_test!(cavp_ccm_aes128_4_7, "cavp_ccm_aes128_4_7", Ccm<Aes128, U4, U7>);
aead::new_test!(cavp_ccm_aes128_4_13, "cavp_ccm_aes128_4_13", Ccm<Aes128, U4, U13>);
aead::new_test!(cavp_ccm_aes128_6_13, "cavp_ccm_aes128_6_13", Ccm<Aes128, U6, U13>);
aead::new_test!(cavp_ccm_aes128_8_13, "cavp_ccm_aes128_8_13", Ccm<Aes128, U8, U13>);
aead::new_test!(cavp_ccm_aes128_10_13, "cavp_ccm_aes128_10_13", Ccm<Aes128, U10, U13>);
aead::new_test!(cavp_ccm_aes128_12_13, "cavp_ccm_aes128_12_13", Ccm<Aes128, U12, U13>);
aead::new_test!(cavp_ccm_aes128_14_13, "cavp_ccm_aes128_14_13", Ccm<Aes128, U14, U13>);
aead::new_test!(cavp_ccm_aes128_16_7, "cavp_ccm_aes128_16_7", Ccm<Aes128, U16, U7>);
aead::new_test!(cavp_ccm_aes128_16_8, "cavp_ccm_aes128_16_8", Ccm<Aes128, U16, U8>);
aead::new_test!(cavp_ccm_aes128_16_9, "cavp_ccm_aes128_16_9", Ccm<Aes128, U16, U9>);
aead::new_test!(cavp_ccm_aes128_16_10, "cavp_ccm_aes128_16_10", Ccm<Aes128, U16, U10>);
aead::new_test!(cavp_ccm_aes128_16_11, "cavp_ccm_aes128_16_11", Ccm<Aes128, U16, U11>);
aead::new_test!(cavp_ccm_aes128_16_12, "cavp_ccm_aes128_16_12", Ccm<Aes128, U16, U12>);
aead::new_test!(cavp_ccm_aes128_16_13, "cavp_ccm_aes128_16_13", Ccm<Aes128, U16, U13>);
aead::new_test!(cavp_ccm_aes192_4_7, "cavp_ccm_aes192_4_7", Ccm<Aes192, U4, U7>);
aead::new_test!(cavp_ccm_aes192_4_13, "cavp_ccm_aes192_4_13", Ccm<Aes192, U4, U13>);
aead::new_test!(cavp_ccm_aes192_6_13, "cavp_ccm_aes192_6_13", Ccm<Aes192, U6, U13>);
aead::new_test!(cavp_ccm_aes192_8_13, "cavp_ccm_aes192_8_13", Ccm<Aes192, U8, U13>);
aead::new_test!(cavp_ccm_aes192_10_13, "cavp_ccm_aes192_10_13", Ccm<Aes192, U10, U13>);
aead::new_test!(cavp_ccm_aes192_12_13, "cavp_ccm_aes192_12_13", Ccm<Aes192, U12, U13>);
aead::new_test!(cavp_ccm_aes192_14_13, "cavp_ccm_aes192_14_13", Ccm<Aes192, U14, U13>);
aead::new_test!(cavp_ccm_aes192_16_7, "cavp_ccm_aes192_16_7", Ccm<Aes192, U16, U7>);
aead::new_test!(cavp_ccm_aes192_16_8, "cavp_ccm_aes192_16_8", Ccm<Aes192, U16, U8>);
aead::new_test!(cavp_ccm_aes192_16_9, "cavp_ccm_aes192_16_9", Ccm<Aes192, U16, U9>);
aead::new_test!(cavp_ccm_aes192_16_10, "cavp_ccm_aes192_16_10", Ccm<Aes192, U16, U10>);
aead::new_test!(cavp_ccm_aes192_16_11, "cavp_ccm_aes192_16_11", Ccm<Aes192, U16, U11>);
aead::new_test!(cavp_ccm_aes192_16_12, "cavp_ccm_aes192_16_12", Ccm<Aes192, U16, U12>);
aead::new_test!(cavp_ccm_aes192_16_13, "cavp_ccm_aes192_16_13", Ccm<Aes192, U16, U13>);
aead::new_test!(cavp_ccm_aes256_4_7, "cavp_ccm_aes256_4_7", Ccm<Aes256, U4, U7>);
aead::new_test!(cavp_ccm_aes256_4_13, "cavp_ccm_aes256_4_13", Ccm<Aes256, U4, U13>);
aead::new_test!(cavp_ccm_aes256_6_13, "cavp_ccm_aes256_6_13", Ccm<Aes256, U6, U13>);
aead::new_test!(cavp_ccm_aes256_8_13, "cavp_ccm_aes256_8_13", Ccm<Aes256, U8, U13>);
aead::new_test!(cavp_ccm_aes256_10_13, "cavp_ccm_aes256_10_13", Ccm<Aes256, U10, U13>);
aead::new_test!(cavp_ccm_aes256_12_13, "cavp_ccm_aes256_12_13", Ccm<Aes256, U12, U13>);
aead::new_test!(cavp_ccm_aes256_14_13, "cavp_ccm_aes256_14_13", Ccm<Aes256, U14, U13>);
aead::new_test!(cavp_ccm_aes256_16_7, "cavp_ccm_aes256_16_7", Ccm<Aes256, U16, U7>);
aead::new_test!(cavp_ccm_aes256_16_8, "cavp_ccm_aes256_16_8", Ccm<Aes256, U16, U8>);
aead::new_test!(cavp_ccm_aes256_16_9, "cavp_ccm_aes256_16_9", Ccm<Aes256, U16, U9>);
aead::new_test!(cavp_ccm_aes256_16_10, "cavp_ccm_aes256_16_10", Ccm<Aes256, U16, U10>);
aead::new_test!(cavp_ccm_aes256_16_11, "cavp_ccm_aes256_16_11", Ccm<Aes256, U16, U11>);
aead::new_test!(cavp_ccm_aes256_16_12, "cavp_ccm_aes256_16_12", Ccm<Aes256, U16, U12>);
aead::new_test!(cavp_ccm_aes256_16_13, "cavp_ccm_aes256_16_13", Ccm<Aes256, U16, U13>);
|
// Copyright 2015-2019 Capital One Services, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::authz;
use crate::Result;
use crossbeam_channel::unbounded;
use std::fs::File;
use std::io::prelude::*;
use std::{
path::Path,
sync::{Arc, RwLock},
};
use wascap::jwt::Token;
/// An actor is a WebAssembly module that can consume capabilities exposed by capability providers
#[derive(Debug)]
pub struct Actor {
    // Verified JWT (wascap claims) extracted from the signed module.
    pub(crate) token: Token<wascap::jwt::Actor>,
    // Raw bytes of the WebAssembly module itself.
    pub(crate) bytes: Vec<u8>,
}
impl Actor {
    /// Create an actor from the bytes (must be a signed module) of a WebAssembly module
    pub fn from_bytes(buf: Vec<u8>) -> Result<Actor> {
        // Fails if the module does not carry valid embedded claims.
        let token = authz::extract_claims(&buf)?;
        Ok(Actor { token, bytes: buf })
    }
    /// Create an actor from a WebAssembly (`.wasm`) file
    pub fn from_file(path: impl AsRef<Path>) -> Result<Actor> {
        let mut file = File::open(path)?;
        let mut buf = Vec::new();
        file.read_to_end(&mut buf)?;
        Actor::from_bytes(buf)
    }
    /// Create an actor from the Gantry registry
    pub fn from_gantry(actor: &str) -> Result<Actor> {
        let (s, r) = unbounded();
        let bytevec = Arc::new(RwLock::new(Vec::new()));
        let b = bytevec.clone();
        // Chunks arrive via callback; the channel signals completion when the
        // final chunk is seen.
        let _ack = crate::host::GANTRYCLIENT
            .read()
            .unwrap()
            .download_actor(actor, move |chunk| {
                bytevec
                    .write()
                    .unwrap()
                    .extend_from_slice(&chunk.chunk_bytes);
                // NOTE(review): assumes sequence_no eventually equals
                // total_chunks; if the download aborts early, the `recv`
                // below blocks forever — confirm whether a timeout is needed.
                if chunk.sequence_no == chunk.total_chunks {
                    s.send(true).unwrap();
                }
                Ok(())
            });
        // Block until the download callback reports completion.
        let _ = r.recv().unwrap();
        let vec = b.read().unwrap();
        Actor::from_bytes(vec.clone())
    }
    /// Obtain the actor's public key. This is globally unique identifier
    pub fn public_key(&self) -> String {
        self.token.claims.subject.to_string()
    }
    /// The actor's human-friendly display name
    // NOTE(review): panics if the token carries no metadata — presumed
    // guaranteed for signed actor modules; confirm.
    pub fn name(&self) -> String {
        match self.token.claims.metadata.as_ref().unwrap().name {
            Some(ref n) => n.to_string(),
            None => "Unnamed".to_string(),
        }
    }
    /// Obtain the public key of the issuer of the actor's signed token
    pub fn issuer(&self) -> String {
        self.token.claims.issuer.to_string()
    }
    /// Obtain the list of capabilities declared in this actor's embedded token
    pub fn capabilities(&self) -> Vec<String> {
        match self.token.claims.metadata.as_ref().unwrap().caps {
            Some(ref caps) => caps.clone(),
            None => vec![],
        }
    }
    /// Obtain the list of tags in the actor's token
    pub fn tags(&self) -> Vec<String> {
        match self.token.claims.metadata.as_ref().unwrap().tags {
            Some(ref tags) => tags.clone(),
            None => vec![],
        }
    }
}
|
pub mod models {
    use octocrab::models::workflows::HeadCommit;
    use octocrab::models::events::Repository;
    use reqwest::Url;
    use serde::*;
    /// Minimal repository payload carrying only the fields this crate needs.
    // NOTE(review): fields are private, so callers can deserialize but not
    // read them — confirm whether accessors or `pub` fields were intended.
    #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
    #[non_exhaustive]
    pub struct RepositoryMinimal {
        id: i64,
        url: Url,
        name: String,
    }
    /// The `head` side of a pull-request reference.
    // NOTE(review): "Mininal" is a typo for "Minimal"; kept because the type
    // name is public API and renaming would break callers.
    #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
    #[non_exhaustive]
    pub struct HeadMininal {
        // `ref` is a Rust keyword, hence the serde rename.
        #[serde(rename = "ref")]
        pub ref_field: String,
        pub sha: String,
        pub repo: RepositoryMinimal,
    }
    /// The `base` side of a pull-request reference.
    // NOTE(review): same "Mininal" typo as above, kept for API compatibility.
    #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
    #[non_exhaustive]
    pub struct BaseMininal {
        #[serde(rename = "ref")]
        pub ref_field: String,
        pub sha: String,
        pub repo: RepositoryMinimal,
    }
    /// Pull request as embedded in a workflow-run payload.
    // NOTE(review): "Reuqest" is a typo for "Request"; kept — public API.
    #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
    #[non_exhaustive]
    pub struct PullReuqestMinimal {
        pub id: i64,
        pub number: i64,
        pub url: Url,
        pub head: HeadMininal,
        pub base: BaseMininal,
    }
    /// A GitHub Actions workflow run.
    #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
    #[non_exhaustive]
    pub struct Run {
        pub id: i64,
        pub workflow_id: i64,
        pub node_id: String,
        pub name: String,
        pub head_branch: String,
        pub head_sha: String,
        pub run_number: i64,
        pub event: String,
        pub status: String,
        // Absent while the run is still in progress.
        #[serde(skip_serializing_if = "Option::is_none")]
        pub conclusion: Option<String>,
        pub created_at: chrono::DateTime<chrono::Utc>,
        pub updated_at: chrono::DateTime<chrono::Utc>,
        pub url: Url,
        pub html_url: Url,
        pub jobs_url: Url,
        pub logs_url: Url,
        pub check_suite_url: Url,
        pub artifacts_url: Url,
        pub cancel_url: Url,
        pub rerun_url: Url,
        pub workflow_url: Url,
        pub pull_requests: Vec<PullReuqestMinimal>,
        // TODO: other attrs
        // ref: https://docs.github.com/en/rest/reference/actions#list-workflow-runs
        pub head_commit: HeadCommit,
        pub repository: Repository,
        pub head_repository: Repository,
    }
}
//! [`Transport`] mocks.
use crate::data::{presence, request, response};
use crate::{transport::Service, Transport};
use futures_core::future::BoxFuture;
use std::future::Future;
use std::pin::Pin;
use thiserror::Error;
use mockall::mock;
/// A dummy error used by the [`MockTransport`].
#[allow(missing_copy_implementations)]
#[derive(Debug, Error)]
#[error("mock tranport error")]
pub struct MockTransportError;
mock! {
    /// Mock Transport.
    pub Transport {
        /// Issue a transport call.
        ///
        /// A single point to mock all the services.
        fn call<TReq: 'static, TRes: 'static>(
            &self,
            request: TReq,
        ) -> BoxFuture<'static, Result<TRes, MockTransportError>> {}
    }
    // Also mock `Clone` so cloned transports can be expected on.
    trait Clone {
        fn clone(&self) -> Self {}
    }
}
// We implement the mocks manually because `mockall` doesn't play nice with
// `async_trait`.
macro_rules! impl_mock_service {
    ($req:ty, $res:ty) => {
        // This is an expanded `async_trait` implementation.
        // It's manually tailored to simply pass the control to the `mock_call`
        // to avoid issues with generic type argument inference.
        impl Service<$req> for MockTransport {
            type Response = $res;
            type Error = MockTransportError;
            fn call<'life0, 'async_trait>(
                &'life0 self,
                req: $req,
            ) -> Pin<
                Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send + 'async_trait>,
            >
            where
                'life0: 'async_trait,
                Self: 'async_trait,
            {
                Box::pin(MockTransport::call(self, req))
            }
        }
    };
}
// One concrete `Service` impl per (request, response) pair the mock supports.
impl_mock_service![request::Publish, response::Publish];
impl_mock_service![request::Subscribe, response::Subscribe];
impl_mock_service![request::SetState, response::SetState];
impl_mock_service![request::GetState, response::GetState];
// `HereNow`/`GlobalHereNow` are instantiated once per response detail level.
impl_mock_service![
    request::HereNow<presence::respond_with::OccupancyOnly>,
    response::HereNow<presence::respond_with::OccupancyOnly>
];
impl_mock_service![
    request::HereNow<presence::respond_with::OccupancyAndUUIDs>,
    response::HereNow<presence::respond_with::OccupancyAndUUIDs>
];
impl_mock_service![
    request::HereNow<presence::respond_with::Full>,
    response::HereNow<presence::respond_with::Full>
];
impl_mock_service![
    request::GlobalHereNow<presence::respond_with::OccupancyOnly>,
    response::GlobalHereNow<presence::respond_with::OccupancyOnly>
];
impl_mock_service![
    request::GlobalHereNow<presence::respond_with::OccupancyAndUUIDs>,
    response::GlobalHereNow<presence::respond_with::OccupancyAndUUIDs>
];
impl_mock_service![
    request::GlobalHereNow<presence::respond_with::Full>,
    response::GlobalHereNow<presence::respond_with::Full>
];
impl_mock_service![request::WhereNow, response::WhereNow];
impl_mock_service![request::Heartbeat, response::Heartbeat];
impl_mock_service![request::Grant, response::Grant];
impl_mock_service![request::GetHistory, response::GetHistory];
impl_mock_service![request::DeleteHistory, response::DeleteHistory];
impl_mock_service![
    request::MessageCountsWithTimetoken,
    response::MessageCountsWithTimetoken
];
impl_mock_service![
    request::MessageCountsWithChannelTimetokens,
    response::MessageCountsWithChannelTimetokens
];
// Tie the mock into the high-level `Transport` abstraction; the per-request
// services come from the `impl_mock_service!` expansions.
impl Transport for MockTransport {
    type Error = MockTransportError;
}
|
use std::{env, thread};
use std::io::{Read, Write};
use std::net::{TcpListener, TcpStream};
use std::process::exit;
/// Relay a single request/response exchange between a client connection
/// (`proxy_stream`) and the origin server (`origin_stream`).
///
/// Fix: forward only the bytes actually read. The previous version wrote the
/// entire 200-byte buffer in both directions, appending NUL padding to every
/// request and response.
///
/// NOTE(review): a single read of at most 200 bytes per direction truncates
/// larger messages — a read/write loop would be needed for arbitrary sizes.
fn handle_connection(proxy_stream: &mut TcpStream, origin_stream: &mut TcpStream) {
    let mut in_buffer = [0u8; 200];
    let mut out_buffer = [0u8; 200];
    // 1. Read the client request; on error, forward nothing.
    let n_in = match proxy_stream.read(&mut in_buffer) {
        Ok(n) => {
            println!(
                "1: Incoming client request: {}",
                String::from_utf8_lossy(&in_buffer[..n])
            );
            n
        }
        Err(e) => {
            println!("Error in reading from incoming proxy stream: {}", e);
            0
        }
    };
    // 2. Forward exactly the bytes read (write_all avoids short writes).
    origin_stream.write_all(&in_buffer[..n_in]).unwrap();
    println!("2: Forwarding request to origin server\n");
    // 3. Read the origin's response.
    let n_out = origin_stream.read(&mut out_buffer).unwrap();
    println!(
        "3: Received response from origin server: {}",
        String::from_utf8_lossy(&out_buffer[..n_out])
    );
    // 4. Relay the response back to the client.
    proxy_stream.write_all(&out_buffer[..n_out]).unwrap();
    println!("4: Forwarding response back to client");
}
/// Proxy entry point. Usage: `<prog> <proxy-listen-addr> <origin-addr>`.
fn main() {
    let args: Vec<_> = env::args().collect();
    if args.len() < 3 {
        eprintln!("Provide proxy-from and proxy-to addresses");
        exit(2);
    }
    let proxy_server = &args[1];
    let origin_server = &args[2];
    let proxy_listener;
    if let Ok(proxy) = TcpListener::bind(proxy_server) {
        proxy_listener = proxy;
        let addr = proxy_listener.local_addr().unwrap().ip();
        let port = proxy_listener.local_addr().unwrap().port();
        // Probe connection: only verifies the origin is reachable at startup,
        // then is dropped immediately.
        if let Err(_e) = TcpStream::connect(origin_server) {
            println!("Restart the origin server");
            exit(1);
        }
        println!("Running on Addr: {}, Port: {}\n", addr, port);
    } else {
        eprintln!("Unable to bind to specified proxy port");
        exit(1);
    }
    let mut thread_handles = Vec::new();
    // One thread per client connection, each with its own origin connection.
    // `incoming()` never returns `None`, so the join loop below is normally
    // unreachable; the process runs until killed.
    for proxy_stream in proxy_listener.incoming() {
        let mut proxy_stream = proxy_stream
            .expect("Error in incoming TCP connection");
        let mut origin_stream = TcpStream::connect(origin_server)
            .expect("Please restart the origin server");
        let handle = thread::spawn(move ||
            handle_connection(&mut proxy_stream, &mut origin_stream));
        thread_handles.push(handle);
    }
    for handle in thread_handles {
        handle.join().expect("Unable to join child thread");
    }
}
|
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Membership role assigned to a group member.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum MemberRole {
    Creator,
    Admin,
    Participant,
    Observer,
    Invited
}
impl std::fmt::Display for MemberRole {
    /// Render the role as its canonical human-readable name (identical to
    /// the variant name).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::Creator => "Creator",
            Self::Admin => "Admin",
            Self::Participant => "Participant",
            Self::Observer => "Observer",
            Self::Invited => "Invited",
        })
    }
}
/// A group member: a user id plus display name and role.
/// `derive_more::Constructor` generates `Member::new(user_id, name, role)`.
#[derive(Clone, Debug, derive_more::Constructor, Deserialize, Serialize)]
pub struct Member {
    user_id: Uuid,
    name: String,
    role: MemberRole
}
impl Member {
    /// Id of the backing user account (no setter — immutable after creation).
    pub const fn user_id(&self) -> &Uuid {
        &self.user_id
    }
    /// Current display name.
    pub const fn name(&self) -> &String {
        &self.name
    }
    /// Replace the display name.
    pub fn set_name(&mut self, t: String) {
        self.name = t;
    }
    /// Current role.
    pub const fn role(&self) -> &MemberRole {
        &self.role
    }
    /// Replace the role.
    pub fn set_role(&mut self, r: MemberRole) {
        self.role = r;
    }
}
|
/*---------------------------------------------------------------------------------------------
* Copyright © 2016-present Earth Computing Corporation. All rights reserved.
* Licensed under the MIT License. See LICENSE.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
use std::thread;
use std::sync::mpsc;
use std::time::Duration;
extern crate failure;
extern crate futures;
extern crate lazy_static;
extern crate rdkafka;
extern crate serde;
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate serde_json;
extern crate time;
mod emitter;
mod streaming;
use streaming::{LogStream};
// primitive override flag(s):
/// Trace gates: `all` enables every trace site; `el` gates the event-loop
/// sites specifically (see the `TRACE_OPTIONS.all || TRACE_OPTIONS.el`
/// checks in this file).
pub struct TraceOptions {
    pub all: bool,
    pub el: bool
}
/// Global trace configuration used throughout this binary.
pub const TRACE_OPTIONS: TraceOptions = TraceOptions {
    all: false,
    el: true,
};
/// Worker loop: trace every message received on `rx` and return when the
/// literal message "exit" arrives (or the channel closes).
fn event_loop(rx: mpsc::Receiver<String>) {
    let _f = "event_loop";
    let worker = thread::current();
    let tid = format!("{:?}", worker.id()); // looks like : "ThreadId(8)"
    // Name is set via `Builder::name` in `worker()`, so unwrap cannot fail.
    let name = worker.name().unwrap(); // name is guaranteed
    for received in rx {
        if TRACE_OPTIONS.all || TRACE_OPTIONS.el {
            let ref code_attr = emitter::CodeAttributes { module: file!(), function: _f, line_no: line!(), format: "recv" };
            let ref body = json!({ "tid": tid, "name": name, "recv": received });
            let (key, entry) = emitter::trace(code_attr, body).unwrap();
            streaming::log(key, entry);
            // println!("{}", entry);
        }
        if received == "exit" { return; }
    }
}
/// Spawn a named worker thread running `event_loop` over `rx`; the join
/// handle resolves to "<thread id> <thread name>" when the loop exits.
///
/// Cleanups: removed a redundant `.into()` on an already-`String` name, the
/// needless `let h ...; h` binding, and commented-out code.
fn worker(i: usize, rx: mpsc::Receiver<String>) -> thread::JoinHandle<String> {
    let _f = "worker";
    let thread_name = format!("event_loop #{}", i);
    // Create the child emitter in the parent thread, then stash it inside the
    // spawned thread so the child inherits the tracing lineage.
    let child_emitter = emitter::pregnant();
    thread::Builder::new()
        .name(thread_name)
        .spawn(move || {
            child_emitter.clone().stash();
            event_loop(rx);
            let worker = thread::current();
            // Name was set via Builder::name above, so unwrap cannot fail.
            format!("{:?} {}", worker.id(), worker.name().unwrap())
        })
        .unwrap()
}
/// Fan a fixed message script out to a pool of worker threads, tracing each
/// receipt and each join outcome.
fn main() {
    let _f = "main";
    // NOTE(review): presumably installs the root emitter for the main
    // thread — see the emitter module for the actual contract.
    emitter::grandfather();
    let log_stream = LogStream::new();
    // "exit" must come last: it tells each event_loop to return.
    let bodies = [ "msg1", "msg2", "msg3", "msg4", "msg5", "exit" ];
    let mut channels: Vec<mpsc::Sender<String>> = Vec::new();
    let mut handles: Vec<thread::JoinHandle<String>> = Vec::new();
    // Spawn 9 workers (1..10), each with its own channel.
    for i in 1..10 {
        let (tx, rx) = mpsc::channel();
        channels.push(tx);
        let h = worker(i, rx);
        handles.push(h);
    }
    // Broadcast each message to every worker before moving to the next one.
    for m in bodies.iter() {
        for tx in channels.iter() { tx.send(m.to_string()).unwrap(); }
    }
    // non-optimal latency for set-join
    let mut hist: Vec<Result<String, _>> = Vec::new();
    while let Some(h) = handles.pop() {
        let rc = h.join();
        hist.push(rc);
    }
    // Trace each worker's join outcome.
    for rc in hist.iter() {
        let outcome = format!("{:?}", rc);
        if TRACE_OPTIONS.all || TRACE_OPTIONS.el {
            let ref code_attr = emitter::CodeAttributes { module: file!(), function: _f, line_no: line!(), format: "recv" };
            let ref body = json!({ "outcome": outcome });
            let (key, entry) = emitter::trace(code_attr, body).unwrap();
            streaming::log(key, entry);
            // println!("{}", entry);
        }
    }
    log_stream.close();
    // thread::sleep(Duration::from_millis(2*1000));
}
|
//! Typed access to Peter's IPC commands from Rust.
#![deny(missing_docs, rust_2018_idioms, unused, unused_import_braces, unused_lifetimes, unused_qualifications, warnings)]
#![forbid(unsafe_code)]
peter::ipc_client_lib!();
|
use std::collections::HashMap;
use brace_config::{file, Config};
#[test]
fn test_file_json() {
    // Assert that `cfg` holds exactly the example fixture's three entries.
    fn verify(cfg: &Config) {
        assert_eq!(cfg.get("one"), Ok(String::from("Hello world")));
        let pairs = HashMap::from([
            (String::from("a"), String::from("first")),
            (String::from("b"), String::from("second")),
        ]);
        assert_eq!(cfg.get("two"), Ok(pairs));
        assert_eq!(cfg.get("three"), Ok(vec![1, 25, 150]));
    }
    // Load the checked-in fixture and verify its contents.
    let cfg = file::load("tests/assets/example.json").unwrap();
    verify(&cfg);
    // Round-trip: save, reload, and verify the reloaded copy.
    file::save("tests/outputs/example.json", &cfg).unwrap();
    let cfg = file::load("tests/outputs/example.json").unwrap();
    verify(&cfg);
}
#[test]
fn test_file_toml() {
    // Assert that `cfg` holds exactly the example fixture's three entries.
    fn verify(cfg: &Config) {
        assert_eq!(cfg.get("one"), Ok(String::from("Hello world")));
        let pairs = HashMap::from([
            (String::from("a"), String::from("first")),
            (String::from("b"), String::from("second")),
        ]);
        assert_eq!(cfg.get("two"), Ok(pairs));
        assert_eq!(cfg.get("three"), Ok(vec![1, 25, 150]));
    }
    // Load the checked-in fixture and verify its contents.
    let cfg = file::load("tests/assets/example.toml").unwrap();
    verify(&cfg);
    // Round-trip: save, reload, and verify the reloaded copy.
    file::save("tests/outputs/example.toml", &cfg).unwrap();
    let cfg = file::load("tests/outputs/example.toml").unwrap();
    verify(&cfg);
}
#[test]
fn test_file_yaml() {
    // Assert that `cfg` holds exactly the example fixture's three entries.
    fn verify(cfg: &Config) {
        assert_eq!(cfg.get("one"), Ok(String::from("Hello world")));
        let pairs = HashMap::from([
            (String::from("a"), String::from("first")),
            (String::from("b"), String::from("second")),
        ]);
        assert_eq!(cfg.get("two"), Ok(pairs));
        assert_eq!(cfg.get("three"), Ok(vec![1, 25, 150]));
    }
    // Load the checked-in fixture and verify its contents.
    let cfg = file::load("tests/assets/example.yaml").unwrap();
    verify(&cfg);
    // Round-trip: save, reload, and verify the reloaded copy.
    file::save("tests/outputs/example.yaml", &cfg).unwrap();
    let cfg = file::load("tests/outputs/example.yaml").unwrap();
    verify(&cfg);
}
#[test]
fn test_file_none() {
    // A path without an extension cannot select a format, so both
    // loading and saving must fail.
    assert!(file::load("tests/assets/example").is_err());
    assert!(file::save("tests/outputs/example", &Config::new()).is_err());
}
#[test]
fn test_file_invalid() {
    // `.txt` is not a supported config format, so both loading and
    // saving must fail.
    assert!(file::load("tests/assets/example.txt").is_err());
    assert!(file::save("tests/outputs/example.txt", &Config::new()).is_err());
}
|
// Machine-generated (svd2rust-style) accessors for the LUT871H register —
// do not hand-edit; regenerate from the SVD instead (assumed; TODO confirm
// the generation pipeline).
#[doc = "Register `LUT871H` reader"]
pub type R = crate::R<LUT871H_SPEC>;
#[doc = "Register `LUT871H` writer"]
pub type W = crate::W<LUT871H_SPEC>;
#[doc = "Field `LO` reader - Line offset"]
pub type LO_R = crate::FieldReader<u32>;
#[doc = "Field `LO` writer - Line offset"]
pub type LO_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 18, O, u32>;
impl R {
    #[doc = "Bits 4:21 - Line offset"]
    #[inline(always)]
    pub fn lo(&self) -> LO_R {
        // 18-bit field at bit offset 4: shift down, then mask with 0x3_ffff.
        LO_R::new((self.bits >> 4) & 0x0003_ffff)
    }
}
impl W {
    #[doc = "Bits 4:21 - Line offset"]
    #[inline(always)]
    #[must_use]
    pub fn lo(&mut self) -> LO_W<LUT871H_SPEC, 4> {
        LO_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY (caller contract): the bit pattern must be valid for this
        // register; no masking is applied here.
        self.bits = bits;
        self
    }
}
#[doc = "Graphic MMU LUT entry 871 high\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lut871h::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lut871h::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct LUT871H_SPEC;
impl crate::RegisterSpec for LUT871H_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`lut871h::R`](R) reader structure"]
impl crate::Readable for LUT871H_SPEC {}
#[doc = "`write(|w| ..)` method takes [`lut871h::W`](W) writer structure"]
impl crate::Writable for LUT871H_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets LUT871H to value 0"]
impl crate::Resettable for LUT871H_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use oxygengine::prelude::*;
/// Initial game state: instantiates the scene prefab and starts the
/// visual-novel story when entered.
#[derive(Debug, Default)]
pub struct GameState;
impl State for GameState {
    fn on_enter(&mut self, universe: &mut Universe) {
        // Spawn the "scene" prefab into the world. A bare `unwrap()` here
        // gave no context on failure; `expect` names the failing step.
        universe
            .expect_resource_mut::<PrefabManager>()
            .instantiate("scene", universe)
            .expect("Could not instantiate 'scene' prefab");
        // Look up the loaded story and start its "Main" chapter.
        universe
            .expect_resource_mut::<VnStoryManager>()
            .get_mut("vn/story.yaml")
            .expect("Story 'vn/story.yaml' is not loaded")
            .run_chapter("Main")
            .expect("Could not run chapter");
    }
}
|
#[doc = "Register `CCIPR` reader"]
pub type R = crate::R<CCIPR_SPEC>;
#[doc = "Register `CCIPR` writer"]
pub type W = crate::W<CCIPR_SPEC>;
#[doc = "Field `USART1SEL` reader - USART1 clock source selection"]
pub type USART1SEL_R = crate::FieldReader<USART1SEL_A>;
#[doc = "USART1 clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum USART1SEL_A {
#[doc = "0: PCLK clock selected"]
Pclk = 0,
#[doc = "1: SYSCLK clock selected"]
Sysclk = 1,
#[doc = "2: HSI16 clock selected"]
Hsi16 = 2,
#[doc = "3: LSE clock selected"]
Lse = 3,
}
impl From<USART1SEL_A> for u8 {
#[inline(always)]
fn from(variant: USART1SEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for USART1SEL_A {
type Ux = u8;
}
impl USART1SEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> USART1SEL_A {
match self.bits {
0 => USART1SEL_A::Pclk,
1 => USART1SEL_A::Sysclk,
2 => USART1SEL_A::Hsi16,
3 => USART1SEL_A::Lse,
_ => unreachable!(),
}
}
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn is_pclk(&self) -> bool {
*self == USART1SEL_A::Pclk
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn is_sysclk(&self) -> bool {
*self == USART1SEL_A::Sysclk
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn is_hsi16(&self) -> bool {
*self == USART1SEL_A::Hsi16
}
#[doc = "LSE clock selected"]
#[inline(always)]
pub fn is_lse(&self) -> bool {
*self == USART1SEL_A::Lse
}
}
#[doc = "Field `USART1SEL` writer - USART1 clock source selection"]
pub type USART1SEL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, USART1SEL_A>;
impl<'a, REG, const O: u8> USART1SEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn pclk(self) -> &'a mut crate::W<REG> {
self.variant(USART1SEL_A::Pclk)
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn sysclk(self) -> &'a mut crate::W<REG> {
self.variant(USART1SEL_A::Sysclk)
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn hsi16(self) -> &'a mut crate::W<REG> {
self.variant(USART1SEL_A::Hsi16)
}
#[doc = "LSE clock selected"]
#[inline(always)]
pub fn lse(self) -> &'a mut crate::W<REG> {
self.variant(USART1SEL_A::Lse)
}
}
#[doc = "Field `USART2SEL` reader - USART2 clock source selection"]
pub use USART1SEL_R as USART2SEL_R;
#[doc = "Field `USART3SEL` reader - USART3 clock source selection"]
pub use USART1SEL_R as USART3SEL_R;
#[doc = "Field `USART2SEL` writer - USART2 clock source selection"]
pub use USART1SEL_W as USART2SEL_W;
#[doc = "Field `USART3SEL` writer - USART3 clock source selection"]
pub use USART1SEL_W as USART3SEL_W;
#[doc = "Field `UART4SEL` reader - UART4 clock source selection"]
pub type UART4SEL_R = crate::FieldReader;
#[doc = "Field `UART4SEL` writer - UART4 clock source selection"]
pub type UART4SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `UART5SEL` reader - UART5 clock source selection"]
pub type UART5SEL_R = crate::FieldReader;
#[doc = "Field `UART5SEL` writer - UART5 clock source selection"]
pub type UART5SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `LPUART1SEL` reader - LPUART1 clock source selection"]
pub type LPUART1SEL_R = crate::FieldReader<LPUART1SEL_A>;
#[doc = "LPUART1 clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum LPUART1SEL_A {
#[doc = "0: PCLK clock selected"]
Pclk = 0,
#[doc = "1: SYSCLK clock selected"]
Sysclk = 1,
#[doc = "2: HSI16 clock selected"]
Hsi16 = 2,
#[doc = "3: LSE clock selected"]
Lse = 3,
}
impl From<LPUART1SEL_A> for u8 {
#[inline(always)]
fn from(variant: LPUART1SEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for LPUART1SEL_A {
type Ux = u8;
}
impl LPUART1SEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LPUART1SEL_A {
match self.bits {
0 => LPUART1SEL_A::Pclk,
1 => LPUART1SEL_A::Sysclk,
2 => LPUART1SEL_A::Hsi16,
3 => LPUART1SEL_A::Lse,
_ => unreachable!(),
}
}
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn is_pclk(&self) -> bool {
*self == LPUART1SEL_A::Pclk
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn is_sysclk(&self) -> bool {
*self == LPUART1SEL_A::Sysclk
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn is_hsi16(&self) -> bool {
*self == LPUART1SEL_A::Hsi16
}
#[doc = "LSE clock selected"]
#[inline(always)]
pub fn is_lse(&self) -> bool {
*self == LPUART1SEL_A::Lse
}
}
#[doc = "Field `LPUART1SEL` writer - LPUART1 clock source selection"]
pub type LPUART1SEL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, LPUART1SEL_A>;
impl<'a, REG, const O: u8> LPUART1SEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn pclk(self) -> &'a mut crate::W<REG> {
self.variant(LPUART1SEL_A::Pclk)
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn sysclk(self) -> &'a mut crate::W<REG> {
self.variant(LPUART1SEL_A::Sysclk)
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn hsi16(self) -> &'a mut crate::W<REG> {
self.variant(LPUART1SEL_A::Hsi16)
}
#[doc = "LSE clock selected"]
#[inline(always)]
pub fn lse(self) -> &'a mut crate::W<REG> {
self.variant(LPUART1SEL_A::Lse)
}
}
#[doc = "Field `I2C1SEL` reader - I2C1 clock source selection"]
pub type I2C1SEL_R = crate::FieldReader<I2C1SEL_A>;
#[doc = "I2C1 clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum I2C1SEL_A {
#[doc = "0: PCLK clock selected"]
Pclk = 0,
#[doc = "1: SYSCLK clock selected"]
Sysclk = 1,
#[doc = "2: HSI16 clock selected"]
Hsi16 = 2,
}
impl From<I2C1SEL_A> for u8 {
#[inline(always)]
fn from(variant: I2C1SEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for I2C1SEL_A {
type Ux = u8;
}
impl I2C1SEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<I2C1SEL_A> {
match self.bits {
0 => Some(I2C1SEL_A::Pclk),
1 => Some(I2C1SEL_A::Sysclk),
2 => Some(I2C1SEL_A::Hsi16),
_ => None,
}
}
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn is_pclk(&self) -> bool {
*self == I2C1SEL_A::Pclk
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn is_sysclk(&self) -> bool {
*self == I2C1SEL_A::Sysclk
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn is_hsi16(&self) -> bool {
*self == I2C1SEL_A::Hsi16
}
}
#[doc = "Field `I2C1SEL` writer - I2C1 clock source selection"]
pub type I2C1SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, I2C1SEL_A>;
impl<'a, REG, const O: u8> I2C1SEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn pclk(self) -> &'a mut crate::W<REG> {
self.variant(I2C1SEL_A::Pclk)
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn sysclk(self) -> &'a mut crate::W<REG> {
self.variant(I2C1SEL_A::Sysclk)
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn hsi16(self) -> &'a mut crate::W<REG> {
self.variant(I2C1SEL_A::Hsi16)
}
}
#[doc = "Field `I2C2SEL` reader - I2C2 clock source selection"]
pub use I2C1SEL_R as I2C2SEL_R;
#[doc = "Field `I2C3SEL` reader - I2C3 clock source selection"]
pub use I2C1SEL_R as I2C3SEL_R;
#[doc = "Field `I2C2SEL` writer - I2C2 clock source selection"]
pub use I2C1SEL_W as I2C2SEL_W;
#[doc = "Field `I2C3SEL` writer - I2C3 clock source selection"]
pub use I2C1SEL_W as I2C3SEL_W;
#[doc = "Field `LPTIM1SEL` reader - Low power timer 1 clock source selection"]
pub type LPTIM1SEL_R = crate::FieldReader<LPTIM1SEL_A>;
#[doc = "Low power timer 1 clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum LPTIM1SEL_A {
#[doc = "0: PCLK clock selected"]
Pclk = 0,
#[doc = "1: LSI clock selected"]
Lsi = 1,
#[doc = "2: HSI16 clock selected"]
Hsi16 = 2,
#[doc = "3: LSE clock selected"]
Lse = 3,
}
impl From<LPTIM1SEL_A> for u8 {
#[inline(always)]
fn from(variant: LPTIM1SEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for LPTIM1SEL_A {
type Ux = u8;
}
impl LPTIM1SEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LPTIM1SEL_A {
match self.bits {
0 => LPTIM1SEL_A::Pclk,
1 => LPTIM1SEL_A::Lsi,
2 => LPTIM1SEL_A::Hsi16,
3 => LPTIM1SEL_A::Lse,
_ => unreachable!(),
}
}
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn is_pclk(&self) -> bool {
*self == LPTIM1SEL_A::Pclk
}
#[doc = "LSI clock selected"]
#[inline(always)]
pub fn is_lsi(&self) -> bool {
*self == LPTIM1SEL_A::Lsi
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn is_hsi16(&self) -> bool {
*self == LPTIM1SEL_A::Hsi16
}
#[doc = "LSE clock selected"]
#[inline(always)]
pub fn is_lse(&self) -> bool {
*self == LPTIM1SEL_A::Lse
}
}
#[doc = "Field `LPTIM1SEL` writer - Low power timer 1 clock source selection"]
pub type LPTIM1SEL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, LPTIM1SEL_A>;
impl<'a, REG, const O: u8> LPTIM1SEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "PCLK clock selected"]
#[inline(always)]
pub fn pclk(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM1SEL_A::Pclk)
}
#[doc = "LSI clock selected"]
#[inline(always)]
pub fn lsi(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM1SEL_A::Lsi)
}
#[doc = "HSI16 clock selected"]
#[inline(always)]
pub fn hsi16(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM1SEL_A::Hsi16)
}
#[doc = "LSE clock selected"]
#[inline(always)]
pub fn lse(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM1SEL_A::Lse)
}
}
#[doc = "Field `LPTIM2SEL` reader - Low power timer 2 clock source selection"]
pub use LPTIM1SEL_R as LPTIM2SEL_R;
#[doc = "Field `LPTIM2SEL` writer - Low power timer 2 clock source selection"]
pub use LPTIM1SEL_W as LPTIM2SEL_W;
#[doc = "Field `SAI1SEL` reader - SAI1 clock source selection"]
pub type SAI1SEL_R = crate::FieldReader;
#[doc = "Field `SAI1SEL` writer - SAI1 clock source selection"]
pub type SAI1SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `SAI2SEL` reader - SAI2 clock source selection"]
pub type SAI2SEL_R = crate::FieldReader;
#[doc = "Field `SAI2SEL` writer - SAI2 clock source selection"]
pub type SAI2SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `CLK48SEL` reader - 48 MHz clock source selection"]
pub type CLK48SEL_R = crate::FieldReader<CLK48SEL_A>;
#[doc = "48 MHz clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum CLK48SEL_A {
#[doc = "0: HSI48 clock selected (only for STM32L41x/L42x/L43x/L44x/L45x/L46x/L49x/L4Ax devices, otherwise no clock selected)"]
Hsi48 = 0,
#[doc = "1: PLLSAI1 clock selected"]
Pllsai1 = 1,
#[doc = "2: PLL clock selected"]
Pll = 2,
#[doc = "3: MSI clock selected"]
Msi = 3,
}
impl From<CLK48SEL_A> for u8 {
#[inline(always)]
fn from(variant: CLK48SEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for CLK48SEL_A {
type Ux = u8;
}
impl CLK48SEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CLK48SEL_A {
match self.bits {
0 => CLK48SEL_A::Hsi48,
1 => CLK48SEL_A::Pllsai1,
2 => CLK48SEL_A::Pll,
3 => CLK48SEL_A::Msi,
_ => unreachable!(),
}
}
#[doc = "HSI48 clock selected (only for STM32L41x/L42x/L43x/L44x/L45x/L46x/L49x/L4Ax devices, otherwise no clock selected)"]
#[inline(always)]
pub fn is_hsi48(&self) -> bool {
*self == CLK48SEL_A::Hsi48
}
#[doc = "PLLSAI1 clock selected"]
#[inline(always)]
pub fn is_pllsai1(&self) -> bool {
*self == CLK48SEL_A::Pllsai1
}
#[doc = "PLL clock selected"]
#[inline(always)]
pub fn is_pll(&self) -> bool {
*self == CLK48SEL_A::Pll
}
#[doc = "MSI clock selected"]
#[inline(always)]
pub fn is_msi(&self) -> bool {
*self == CLK48SEL_A::Msi
}
}
#[doc = "Field `CLK48SEL` writer - 48 MHz clock source selection"]
pub type CLK48SEL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, CLK48SEL_A>;
impl<'a, REG, const O: u8> CLK48SEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "HSI48 clock selected (only for STM32L41x/L42x/L43x/L44x/L45x/L46x/L49x/L4Ax devices, otherwise no clock selected)"]
#[inline(always)]
pub fn hsi48(self) -> &'a mut crate::W<REG> {
self.variant(CLK48SEL_A::Hsi48)
}
#[doc = "PLLSAI1 clock selected"]
#[inline(always)]
pub fn pllsai1(self) -> &'a mut crate::W<REG> {
self.variant(CLK48SEL_A::Pllsai1)
}
#[doc = "PLL clock selected"]
#[inline(always)]
pub fn pll(self) -> &'a mut crate::W<REG> {
self.variant(CLK48SEL_A::Pll)
}
#[doc = "MSI clock selected"]
#[inline(always)]
pub fn msi(self) -> &'a mut crate::W<REG> {
self.variant(CLK48SEL_A::Msi)
}
}
#[doc = "Field `ADCSEL` reader - ADCs clock source selection"]
pub type ADCSEL_R = crate::FieldReader<ADCSEL_A>;
#[doc = "ADCs clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum ADCSEL_A {
#[doc = "0: No clock selected"]
NoClock = 0,
#[doc = "1: PLLSAI1 clock selected"]
Pllsai1 = 1,
#[doc = "2: PLLSAI2 clock selected (only for STM32L47x/L48x/L49x/L4Ax devices)"]
Pllsai2 = 2,
#[doc = "3: SYSCLK clock selected"]
Sysclk = 3,
}
impl From<ADCSEL_A> for u8 {
#[inline(always)]
fn from(variant: ADCSEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for ADCSEL_A {
type Ux = u8;
}
impl ADCSEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ADCSEL_A {
match self.bits {
0 => ADCSEL_A::NoClock,
1 => ADCSEL_A::Pllsai1,
2 => ADCSEL_A::Pllsai2,
3 => ADCSEL_A::Sysclk,
_ => unreachable!(),
}
}
#[doc = "No clock selected"]
#[inline(always)]
pub fn is_no_clock(&self) -> bool {
*self == ADCSEL_A::NoClock
}
#[doc = "PLLSAI1 clock selected"]
#[inline(always)]
pub fn is_pllsai1(&self) -> bool {
*self == ADCSEL_A::Pllsai1
}
#[doc = "PLLSAI2 clock selected (only for STM32L47x/L48x/L49x/L4Ax devices)"]
#[inline(always)]
pub fn is_pllsai2(&self) -> bool {
*self == ADCSEL_A::Pllsai2
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn is_sysclk(&self) -> bool {
*self == ADCSEL_A::Sysclk
}
}
#[doc = "Field `ADCSEL` writer - ADCs clock source selection"]
pub type ADCSEL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, ADCSEL_A>;
impl<'a, REG, const O: u8> ADCSEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "No clock selected"]
#[inline(always)]
pub fn no_clock(self) -> &'a mut crate::W<REG> {
self.variant(ADCSEL_A::NoClock)
}
#[doc = "PLLSAI1 clock selected"]
#[inline(always)]
pub fn pllsai1(self) -> &'a mut crate::W<REG> {
self.variant(ADCSEL_A::Pllsai1)
}
#[doc = "PLLSAI2 clock selected (only for STM32L47x/L48x/L49x/L4Ax devices)"]
#[inline(always)]
pub fn pllsai2(self) -> &'a mut crate::W<REG> {
self.variant(ADCSEL_A::Pllsai2)
}
#[doc = "SYSCLK clock selected"]
#[inline(always)]
pub fn sysclk(self) -> &'a mut crate::W<REG> {
self.variant(ADCSEL_A::Sysclk)
}
}
#[doc = "Field `SWPMI1SEL` reader - SWPMI1 clock source selection"]
pub type SWPMI1SEL_R = crate::BitReader;
#[doc = "Field `SWPMI1SEL` writer - SWPMI1 clock source selection"]
pub type SWPMI1SEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DFSDMSEL` reader - DFSDM clock source selection"]
pub type DFSDMSEL_R = crate::BitReader;
#[doc = "Field `DFSDMSEL` writer - DFSDM clock source selection"]
pub type DFSDMSEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
#[doc = "Bits 0:1 - USART1 clock source selection"]
#[inline(always)]
pub fn usart1sel(&self) -> USART1SEL_R {
USART1SEL_R::new((self.bits & 3) as u8)
}
#[doc = "Bits 2:3 - USART2 clock source selection"]
#[inline(always)]
pub fn usart2sel(&self) -> USART2SEL_R {
USART2SEL_R::new(((self.bits >> 2) & 3) as u8)
}
#[doc = "Bits 4:5 - USART3 clock source selection"]
#[inline(always)]
pub fn usart3sel(&self) -> USART3SEL_R {
USART3SEL_R::new(((self.bits >> 4) & 3) as u8)
}
#[doc = "Bits 6:7 - UART4 clock source selection"]
#[inline(always)]
pub fn uart4sel(&self) -> UART4SEL_R {
UART4SEL_R::new(((self.bits >> 6) & 3) as u8)
}
#[doc = "Bits 8:9 - UART5 clock source selection"]
#[inline(always)]
pub fn uart5sel(&self) -> UART5SEL_R {
UART5SEL_R::new(((self.bits >> 8) & 3) as u8)
}
#[doc = "Bits 10:11 - LPUART1 clock source selection"]
#[inline(always)]
pub fn lpuart1sel(&self) -> LPUART1SEL_R {
LPUART1SEL_R::new(((self.bits >> 10) & 3) as u8)
}
#[doc = "Bits 12:13 - I2C1 clock source selection"]
#[inline(always)]
pub fn i2c1sel(&self) -> I2C1SEL_R {
I2C1SEL_R::new(((self.bits >> 12) & 3) as u8)
}
#[doc = "Bits 14:15 - I2C2 clock source selection"]
#[inline(always)]
pub fn i2c2sel(&self) -> I2C2SEL_R {
I2C2SEL_R::new(((self.bits >> 14) & 3) as u8)
}
#[doc = "Bits 16:17 - I2C3 clock source selection"]
#[inline(always)]
pub fn i2c3sel(&self) -> I2C3SEL_R {
I2C3SEL_R::new(((self.bits >> 16) & 3) as u8)
}
#[doc = "Bits 18:19 - Low power timer 1 clock source selection"]
#[inline(always)]
pub fn lptim1sel(&self) -> LPTIM1SEL_R {
LPTIM1SEL_R::new(((self.bits >> 18) & 3) as u8)
}
#[doc = "Bits 20:21 - Low power timer 2 clock source selection"]
#[inline(always)]
pub fn lptim2sel(&self) -> LPTIM2SEL_R {
LPTIM2SEL_R::new(((self.bits >> 20) & 3) as u8)
}
#[doc = "Bits 22:23 - SAI1 clock source selection"]
#[inline(always)]
pub fn sai1sel(&self) -> SAI1SEL_R {
SAI1SEL_R::new(((self.bits >> 22) & 3) as u8)
}
#[doc = "Bits 24:25 - SAI2 clock source selection"]
#[inline(always)]
pub fn sai2sel(&self) -> SAI2SEL_R {
SAI2SEL_R::new(((self.bits >> 24) & 3) as u8)
}
#[doc = "Bits 26:27 - 48 MHz clock source selection"]
#[inline(always)]
pub fn clk48sel(&self) -> CLK48SEL_R {
CLK48SEL_R::new(((self.bits >> 26) & 3) as u8)
}
#[doc = "Bits 28:29 - ADCs clock source selection"]
#[inline(always)]
pub fn adcsel(&self) -> ADCSEL_R {
ADCSEL_R::new(((self.bits >> 28) & 3) as u8)
}
#[doc = "Bit 30 - SWPMI1 clock source selection"]
#[inline(always)]
pub fn swpmi1sel(&self) -> SWPMI1SEL_R {
SWPMI1SEL_R::new(((self.bits >> 30) & 1) != 0)
}
#[doc = "Bit 31 - DFSDM clock source selection"]
#[inline(always)]
pub fn dfsdmsel(&self) -> DFSDMSEL_R {
DFSDMSEL_R::new(((self.bits >> 31) & 1) != 0)
}
}
impl W {
#[doc = "Bits 0:1 - USART1 clock source selection"]
#[inline(always)]
#[must_use]
pub fn usart1sel(&mut self) -> USART1SEL_W<CCIPR_SPEC, 0> {
USART1SEL_W::new(self)
}
#[doc = "Bits 2:3 - USART2 clock source selection"]
#[inline(always)]
#[must_use]
pub fn usart2sel(&mut self) -> USART2SEL_W<CCIPR_SPEC, 2> {
USART2SEL_W::new(self)
}
#[doc = "Bits 4:5 - USART3 clock source selection"]
#[inline(always)]
#[must_use]
pub fn usart3sel(&mut self) -> USART3SEL_W<CCIPR_SPEC, 4> {
USART3SEL_W::new(self)
}
#[doc = "Bits 6:7 - UART4 clock source selection"]
#[inline(always)]
#[must_use]
pub fn uart4sel(&mut self) -> UART4SEL_W<CCIPR_SPEC, 6> {
UART4SEL_W::new(self)
}
#[doc = "Bits 8:9 - UART5 clock source selection"]
#[inline(always)]
#[must_use]
pub fn uart5sel(&mut self) -> UART5SEL_W<CCIPR_SPEC, 8> {
UART5SEL_W::new(self)
}
#[doc = "Bits 10:11 - LPUART1 clock source selection"]
#[inline(always)]
#[must_use]
pub fn lpuart1sel(&mut self) -> LPUART1SEL_W<CCIPR_SPEC, 10> {
LPUART1SEL_W::new(self)
}
#[doc = "Bits 12:13 - I2C1 clock source selection"]
#[inline(always)]
#[must_use]
pub fn i2c1sel(&mut self) -> I2C1SEL_W<CCIPR_SPEC, 12> {
I2C1SEL_W::new(self)
}
#[doc = "Bits 14:15 - I2C2 clock source selection"]
#[inline(always)]
#[must_use]
pub fn i2c2sel(&mut self) -> I2C2SEL_W<CCIPR_SPEC, 14> {
I2C2SEL_W::new(self)
}
#[doc = "Bits 16:17 - I2C3 clock source selection"]
#[inline(always)]
#[must_use]
pub fn i2c3sel(&mut self) -> I2C3SEL_W<CCIPR_SPEC, 16> {
I2C3SEL_W::new(self)
}
#[doc = "Bits 18:19 - Low power timer 1 clock source selection"]
#[inline(always)]
#[must_use]
pub fn lptim1sel(&mut self) -> LPTIM1SEL_W<CCIPR_SPEC, 18> {
LPTIM1SEL_W::new(self)
}
#[doc = "Bits 20:21 - Low power timer 2 clock source selection"]
#[inline(always)]
#[must_use]
pub fn lptim2sel(&mut self) -> LPTIM2SEL_W<CCIPR_SPEC, 20> {
LPTIM2SEL_W::new(self)
}
#[doc = "Bits 22:23 - SAI1 clock source selection"]
#[inline(always)]
#[must_use]
pub fn sai1sel(&mut self) -> SAI1SEL_W<CCIPR_SPEC, 22> {
SAI1SEL_W::new(self)
}
#[doc = "Bits 24:25 - SAI2 clock source selection"]
#[inline(always)]
#[must_use]
pub fn sai2sel(&mut self) -> SAI2SEL_W<CCIPR_SPEC, 24> {
SAI2SEL_W::new(self)
}
#[doc = "Bits 26:27 - 48 MHz clock source selection"]
#[inline(always)]
#[must_use]
pub fn clk48sel(&mut self) -> CLK48SEL_W<CCIPR_SPEC, 26> {
CLK48SEL_W::new(self)
}
#[doc = "Bits 28:29 - ADCs clock source selection"]
#[inline(always)]
#[must_use]
pub fn adcsel(&mut self) -> ADCSEL_W<CCIPR_SPEC, 28> {
ADCSEL_W::new(self)
}
#[doc = "Bit 30 - SWPMI1 clock source selection"]
#[inline(always)]
#[must_use]
pub fn swpmi1sel(&mut self) -> SWPMI1SEL_W<CCIPR_SPEC, 30> {
SWPMI1SEL_W::new(self)
}
#[doc = "Bit 31 - DFSDM clock source selection"]
#[inline(always)]
#[must_use]
pub fn dfsdmsel(&mut self) -> DFSDMSEL_W<CCIPR_SPEC, 31> {
DFSDMSEL_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
// Machine-generated (svd2rust-style) register spec for CCIPR (peripheral
// clock source selection) — do not hand-edit; regenerate from the SVD.
#[doc = "CCIPR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccipr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccipr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CCIPR_SPEC;
impl crate::RegisterSpec for CCIPR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ccipr::R`](R) reader structure"]
impl crate::Readable for CCIPR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ccipr::W`](W) writer structure"]
impl crate::Writable for CCIPR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CCIPR to value 0"]
impl crate::Resettable for CCIPR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use cpal::ChannelCount;
use dsp::window::Window;
use rustfft::num_complex::Complex;
use rustfft::num_traits::Zero;
/// Buffer of real-valued (time-domain) samples.
pub type RealVec = Vec<f32>;
/// One complex FFT output bin.
pub type FftSample = Complex<f32>;
/// Owned spectrum buffer.
pub type FftVec = Vec<FftSample>;
/// Borrowed spectrum slice, as handed to callbacks.
pub type FftSlice = [FftSample];
/// Callback invoked with each freshly computed spectrum.
pub type FftCallback<'a> = &'a mut dyn FnMut(&FftSlice);
/// How to window the FFT to reduce sidelobes.
#[derive(Debug, Copy, Clone)]
pub enum WindowType {
    /// Rectangular window (no shaping applied).
    Rect,
    /// Hann window.
    Hann,
}
/// Normalization note: The FFT's output is divided by `size`
/// so a pure DC input will result in an output of `volume`.
/// As `size` increases, pure tones become thinner but not brighter,
/// and noise becomes dimmer.
#[derive(Debug, Copy, Clone)]
pub struct FftConfig {
    /// How much to amplify the incoming signal when performing the FFT.
    pub volume: f32,
    /// How many samples per FFT block.
    /// (It's probably nonsensical to use a size less than 32 or so.)
    /// Must be >= 2 (asserted in `FftBuffer::new`).
    pub size: usize,
    /// How many samples to advance before the next FFT.
    /// Must be <= size.
    pub redraw_interval: usize,
    /// The incoming wave is [frame][channel]i16.
    /// This stores the number of channels to average (or eventually separate out).
    /// Must be >= 1.
    pub channels: ChannelCount,
    /// How to window the input signal to reduce sidelobes.
    pub window_type: WindowType,
    // TODO downmix: bool,
    // TODO add option to overlap by 50%.
    // TODO add option for whether to allow multiple calls in the same push.
}
/// Accepts data from the audio thread, buffers to full FFT blocks, and runs FFT.
pub struct FftBuffer {
    // User parameters. Do not mutate.
    cfg: FftConfig,
    // Derived/cached data. Do not mutate.
    // Planned real-to-complex FFT of length `cfg.size`.
    fft: realfft::RealToComplex<f32>,
    // Precomputed window coefficients; `None` means rectangular window.
    window: Option<Window>,
    // Mutable state.
    // Downmixed mono samples awaiting a full block (capacity == cfg.size).
    buffer: RealVec,
    // Windowed copy of `buffer`; consumed in place by the FFT.
    scratch: RealVec,
    // Latest normalized spectrum (cfg.size / 2 + 1 bins).
    spectrum: FftVec,
}
impl FftBuffer {
    /// Creates a buffer that runs FFTs over blocks of `cfg.size` samples.
    ///
    /// # Panics
    /// Panics if `cfg.size < 2`, `cfg.channels < 1`, or
    /// `cfg.redraw_interval > cfg.size`.
    pub fn new(cfg: FftConfig) -> FftBuffer {
        assert!(cfg.size >= 2);
        assert!(cfg.channels >= 1);
        // NOTE(review): redraw_interval == 0 passes this assert but would
        // drain nothing in `push`, stalling redraws after the first full
        // block — confirm callers always pass >= 1.
        assert!(cfg.redraw_interval <= cfg.size);
        let fft = realfft::RealToComplex::<f32>::new(cfg.size).unwrap();
        // Window coefficients are computed once up front; `None` means
        // rectangular (no) windowing.
        let window = match cfg.window_type {
            WindowType::Rect => None,
            WindowType::Hann => Some(dsp::window::hann(cfg.size, 0, cfg.size)),
        };
        FftBuffer {
            cfg,
            fft,
            window,
            buffer: Vec::with_capacity(cfg.size),
            scratch: vec![0.; cfg.size],
            // A real FFT of n samples yields n/2 + 1 complex bins.
            spectrum: vec![FftSample::zero(); cfg.size / 2 + 1],
        }
    }
    /// Number of complex bins produced per FFT (`size / 2 + 1`).
    pub fn spectrum_size(&self) -> usize {
        self.spectrum.len()
    }
    /// Buffers interleaved `i16` samples and runs an FFT each time a full
    /// block of `cfg.size` downmixed samples is available.
    ///
    /// `input.len()` must be a multiple of `cfg.channels`; a trailing
    /// partial frame would be silently dropped by `chunks_exact`.
    ///
    /// `fft_callback()` is called on a (len/2 + 1) vector of complex values,
    /// where elements 0 and len/2 are purely real.
    pub fn push(&mut self, input: &[i16], fft_callback: FftCallback) {
        for frame in input.chunks_exact(self.cfg.channels as usize) {
            // Downmix one frame: average the channels, scaled to [-1, 1).
            let sum: f32 = frame.iter().map(|&s| (s as f32) / 32768.0).sum();
            self.buffer.push(sum / (self.cfg.channels as f32));
            // Compare against the configured block size, not `capacity()`:
            // capacity is an allocation detail, not program logic.
            if self.buffer.len() == self.cfg.size {
                self.run_fft();
                fft_callback(&self.spectrum);
                // Remove the first `redraw_interval` samples from the vector,
                // such that `redraw_interval` samples must be pushed
                // to trigger the next redraw.
                self.buffer.drain(..self.cfg.redraw_interval);
            }
        }
        // Invariant: the buffer never outgrows its initial allocation.
        assert_eq!(self.buffer.capacity(), self.cfg.size);
    }
    /// Preconditions:
    /// - self.buffer.len() == self.cfg.size (via pushing).
    /// - self.scratch.len() == self.cfg.size (via initialization).
    ///
    /// Postconditions:
    /// - self.spectrum contains the windowed, normalized FFT of self.buffer.
    /// - self.buffer is unchanged.
    fn run_fft(&mut self) {
        if let Some(window) = &self.window {
            // Precondition: LHS, input, and output have same length.
            window.apply(&self.buffer, &mut self.scratch);
        } else {
            // Precondition: LHS and src have same length.
            self.scratch.copy_from_slice(&self.buffer);
        }
        self.fft
            .process(&mut self.scratch, &mut self.spectrum)
            .unwrap();
        // Normalize by block size so a pure DC input yields `cfg.volume`;
        // the scale factor is loop-invariant, so compute it once.
        let scale = self.cfg.volume / self.buffer.len() as f32;
        for bin in self.spectrum.iter_mut() {
            *bin *= scale;
        }
    }
}
|
/// One fabric claim parsed from a line like `#123 @ 3,2: 5x4`:
/// a `width` x `height` rectangle whose top-left corner sits `left`
/// cells from the left edge and `top` cells from the top edge.
#[derive(Debug)]
pub struct Claim {
    /// Claim id from the leading `#id` token.
    pub id: u16,
    pub left: u32,
    pub top: u32,
    pub width: u32,
    pub height: u32,
}
#[aoc_generator(day3)]
pub fn input_gen(input: &str) -> Vec<Claim> {
    // Each line has the shape "#123 @ 3,2: 5x4".
    input
        .lines()
        .map(|line| {
            let at = line.find('@').unwrap() + 1;
            let (head, tail) = line.split_at(at);
            // "#123 @" -> "123"
            let id = head
                .trim_matches(|c| c == '#' || c == '@')
                .trim()
                .parse()
                .unwrap();
            // " 3,2: 5x4" -> [" 3,2", "5x4"]
            let pieces: Vec<&str> = tail.split(": ").collect();
            let offsets: Vec<u32> = pieces[0]
                .split(',')
                .map(|v| v.trim().parse().unwrap())
                .collect();
            let size: Vec<u32> = pieces[1]
                .split('x')
                .map(|v| v.parse().unwrap())
                .collect();
            Claim {
                id,
                left: offsets[0],
                top: offsets[1],
                width: size[0],
                height: size[1],
            }
        })
        .collect()
}
#[aoc(day3, part1)]
pub fn solve_part1(input: &[Claim]) -> u32 {
    // Paint every claim onto the fabric; contested cells end up as -1.
    let mut fabric = [[0i16; 1000]; 1000];
    for claim in input.iter() {
        create_claim(claim, &mut fabric);
    }
    // Count the cells claimed by two or more claims.
    fabric
        .iter()
        .flat_map(|row| row.iter())
        .filter(|&&cell| cell == -1)
        .count() as u32
}
/// Finds the id of the one claim that overlaps no other claim.
///
/// Returns 0 when no intact claim exists (the puzzle guarantees exactly
/// one, so `find` with early exit replaces the original scan-to-the-end
/// over a mutable `id` variable).
#[aoc(day3, part2)]
pub fn solve_part2(input: &[Claim]) -> u16 {
    let mut fabric = [[0i16; 1000]; 1000];
    for c in input.iter() {
        create_claim(c, &mut fabric);
    }
    // A claim is intact when every cell in its area still carries its id.
    input
        .iter()
        .find(|c| check_claim(c, &mut fabric))
        .map_or(0, |c| c.id)
}
/// Stamps `claim` onto `fabric`: an unclaimed cell (0) records the claim
/// id, while any cell that already belongs to a claim is marked
/// contested with -1.
fn create_claim(claim: &Claim, fabric: &mut [[i16; 1000]; 1000]) {
    let rows = fabric
        .iter_mut()
        .skip(claim.top as usize)
        .take(claim.height as usize);
    for row in rows {
        let cells = row
            .iter_mut()
            .skip(claim.left as usize)
            .take(claim.width as usize);
        for cell in cells {
            // First claimant keeps its id; any overlap becomes -1.
            *cell = if *cell == 0 { claim.id as i16 } else { -1 };
        }
    }
}
/// Returns `true` when every cell in `claim`'s area still carries the
/// claim's own id — i.e. the claim overlaps no other claim.
///
/// (The signature keeps `&mut` for compatibility with existing callers,
/// although the grid is only read.)
fn check_claim(claim: &Claim, fabric: &mut [[i16; 1000]; 1000]) -> bool {
    // `all` short-circuits on the first foreign/contested cell, replacing
    // the original early-return loop and its never-reassigned `intact` flag.
    fabric
        .iter_mut()
        .skip(claim.top as usize)
        .take(claim.height as usize)
        .all(|row| {
            row.iter_mut()
                .skip(claim.left as usize)
                .take(claim.width as usize)
                .all(|cell| *cell == claim.id as i16)
        })
}
#[cfg(test)]
mod tests {
use super::*;
// Fixture d3.txt holds the three example claims from the puzzle text;
// 4 square inches are contested (part 1) and claim #3 is intact (part 2).
#[test]
fn test_input() {
let input = include_str!("../input/tests/d3.txt");
let claims = input_gen(input);
let result = solve_part1(&claims);
assert_eq!(claims.len(), 3);
assert_eq!(result, 4);
}
#[test]
fn test_input2() {
let input = include_str!("../input/tests/d3.txt");
let claims = input_gen(input);
let result = solve_part2(&claims);
assert_eq!(claims.len(), 3);
assert_eq!(result, 3);
}
}
|
extern crate toml;
use std::io::Read;
use std::fs::File;
/// Which textual tradition a psalter is drawn from.
/// NOTE(review): LXX/MT presumably abbreviate Septuagint and Masoretic
/// Text — confirm against project documentation.
#[derive(Debug)]
pub enum TextType{
LXX,
MT,
}
/// Metadata describing a psalter edition.
#[derive(Debug)]
pub struct Psalter{
version: String,
// Delimiter inserted between verses when rendering text.
verse_delimiter: String,
text_type: TextType,
}
/// A single psalm: its number and full text.
#[derive(Debug)]
pub struct Psalm{
number: usize,
text: String,
}
// Absolute path to the example psalter TOML file read by `get_psalm`.
//TODO: Make this not hard coded
static PSALTER_PATH: &'static str = "/home/aletheia/git/anthologion_rs/resources/example/bible/psalms.toml";
impl Psalter {
    /// Builds a psalter description from its edition settings.
    pub fn new(version: String, verse_delimiter: String, text_type: TextType) -> Self {
        // Field-init shorthand: parameter names match the field names.
        Psalter { version, verse_delimiter, text_type }
    }
}
impl Psalm {
    /// Builds a psalm from its number and full text.
    pub fn new(number: usize, text: String) -> Self {
        // Field-init shorthand: parameter names match the field names.
        Psalm { number, text }
    }
}
/// Implementing `Display` (rather than `ToString` directly) is the
/// idiomatic way to provide a textual form: the blanket
/// `impl<T: Display> ToString for T` keeps `psalm.to_string()` working
/// for all existing callers, and `write!` avoids the intermediate clone
/// the old `ToString` impl performed.
impl std::fmt::Display for Psalm {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self.text)
    }
}
/// Loads psalm `target` (1-indexed) from the TOML psalter file.
///
/// # Panics
/// Panics if the psalter file cannot be opened/read/parsed or the
/// requested psalm entry is missing.
pub fn get_psalm(target: usize) -> Psalm {
    // The TOML file stores psalms 0-indexed.
    let target = target - 1;
    let mut psalter_file = File::open(PSALTER_PATH).unwrap();
    let mut psalter_toml = String::new();
    // Bug fix: the Result of read_to_string was silently discarded,
    // so a failed read would proceed with an empty string.
    psalter_file
        .read_to_string(&mut psalter_toml)
        .expect("unable to read psalter file");
    let psalter: toml::Value = psalter_toml.parse().unwrap();
    // Look up the psalm number, e.g. "psalm.0.number".
    let toml_path = "psalm.".to_string() + &target.to_string() + ".number";
    let number = psalter.lookup(&toml_path).unwrap();
    let number = match *number {
        toml::Value::Integer(n) => n as usize,
        // Sentinel kept for compatibility when the value has a wrong type.
        _ => 256,
    };
    // Look up the psalm text, e.g. "psalm.0.text".
    let toml_path = "psalm.".to_string() + &target.to_string() + ".text";
    let text = psalter.lookup(&toml_path).unwrap();
    let text = match *text {
        toml::Value::String(ref s) => s.clone(),
        _ => String::new(),
    };
    Psalm::new(number, text)
}
// Smoke test: requires the hard-coded PSALTER_PATH file to exist and
// contain Psalm 1 ("Blessed is the man...").
#[test]
fn try_get_psalm(){
let p = get_psalm(1);
assert!(p.number == 1);
assert!(p.text.contains("Blessed is the man"));
}
|
use std::fs::File;
use std::io::prelude::*;
use std::process::Command;
/// Compiles `input` with rcc2, executes the generated LLVM IR with
/// `lli`, and asserts the process exit status matches `expect`.
fn run(input: &str, expect: &str) {
    // Compile the C source to LLVM IR (writes compiled.ll).
    Command::new("sh")
        .arg("-c")
        .arg(format!("./target/debug/rcc2 \"{}\"", input))
        .status()
        .expect("process failed to execute");
    // Assemble and run the IR; the program's result is its exit code.
    let status = Command::new("sh")
        .arg("-c")
        .arg("llvm-as compiled.ll; lli compiled.bc")
        .status()
        .expect("failed to execute process");
    println!("{:?} => {:?}", status.to_string(), expect);
    // assert_eq! reports both sides on failure, and format! already
    // returns a String (the String::from wrapper was redundant).
    assert_eq!(status.to_string(), format!("exit code: {}", expect));
}
/// Reads a C test fixture from ./tests/resources/ into a String.
///
/// # Panics
/// Panics if the fixture cannot be opened or read.
fn get_code(filename: &str) -> String {
    let filename = String::from("./tests/resources/") + filename;
    let mut f = File::open(filename).expect("file not found");
    let mut contents = String::new();
    // Typo fixed in the panic message ("somethig" -> "something").
    f.read_to_string(&mut contents)
        .expect("something went wrong reading the file");
    contents
}
// Each test compiles one C fixture with rcc2 and checks the exit code
// produced by running the generated IR (see `run` / `get_code` above).
#[test]
fn test_addition() {
let code = get_code("test_addition.c");
run(&code, "3")
}
#[test]
fn test_array() {
let code = get_code("test_array.c");
run(&code, "33")
}
#[test]
fn test_array2() {
let code = get_code("test_array2.c");
run(&code, "77")
}
#[test]
fn test_array3() {
let code = get_code("test_array3.c");
run(&code, "72")
}
#[test]
fn test_division() {
let code = get_code("test_division.c");
run(&code, "6")
}
#[test]
fn test_function_call() {
let code = get_code("test_function_call.c");
run(&code, "10")
}
#[test]
fn test_function_call2() {
let code = get_code("test_function_call2.c");
run(&code, "35")
}
#[test]
fn test_multi_statement() {
let code = get_code("test_multi_statements.c");
run(&code, "23")
}
#[test]
fn test_multi_term() {
let code = get_code("test_multi_term.c");
run(&code, "7")
}
#[test]
fn test_multiplication() {
let code = get_code("test_multiplication.c");
run(&code, "2")
}
#[test]
fn test_pointer() {
let code = get_code("test_pointer.c");
run(&code, "24")
}
#[test]
fn test_subtraction() {
let code = get_code("test_subtraction.c");
run(&code, "5")
}
#[test]
fn test_variable() {
let code = get_code("test_variable.c");
run(&code, "26")
}
|
use days::day08::common::*;
/// Executes every parseable instruction line against a fresh registry
/// and returns `Registry::highest()` — presumably the highest register
/// value (confirm in `common`).
pub fn search(data: &str) -> isize {
    let mut registry = Registry::new();
    data.lines()
        .map(Line::from)
        // TODO: Return Err() immediately instead of skipping bad lines.
        .filter_map(|line| line.ok())
        // Plain mutation replaces the original fold over a `&mut`
        // accumulator, which only obscured a simple for-each.
        .for_each(|line| registry.execute(line));
    registry.highest()
}
#[cfg(test)]
mod tests {
use super::search;
// Example program from the AoC day 8 puzzle; expected answer is 10.
#[test]
fn day08_part1_test1() {
let data = "b inc 5 if a > 1
a inc 1 if b < 5
c dec -10 if a >= 1
c inc -20 if c == 10
";
assert_eq!(10, search(data));
}
}
|
// NOTE(review): this register module appears to be svd2rust-generated;
// changes are normally made in the SVD description and regenerated,
// not edited by hand.
#[doc = "Register `ETH_MACCR` reader"]
pub type R = crate::R<ETH_MACCR_SPEC>;
#[doc = "Register `ETH_MACCR` writer"]
pub type W = crate::W<ETH_MACCR_SPEC>;
#[doc = "Field `RE` reader - RE"]
pub type RE_R = crate::BitReader;
#[doc = "Field `RE` writer - RE"]
pub type RE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TE` reader - TE"]
pub type TE_R = crate::BitReader;
#[doc = "Field `TE` writer - TE"]
pub type TE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PRELEN` reader - PRELEN"]
pub type PRELEN_R = crate::FieldReader;
#[doc = "Field `PRELEN` writer - PRELEN"]
pub type PRELEN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `DC` reader - DC"]
pub type DC_R = crate::BitReader;
#[doc = "Field `DC` writer - DC"]
pub type DC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BL` reader - BL"]
pub type BL_R = crate::FieldReader;
#[doc = "Field `BL` writer - BL"]
pub type BL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `DR` reader - DR"]
pub type DR_R = crate::BitReader;
#[doc = "Field `DR` writer - DR"]
pub type DR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DCRS` reader - DCRS"]
pub type DCRS_R = crate::BitReader;
#[doc = "Field `DCRS` writer - DCRS"]
pub type DCRS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DO` reader - DO"]
pub type DO_R = crate::BitReader;
#[doc = "Field `DO` writer - DO"]
pub type DO_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECRSFD` reader - ECRSFD"]
pub type ECRSFD_R = crate::BitReader;
#[doc = "Field `ECRSFD` writer - ECRSFD"]
pub type ECRSFD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LM` reader - LM"]
pub type LM_R = crate::BitReader;
#[doc = "Field `LM` writer - LM"]
pub type LM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DM` reader - DM"]
pub type DM_R = crate::BitReader;
#[doc = "Field `DM` writer - DM"]
pub type DM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FES` reader - FES"]
pub type FES_R = crate::BitReader;
#[doc = "Field `FES` writer - FES"]
pub type FES_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PS` reader - PS"]
pub type PS_R = crate::BitReader;
#[doc = "Field `PS` writer - PS"]
pub type PS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JE` reader - JE"]
pub type JE_R = crate::BitReader;
#[doc = "Field `JE` writer - JE"]
pub type JE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JD` reader - JD"]
pub type JD_R = crate::BitReader;
#[doc = "Field `JD` writer - JD"]
pub type JD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BE` reader - BE"]
pub type BE_R = crate::BitReader;
#[doc = "Field `BE` writer - BE"]
pub type BE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WD` reader - WD"]
pub type WD_R = crate::BitReader;
#[doc = "Field `WD` writer - WD"]
pub type WD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ACS` reader - ACS"]
pub type ACS_R = crate::BitReader;
#[doc = "Field `ACS` writer - ACS"]
pub type ACS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CST` reader - CST"]
pub type CST_R = crate::BitReader;
#[doc = "Field `CST` writer - CST"]
pub type CST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `S2KP` reader - S2KP"]
pub type S2KP_R = crate::BitReader;
#[doc = "Field `S2KP` writer - S2KP"]
pub type S2KP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `GPSLCE` reader - GPSLCE"]
pub type GPSLCE_R = crate::BitReader;
#[doc = "Field `GPSLCE` writer - GPSLCE"]
pub type GPSLCE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IPG` reader - IPG"]
pub type IPG_R = crate::FieldReader;
#[doc = "Field `IPG` writer - IPG"]
pub type IPG_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `IPC` reader - IPC"]
pub type IPC_R = crate::BitReader;
#[doc = "Field `IPC` writer - IPC"]
pub type IPC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SARC` reader - SARC"]
pub type SARC_R = crate::FieldReader;
#[doc = "Field `SARC` writer - SARC"]
pub type SARC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `ARPEN` reader - ARPEN"]
pub type ARPEN_R = crate::BitReader;
#[doc = "Field `ARPEN` writer - ARPEN"]
pub type ARPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each method extracts one field from the cached 32-bit
// register value (svd2rust-generated; bit positions come from the SVD).
impl R {
#[doc = "Bit 0 - RE"]
#[inline(always)]
pub fn re(&self) -> RE_R {
RE_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - TE"]
#[inline(always)]
pub fn te(&self) -> TE_R {
TE_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bits 2:3 - PRELEN"]
#[inline(always)]
pub fn prelen(&self) -> PRELEN_R {
PRELEN_R::new(((self.bits >> 2) & 3) as u8)
}
#[doc = "Bit 4 - DC"]
#[inline(always)]
pub fn dc(&self) -> DC_R {
DC_R::new(((self.bits >> 4) & 1) != 0)
}
#[doc = "Bits 5:6 - BL"]
#[inline(always)]
pub fn bl(&self) -> BL_R {
BL_R::new(((self.bits >> 5) & 3) as u8)
}
#[doc = "Bit 8 - DR"]
#[inline(always)]
pub fn dr(&self) -> DR_R {
DR_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - DCRS"]
#[inline(always)]
pub fn dcrs(&self) -> DCRS_R {
DCRS_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - DO"]
#[inline(always)]
pub fn do_(&self) -> DO_R {
DO_R::new(((self.bits >> 10) & 1) != 0)
}
#[doc = "Bit 11 - ECRSFD"]
#[inline(always)]
pub fn ecrsfd(&self) -> ECRSFD_R {
ECRSFD_R::new(((self.bits >> 11) & 1) != 0)
}
#[doc = "Bit 12 - LM"]
#[inline(always)]
pub fn lm(&self) -> LM_R {
LM_R::new(((self.bits >> 12) & 1) != 0)
}
#[doc = "Bit 13 - DM"]
#[inline(always)]
pub fn dm(&self) -> DM_R {
DM_R::new(((self.bits >> 13) & 1) != 0)
}
#[doc = "Bit 14 - FES"]
#[inline(always)]
pub fn fes(&self) -> FES_R {
FES_R::new(((self.bits >> 14) & 1) != 0)
}
#[doc = "Bit 15 - PS"]
#[inline(always)]
pub fn ps(&self) -> PS_R {
PS_R::new(((self.bits >> 15) & 1) != 0)
}
#[doc = "Bit 16 - JE"]
#[inline(always)]
pub fn je(&self) -> JE_R {
JE_R::new(((self.bits >> 16) & 1) != 0)
}
#[doc = "Bit 17 - JD"]
#[inline(always)]
pub fn jd(&self) -> JD_R {
JD_R::new(((self.bits >> 17) & 1) != 0)
}
#[doc = "Bit 18 - BE"]
#[inline(always)]
pub fn be(&self) -> BE_R {
BE_R::new(((self.bits >> 18) & 1) != 0)
}
#[doc = "Bit 19 - WD"]
#[inline(always)]
pub fn wd(&self) -> WD_R {
WD_R::new(((self.bits >> 19) & 1) != 0)
}
#[doc = "Bit 20 - ACS"]
#[inline(always)]
pub fn acs(&self) -> ACS_R {
ACS_R::new(((self.bits >> 20) & 1) != 0)
}
#[doc = "Bit 21 - CST"]
#[inline(always)]
pub fn cst(&self) -> CST_R {
CST_R::new(((self.bits >> 21) & 1) != 0)
}
#[doc = "Bit 22 - S2KP"]
#[inline(always)]
pub fn s2kp(&self) -> S2KP_R {
S2KP_R::new(((self.bits >> 22) & 1) != 0)
}
#[doc = "Bit 23 - GPSLCE"]
#[inline(always)]
pub fn gpslce(&self) -> GPSLCE_R {
GPSLCE_R::new(((self.bits >> 23) & 1) != 0)
}
#[doc = "Bits 24:26 - IPG"]
#[inline(always)]
pub fn ipg(&self) -> IPG_R {
IPG_R::new(((self.bits >> 24) & 7) as u8)
}
#[doc = "Bit 27 - IPC"]
#[inline(always)]
pub fn ipc(&self) -> IPC_R {
IPC_R::new(((self.bits >> 27) & 1) != 0)
}
#[doc = "Bits 28:30 - SARC"]
#[inline(always)]
pub fn sarc(&self) -> SARC_R {
SARC_R::new(((self.bits >> 28) & 7) as u8)
}
#[doc = "Bit 31 - ARPEN"]
#[inline(always)]
pub fn arpen(&self) -> ARPEN_R {
ARPEN_R::new(((self.bits >> 31) & 1) != 0)
}
}
// Write accessors: each method returns a typed writer proxy positioned at
// the field's bit offset (svd2rust-generated).
impl W {
#[doc = "Bit 0 - RE"]
#[inline(always)]
#[must_use]
pub fn re(&mut self) -> RE_W<ETH_MACCR_SPEC, 0> {
RE_W::new(self)
}
#[doc = "Bit 1 - TE"]
#[inline(always)]
#[must_use]
pub fn te(&mut self) -> TE_W<ETH_MACCR_SPEC, 1> {
TE_W::new(self)
}
#[doc = "Bits 2:3 - PRELEN"]
#[inline(always)]
#[must_use]
pub fn prelen(&mut self) -> PRELEN_W<ETH_MACCR_SPEC, 2> {
PRELEN_W::new(self)
}
#[doc = "Bit 4 - DC"]
#[inline(always)]
#[must_use]
pub fn dc(&mut self) -> DC_W<ETH_MACCR_SPEC, 4> {
DC_W::new(self)
}
#[doc = "Bits 5:6 - BL"]
#[inline(always)]
#[must_use]
pub fn bl(&mut self) -> BL_W<ETH_MACCR_SPEC, 5> {
BL_W::new(self)
}
#[doc = "Bit 8 - DR"]
#[inline(always)]
#[must_use]
pub fn dr(&mut self) -> DR_W<ETH_MACCR_SPEC, 8> {
DR_W::new(self)
}
#[doc = "Bit 9 - DCRS"]
#[inline(always)]
#[must_use]
pub fn dcrs(&mut self) -> DCRS_W<ETH_MACCR_SPEC, 9> {
DCRS_W::new(self)
}
#[doc = "Bit 10 - DO"]
#[inline(always)]
#[must_use]
pub fn do_(&mut self) -> DO_W<ETH_MACCR_SPEC, 10> {
DO_W::new(self)
}
#[doc = "Bit 11 - ECRSFD"]
#[inline(always)]
#[must_use]
pub fn ecrsfd(&mut self) -> ECRSFD_W<ETH_MACCR_SPEC, 11> {
ECRSFD_W::new(self)
}
#[doc = "Bit 12 - LM"]
#[inline(always)]
#[must_use]
pub fn lm(&mut self) -> LM_W<ETH_MACCR_SPEC, 12> {
LM_W::new(self)
}
#[doc = "Bit 13 - DM"]
#[inline(always)]
#[must_use]
pub fn dm(&mut self) -> DM_W<ETH_MACCR_SPEC, 13> {
DM_W::new(self)
}
#[doc = "Bit 14 - FES"]
#[inline(always)]
#[must_use]
pub fn fes(&mut self) -> FES_W<ETH_MACCR_SPEC, 14> {
FES_W::new(self)
}
#[doc = "Bit 15 - PS"]
#[inline(always)]
#[must_use]
pub fn ps(&mut self) -> PS_W<ETH_MACCR_SPEC, 15> {
PS_W::new(self)
}
#[doc = "Bit 16 - JE"]
#[inline(always)]
#[must_use]
pub fn je(&mut self) -> JE_W<ETH_MACCR_SPEC, 16> {
JE_W::new(self)
}
#[doc = "Bit 17 - JD"]
#[inline(always)]
#[must_use]
pub fn jd(&mut self) -> JD_W<ETH_MACCR_SPEC, 17> {
JD_W::new(self)
}
#[doc = "Bit 18 - BE"]
#[inline(always)]
#[must_use]
pub fn be(&mut self) -> BE_W<ETH_MACCR_SPEC, 18> {
BE_W::new(self)
}
#[doc = "Bit 19 - WD"]
#[inline(always)]
#[must_use]
pub fn wd(&mut self) -> WD_W<ETH_MACCR_SPEC, 19> {
WD_W::new(self)
}
#[doc = "Bit 20 - ACS"]
#[inline(always)]
#[must_use]
pub fn acs(&mut self) -> ACS_W<ETH_MACCR_SPEC, 20> {
ACS_W::new(self)
}
#[doc = "Bit 21 - CST"]
#[inline(always)]
#[must_use]
pub fn cst(&mut self) -> CST_W<ETH_MACCR_SPEC, 21> {
CST_W::new(self)
}
#[doc = "Bit 22 - S2KP"]
#[inline(always)]
#[must_use]
pub fn s2kp(&mut self) -> S2KP_W<ETH_MACCR_SPEC, 22> {
S2KP_W::new(self)
}
#[doc = "Bit 23 - GPSLCE"]
#[inline(always)]
#[must_use]
pub fn gpslce(&mut self) -> GPSLCE_W<ETH_MACCR_SPEC, 23> {
GPSLCE_W::new(self)
}
#[doc = "Bits 24:26 - IPG"]
#[inline(always)]
#[must_use]
pub fn ipg(&mut self) -> IPG_W<ETH_MACCR_SPEC, 24> {
IPG_W::new(self)
}
#[doc = "Bit 27 - IPC"]
#[inline(always)]
#[must_use]
pub fn ipc(&mut self) -> IPC_W<ETH_MACCR_SPEC, 27> {
IPC_W::new(self)
}
#[doc = "Bits 28:30 - SARC"]
#[inline(always)]
#[must_use]
pub fn sarc(&mut self) -> SARC_W<ETH_MACCR_SPEC, 28> {
SARC_W::new(self)
}
#[doc = "Bit 31 - ARPEN"]
#[inline(always)]
#[must_use]
pub fn arpen(&mut self) -> ARPEN_W<ETH_MACCR_SPEC, 31> {
ARPEN_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// NOTE(review): `unsafe` because arbitrary raw bits bypass the typed
// field writers; the caller is responsible for a valid register value.
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "The MAC Configuration Register establishes the operating mode of the MAC.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eth_maccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eth_maccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETH_MACCR_SPEC;
impl crate::RegisterSpec for ETH_MACCR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`eth_maccr::R`](R) reader structure"]
impl crate::Readable for ETH_MACCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`eth_maccr::W`](W) writer structure"]
impl crate::Writable for ETH_MACCR_SPEC {
// No write-1-to-clear / write-0-to-clear fields in this register.
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ETH_MACCR to value 0x8000"]
impl crate::Resettable for ETH_MACCR_SPEC {
const RESET_VALUE: Self::Ux = 0x8000;
}
|
use shorthand::ShortHand;
// Exercises the `ShortHand` derive: it is expected to generate a
// `set_value` setter for the `value` field (see test below).
#[derive(ShortHand, Default)]
struct Command {
value: String,
}
// The type ascription checks that the generated setter is chainable,
// i.e. returns `&mut Command`.
#[test]
fn test_set() { let _: &mut Command = Command::default().set_value("".to_string()); }
fn main() {}
|
// Vectors are resizable arrs
/// Demonstrates basic `Vec` operations: creation, printing, push/pop,
/// and the three iteration styles (shared, enumerated, mutable).
pub fn run() {
    let mut numbers_vector: Vec<i32> = vec![1, 2, 3, 4];
    // print part is pretty much same with arrays
    println!("{:?}", numbers_vector);
    println!("Single value of vec: {}", numbers_vector[2]);
    println!("Numbers vec occupies {} bytes", std::mem::size_of_val(&numbers_vector));
    // Adding and popping last elem
    numbers_vector.push(2);
    numbers_vector.pop();
    // Fix: missing space before the placeholder in the message.
    println!("Vector is same after push and pop {:?}", numbers_vector);
    // Looping through wout idx
    for elem in numbers_vector.iter() {
        println!("{}", elem);
    }
    // Looping with index. Bug fix: `enumerate()` yields (index, element);
    // the original bound them as `(elem, i)`, swapping the names (the
    // printed output — index then element — is unchanged).
    for (i, elem) in numbers_vector.iter().enumerate() {
        println!("{}, {}", i, elem);
    }
    // Looping through and map
    for elem in numbers_vector.iter_mut() {
        *elem += 2;
    }
    println!("After mutation {:?}", numbers_vector);
}
use crate::format::problem::*;
use crate::format::solution::*;
use crate::helpers::*;
/// Three delivery jobs along a line: job1 and job2 carry explicit order
/// priorities (2 and 1 respectively); job3 has no order.
fn create_test_plan_with_three_jobs() -> Plan {
    Plan {
        jobs: vec![
            create_delivery_job_with_order("job1", vec![2., 0.], 2),
            create_delivery_job_with_order("job2", vec![5., 0.], 1),
            create_delivery_job("job3", vec![7., 0.]),
        ],
        // `None` is the idiomatic spelling of `Option::None`.
        relations: None,
    }
}
/// Vehicle limits used by these tests: a max travel distance of 15 with
/// no shift-time, tour-size, or area restrictions.
fn create_test_limit() -> Option<VehicleLimits> {
Some(VehicleLimits { max_distance: Some(15.), shift_time: None, tour_size: None, allowed_areas: None })
}
// With no distance limit, the solver should serve jobs in their declared
// order priority: job2 (order 1), then job1 (order 2), then job3.
#[test]
fn can_follow_orders() {
let problem = Problem {
plan: create_test_plan_with_three_jobs(),
fleet: Fleet { vehicles: vec![create_default_vehicle_type()], profiles: create_default_matrix_profiles() },
..create_empty_problem()
};
let matrix = create_matrix_from_problem(&problem);
let solution = solve_with_metaheuristic(problem, Some(vec![matrix]));
// Full expected solution, including per-stop timings and load counts.
assert_eq!(
solution,
Solution {
statistic: Statistic {
cost: 53.,
distance: 20,
duration: 23,
times: Timing { driving: 20, serving: 3, waiting: 0, break_time: 0 },
},
tours: vec![Tour {
vehicle_id: "my_vehicle_1".to_string(),
type_id: "my_vehicle".to_string(),
shift_index: 0,
stops: vec![
create_stop_with_activity(
"departure",
"departure",
(0., 0.),
3,
("1970-01-01T00:00:00Z", "1970-01-01T00:00:00Z"),
0
),
create_stop_with_activity(
"job2",
"delivery",
(5., 0.),
2,
("1970-01-01T00:00:05Z", "1970-01-01T00:00:06Z"),
5
),
create_stop_with_activity(
"job1",
"delivery",
(2., 0.),
1,
("1970-01-01T00:00:09Z", "1970-01-01T00:00:10Z"),
8
),
create_stop_with_activity(
"job3",
"delivery",
(7., 0.),
0,
("1970-01-01T00:00:15Z", "1970-01-01T00:00:16Z"),
13
),
create_stop_with_activity(
"arrival",
"arrival",
(0., 0.),
0,
("1970-01-01T00:00:23Z", "1970-01-01T00:00:23Z"),
20
)
],
statistic: Statistic {
cost: 53.,
distance: 20,
duration: 23,
times: Timing { driving: 20, serving: 3, waiting: 0, break_time: 0 },
},
}],
..create_empty_solution()
}
);
}
// With the distance limit in place and the default objective, the solver
// should prefer assigning all jobs (shorter route) over honoring order.
#[test]
fn can_assign_more_jobs_ignoring_order_with_default_objective() {
let problem = Problem {
plan: create_test_plan_with_three_jobs(),
fleet: Fleet {
vehicles: vec![VehicleType { limits: create_test_limit(), ..create_default_vehicle_type() }],
profiles: create_default_matrix_profiles(),
},
..create_empty_problem()
};
let matrix = create_matrix_from_problem(&problem);
let solution = solve_with_metaheuristic(problem, Some(vec![matrix]));
// All three jobs fit within the 15-distance limit when order is ignored.
assert!(solution.unassigned.is_none());
assert_eq!(solution.tours.len(), 1);
assert_eq!(solution.statistic.distance, 14);
}
// With a constrained TourOrder objective ranked above cost, order must be
// respected even though that forces job3 past the max-distance limit.
#[test]
fn can_follow_order_when_prioritized_property_set() {
let problem = Problem {
plan: create_test_plan_with_three_jobs(),
fleet: Fleet {
vehicles: vec![VehicleType { limits: create_test_limit(), ..create_default_vehicle_type() }],
profiles: create_default_matrix_profiles(),
},
objectives: Some(vec![
vec![Objective::MinimizeUnassignedJobs { breaks: None }],
vec![Objective::MinimizeTours {}],
vec![Objective::TourOrder { is_constrained: true }],
vec![Objective::MinimizeCost],
]),
..create_empty_problem()
};
let matrix = create_matrix_from_problem(&problem);
let solution = solve_with_metaheuristic(problem, Some(vec![matrix]));
assert_eq!(solution.tours.len(), 1);
// job3 cannot be reached within the distance budget once order is kept.
assert_eq!(
solution.unassigned,
Some(vec![UnassignedJob {
job_id: "job3".to_string(),
reasons: vec![UnassignedJobReason {
code: "MAX_DISTANCE_CONSTRAINT".to_string(),
description: "cannot be assigned due to max distance constraint of vehicle".to_string()
}]
}])
);
}
|
use super::*;
// Postgres-dialect DDL generation. The exact emitted byte sequences are
// the contract here, so the logic is documented but left untouched.
impl TableBuilder for PostgresQueryBuilder {
// Writes `"name" <type> <spec...>`, skipping AUTO_INCREMENT (handled by
// substituting serial types in prepare_column_type_check_auto_increment).
fn prepare_column_def(&self, column_def: &ColumnDef, sql: &mut SqlWriter) {
column_def.name.prepare(sql, '"');
self.prepare_column_type_check_auto_increment(column_def, sql);
for column_spec in column_def.spec.iter() {
if let ColumnSpec::AutoIncrement = column_spec {
continue;
}
write!(sql, " ").unwrap();
self.prepare_column_spec(column_spec, sql);
}
}
// Maps the abstract ColumnType to a Postgres type name.
// NOTE(review): "tinyint" and parameterized "integer(n)"/"bigint(n)"
// are not valid Postgres types — confirm whether these arms are
// intentionally MySQL-flavored or should be mapped (e.g. tinyint ->
// smallint).
fn prepare_column_type(&self, column_type: &ColumnType, sql: &mut SqlWriter) {
write!(
sql,
"{}",
match column_type {
ColumnType::Char(length) => match length {
Some(length) => format!("char({})", length),
None => "char".into(),
},
ColumnType::String(length) => match length {
Some(length) => format!("varchar({})", length),
None => "varchar".into(),
},
ColumnType::Text => "text".into(),
ColumnType::TinyInteger(length) => match length {
Some(length) => format!("tinyint({})", length),
None => "tinyint".into(),
},
ColumnType::SmallInteger(length) => match length {
Some(length) => format!("smallint({})", length),
None => "smallint".into(),
},
ColumnType::Integer(length) => match length {
Some(length) => format!("integer({})", length),
None => "integer".into(),
},
ColumnType::BigInteger(length) => match length {
Some(length) => format!("bigint({})", length),
None => "bigint".into(),
},
ColumnType::Float(precision) => match precision {
Some(precision) => format!("real({})", precision),
None => "real".into(),
},
ColumnType::Double(precision) => match precision {
Some(precision) => format!("double precision({})", precision),
None => "double precision".into(),
},
ColumnType::Decimal(precision) => match precision {
Some((precision, scale)) => format!("decimal({}, {})", precision, scale),
None => "decimal".into(),
},
ColumnType::DateTime(precision) => match precision {
Some(precision) => format!("datetime({})", precision),
None => "datetime".into(),
},
ColumnType::Timestamp(precision) => match precision {
Some(precision) => format!("timestamp({})", precision),
None => "timestamp".into(),
},
ColumnType::Time(precision) => match precision {
Some(precision) => format!("time({})", precision),
None => "time".into(),
},
ColumnType::Date => "date".into(),
ColumnType::Binary(length) => match length {
Some(length) => format!("binary({})", length),
None => "binary".into(),
},
ColumnType::Boolean => "bool".into(),
ColumnType::Money(precision) => match precision {
Some((precision, scale)) => format!("money({}, {})", precision, scale),
None => "money".into(),
},
ColumnType::Json => "json".into(),
ColumnType::JsonBinary => "jsonb".into(),
ColumnType::Custom(iden) => iden.to_string(),
}
)
.unwrap()
}
// Renders a single column constraint keyword; AutoIncrement is a no-op
// here because the serial type substitution already expresses it.
fn prepare_column_spec(&self, column_spec: &ColumnSpec, sql: &mut SqlWriter) {
match column_spec {
ColumnSpec::Null => write!(sql, "NULL"),
ColumnSpec::NotNull => write!(sql, "NOT NULL"),
ColumnSpec::Default(value) => write!(sql, "DEFAULT {}", self.value_to_string(value)),
ColumnSpec::AutoIncrement => write!(sql, ""),
ColumnSpec::UniqueKey => write!(sql, "UNIQUE"),
ColumnSpec::PrimaryKey => write!(sql, "PRIMARY KEY"),
ColumnSpec::Extra(string) => write!(sql, "{}", string),
}
.unwrap()
}
// Postgres declarative partitioning is not emitted by this builder.
fn prepare_table_partition(&self, _table_partition: &TablePartition, _sql: &mut SqlWriter) {}
// Builds an ALTER TABLE statement. Postgres has no single MODIFY COLUMN,
// so a modification becomes ALTER COLUMN ... TYPE plus one
// ALTER COLUMN ... SET per remaining spec.
fn prepare_table_alter_statement(&self, alter: &TableAlterStatement, sql: &mut SqlWriter) {
let alter_option = match &alter.alter_option {
Some(alter_option) => alter_option,
None => panic!("No alter option found"),
};
write!(sql, "ALTER TABLE ").unwrap();
if let Some(table) = &alter.table {
table.prepare(sql, '"');
write!(sql, " ").unwrap();
}
match alter_option {
TableAlterOption::AddColumn(column_def) => {
write!(sql, "ADD COLUMN ").unwrap();
self.prepare_column_def(column_def, sql);
}
TableAlterOption::ModifyColumn(column_def) => {
write!(sql, "ALTER COLUMN ").unwrap();
column_def.name.prepare(sql, '"');
// prepare_column_type_check_auto_increment writes a leading
// space, completing "TYPE <type>".
write!(sql, " TYPE").unwrap();
self.prepare_column_type_check_auto_increment(column_def, sql);
for column_spec in column_def.spec.iter() {
if let ColumnSpec::AutoIncrement = column_spec {
continue;
}
write!(sql, ", ").unwrap();
write!(sql, "ALTER COLUMN ").unwrap();
column_def.name.prepare(sql, '"');
write!(sql, " SET ").unwrap();
self.prepare_column_spec(column_spec, sql);
}
}
TableAlterOption::RenameColumn(from_name, to_name) => {
write!(sql, "RENAME COLUMN ").unwrap();
from_name.prepare(sql, '"');
write!(sql, " TO ").unwrap();
to_name.prepare(sql, '"');
}
TableAlterOption::DropColumn(column_name) => {
write!(sql, "DROP COLUMN ").unwrap();
column_name.prepare(sql, '"');
}
}
}
// Builds `ALTER TABLE "from" RENAME TO "to"`.
fn prepare_table_rename_statement(&self, rename: &TableRenameStatement, sql: &mut SqlWriter) {
write!(sql, "ALTER TABLE ").unwrap();
if let Some(from_name) = &rename.from_name {
from_name.prepare(sql, '"');
}
write!(sql, " RENAME TO ").unwrap();
if let Some(to_name) = &rename.to_name {
to_name.prepare(sql, '"');
}
}
}
impl PostgresQueryBuilder {
    /// Writes the column type, substituting the Postgres serial types
    /// (`smallserial`/`serial`/`bigserial`) when the column is marked
    /// auto-increment; otherwise defers to `prepare_column_type`.
    /// Emits nothing when the column has no type set.
    fn prepare_column_type_check_auto_increment(
        &self,
        column_def: &ColumnDef,
        sql: &mut SqlWriter,
    ) {
        if let Some(column_type) = &column_def.types {
            write!(sql, " ").unwrap();
            // `any` expresses the intent directly (clippy: search_is_some
            // flagged the original `position(..).is_some()`).
            let is_auto_increment = column_def
                .spec
                .iter()
                .any(|s| matches!(s, ColumnSpec::AutoIncrement));
            if is_auto_increment {
                match column_type {
                    ColumnType::SmallInteger(_) => write!(sql, "smallserial").unwrap(),
                    ColumnType::Integer(_) => write!(sql, "serial").unwrap(),
                    ColumnType::BigInteger(_) => write!(sql, "bigserial").unwrap(),
                    // Auto-increment only makes sense on integer columns.
                    _ => unimplemented!(),
                }
            } else {
                // `column_type` is already a reference; the original
                // passed a redundant `&&ColumnType`.
                self.prepare_column_type(column_type, sql);
            }
        }
    }
}
|
use rust_htslib::bam;
use rust_htslib::bam::Read;
use failure::*;
/// Extracts the barcode prefix from a read name: the bytes before the
/// first `_` in the qname, or `None` when no `_` is present.
pub fn read_tag(r1: &bam::Record) -> Option<&[u8]> {
    let qname = r1.qname();
    // `position(..).map(..)` replaces the manual if-let/else-None
    // (clippy: the match-Some-to-Some anti-pattern).
    qname
        .iter()
        .position(|&ch| ch == b'_')
        .map(|delim_pos| &qname[..delim_pos])
}
/// Iterator that groups consecutive BAM records sharing a barcode.
pub struct BarcodeGroups<'a> {
bam_reader: &'a mut bam::Reader,
// One-record lookahead: the first record of the next group (or None at EOF).
next_record: Option<bam::Record>,
// Extracts the barcode from a record; None means "no barcode" (an error).
read_barcode: &'a dyn Fn(&bam::Record) -> Option<&[u8]>,
}
impl<'a> BarcodeGroups<'a> {
// Convenience constructor that derives barcodes from read names via
// `read_tag` (everything before the first `_` in the qname).
pub fn new_with_read_names(bam_reader: &'a mut bam::Reader) -> Result<Self, failure::Error> {
Self::new(&read_tag, bam_reader)
}
// Builds the iterator and primes the one-record lookahead so that
// `next()` can detect EOF immediately.
pub fn new(
read_barcode: &'a dyn Fn(&bam::Record) -> Option<&[u8]>,
bam_reader: &'a mut bam::Reader,
) -> Result<Self, failure::Error> {
let mut bg = BarcodeGroups {
bam_reader: bam_reader,
next_record: None,
read_barcode: read_barcode,
};
bg.next_record = bg.read_next_record()?;
Ok(bg)
}
// Reads one record: Ok(Some) on success, Ok(None) at EOF, Err on I/O error.
fn read_next_record(&mut self) -> Result<Option<bam::Record>, failure::Error> {
let mut rec = bam::Record::new();
match self.bam_reader.read(&mut rec) {
Ok(true) => Ok(Some(rec)),
Ok(false) => Ok(None),
Err(e) => Err(e.into()),
}
}
// Collects `curr` and all immediately following records with the same
// barcode; the first record of the next group is stashed in
// `self.next_record` for the following iteration.
fn barcode_group(
&mut self,
curr: bam::Record,
) -> Result<(Vec<u8>, Vec<bam::Record>), failure::Error> {
let curr_bc = (self.read_barcode)(&curr)
.ok_or(err_msg("No barcode for current read"))?
.to_vec();
let mut bc_group = Vec::new();
bc_group.push(curr);
loop {
let next = self.read_next_record()?;
if let Some(rec) = next {
// Ends borrow of rec to analyze barcode
let same_group = (self.read_barcode)(&rec)
.ok_or(format_err!("No barcode for read {:?}", rec))?
== curr_bc.as_slice();
if same_group {
bc_group.push(rec);
} else {
self.next_record = Some(rec);
break;
}
} else {
self.next_record = None;
break;
}
}
Ok((curr_bc, bc_group))
}
}
impl<'a> Iterator for BarcodeGroups<'a> {
    type Item = Result<(Vec<u8>, Vec<bam::Record>), failure::Error>;
    /// Yields the next (barcode, records) group, or `None` once the
    /// lookahead record has been exhausted (EOF).
    fn next(&mut self) -> Option<Self::Item> {
        // `take()` moves the buffered record out (leaving None), and
        // `map` replaces the manual if-let-Some/else-None.
        self.next_record.take().map(|curr| self.barcode_group(curr))
    }
}
|
extern crate irc;
extern crate libc;
extern crate bound_tcp_stream;
use std::default::Default;
use irc::client::prelude::*;
use std::thread;
use std::time;
use std::str::FromStr;
use std::net::{IpAddr, SocketAddr};
// Spawns a swarm of IRC bots, one per (round, source-IP) pair, staggered
// so the server is not hit by simultaneous connections.
fn main() {
let dest_addr = SocketAddr::new(IpAddr::from_str("2a00:1a28:1100:11::42").unwrap(), 6667);
// Source IPv6 addresses to bind outgoing connections to.
// NOTE(review): ...:7001 is absent from the sequence — confirm whether
// that address is intentionally skipped.
let ips = vec![
IpAddr::from_str("2604:a880:800:10::19e0:7000").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7002").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7003").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7004").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7005").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7006").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7007").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7008").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:7009").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:700a").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:700b").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:700c").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:700d").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:700e").unwrap(),
IpAddr::from_str("2604:a880:800:10::19e0:700f").unwrap(),
];
let mut source_addrs: Vec<SocketAddr> = vec![];
for i in 0..ips.len() {
// Port 0: let the OS pick an ephemeral source port.
source_addrs.push(SocketAddr::new(ips[i], 0));
}
let mut bot_count = 0;
// 29 rounds over all source addresses -> 29 * 15 bots total.
for _ in 0..29 {
for j in 0..source_addrs.len() {
let source_addr = source_addrs[j];
// `bot_count` is Copy, so the move closure captures this
// iteration's value; the increment below does not affect it.
thread::spawn(move || {
let bot_id = bot_count;
run(bot_id as u64, source_addr, dest_addr);
});
thread::sleep(time::Duration::from_millis(1000));
bot_count += 1;
}
thread::sleep(time::Duration::from_millis(3000));
}
// Keep the process alive; the bots run on their own threads.
loop {
thread::sleep(time::Duration::from_millis(500));
}
}
// One bot: connects from `source_addr` to `dest_addr`, joins #46bots,
// and obeys commands. `leave` works from anyone; `quit`/`join`/`say`
// only from the hard-coded controller prefix. Each bot delays its
// response by 50ms * bot_id so actions fan out instead of bursting.
fn run(bot_id: u64, source_addr: SocketAddr, dest_addr: SocketAddr) {
let config = Config {
nickname: Some(format!("\\46bot{}", bot_id)),
channels: Some(vec![format!("#46bots")]),
source_addr: Some(source_addr),
dest_addr: Some(dest_addr),
.. Default::default()
};
let server = IrcServer::from_config(config).unwrap();
server.identify().unwrap();
for message in server.iter() {
let message = message.unwrap(); // We'll just panic if there's an error.
// "leave" may come from any sender: part the channel it was said in.
match message.command {
Command::PRIVMSG(ref target, ref msg) => {
if msg == "\\46bots: leave" {
thread::sleep(time::Duration::from_millis(50 * bot_id));
server.send_part(target).unwrap();
}
},
_ => (),
}
// Privileged commands: only honored from the controller's full prefix.
if message.prefix.is_some() {
let prefix = message.prefix.unwrap();
if prefix == "_46bit!~fortysix@pdpc/supporter/student/mmokrysz" {
match message.command {
Command::PRIVMSG(ref target, ref msg) => {
print!("prefix={:?} command={:?}", prefix, message.command);
if msg == "\\46bots: quit" {
thread::sleep(time::Duration::from_millis(50 * bot_id));
server.send_quit("_46bites the dust.").unwrap();
return;
} else if msg.starts_with("\\46bots: join ") {
thread::sleep(time::Duration::from_millis(50 * bot_id));
let (_, channel) = msg.split_at("\\46bots: join ".len());
server.send_join(channel).unwrap();
} else if msg.starts_with("\\46bots: say ") {
thread::sleep(time::Duration::from_millis(50 * bot_id));
let (_, words) = msg.split_at("\\46bots: say ".len());
server.send_notice(target, words).unwrap();
}
},
_ => (),
}
}
}
}
}
|
use super::*;
use super::super::error::Response::Wrong;
use std::rc::Rc;
/// Recursive-descent parser over the lexer's token stream.
pub struct Parser<'p> {
    index: usize,            // cursor into `tokens`
    tokens: Vec<Token>,      // the full token stream being parsed
    source: &'p Source,      // originating source, used for error reporting
    indent_standard: usize,  // indent width fixed by the first indented body (0 = unset)
    indent: usize,           // indent level of the body currently being parsed
}
impl<'p> Parser<'p> {
/// Builds a parser positioned at the first token, with no indent
/// convention established yet.
pub fn new(tokens: Vec<Token>, source: &'p Source) -> Self {
    let (index, indent_standard, indent) = (0, 0, 0);
    Parser {
        tokens,
        source,
        index,
        indent_standard,
        indent,
    }
}
/// Parses statements until the token stream is exhausted, returning the
/// program's top-level AST.
pub fn parse(&mut self) -> Result<Vec<Statement>, ()> {
    let mut statements = Vec::new();
    loop {
        if self.remaining() == 0 {
            return Ok(statements)
        }
        statements.push(self.parse_statement()?)
    }
}
/// Parses one statement: a record declaration (`Name -> Parent: body`),
/// an assignment (`name = expr`), or a bare expression.
fn parse_statement(&mut self) -> Result<Statement, ()> {
    use self::TokenType::*;
    // Skip any leading end-of-line tokens.
    while self.current_type() == EOL && self.remaining() != 0 {
        self.next()?
    }
    let position = self.current_position();
    let statement = match self.current_type() {
        Identifier => {
            // Remember where we started: if this turns out to be neither a
            // record nor an assignment, we rewind and re-parse as expression.
            let backup_index = self.index;
            let name = self.eat()?;
            let mut parents = Vec::new();
            // Zero or more `-> Parent` clauses.
            while self.current_lexeme() == "->" {
                self.next()?;
                parents.push(self.parse_expression()?)
            }
            if self.current_lexeme() == ":" {
                self.next()?;
                // Body is either an indented block (after a newline) or a
                // single inline expression wrapped as a one-statement body.
                let body = if self.current_lexeme() == "\n" {
                    self.new_line()?;
                    self.next_newline()?;
                    self.parse_body()?
                } else {
                    let expression = self.parse_expression()?;
                    let position = expression.pos.clone();
                    vec!(
                        Statement::new(
                            StatementNode::Expression(
                                expression
                            ),
                            position
                        )
                    )
                };
                let record = Statement::new(
                    StatementNode::Record(
                        name,
                        parents,
                        body,
                    ),
                    position
                );
                // Records return early: the trailing new_line() check below
                // is skipped because the body already consumed line ends.
                return Ok(record)
            } else if self.current_lexeme() == "=" {
                self.next()?;
                Statement::new(
                    StatementNode::Assignment(name, self.parse_expression()?),
                    position
                )
            } else {
                // Not a record or assignment: rewind to the identifier and
                // parse the whole thing as an expression statement.
                self.index = backup_index;
                let expression = self.parse_expression()?;
                let position = expression.pos.clone();
                Statement::new(
                    StatementNode::Expression(expression),
                    position,
                )
            }
        },
        _ => {
            let expression = self.parse_expression()?;
            let position = expression.pos.clone();
            Statement::new(
                StatementNode::Expression(expression),
                position,
            )
        },
    };
    // Every non-record statement must end at a line boundary.
    self.new_line()?;
    Ok(statement)
}
/// Parses an indented block of statements until a dedent or EOF.
///
/// The first body ever parsed fixes `indent_standard`; later bodies must be
/// indented by a multiple of it.
fn parse_body(&mut self) -> Result<Vec<Statement>, ()> {
    let backup_indent = self.indent;
    self.indent = self.get_indent();
    if self.indent_standard == 0 {
        self.indent_standard = self.indent
    } else {
        if self.indent % self.indent_standard != 0 {
            // NOTE(review): this early return leaves `self.indent` set to the
            // inconsistent level (not restored to `backup_indent`) — confirm
            // whether callers abort entirely on Err, making that harmless.
            return Err(
                response!(
                    Wrong(format!("found inconsistently indented token")),
                    self.source.file,
                    self.current_position()
                )
            )
        }
    }
    let mut stack = Vec::new();
    while !self.is_dedent() && self.remaining() > 0 {
        let statement = self.parse_statement()?;
        self.next_newline()?;
        stack.push(statement)
    }
    // Restore the enclosing block's indent level.
    self.indent = backup_indent;
    Ok(stack)
}
/// Parses an expression: an atom, optionally extended into a binary
/// operation when an operator token follows.
fn parse_expression(&mut self) -> Result<Expression, ()> {
    let atom = self.parse_atom()?;
    match self.current_type() {
        TokenType::Operator => self.parse_binary(atom),
        _ => Ok(atom),
    }
}
/// Parses a primary expression — literal, identifier, or unary `-`/`not` —
/// then hands off to `parse_postfix` for trailing call syntax.
fn parse_atom(&mut self) -> Result<Expression, ()> {
    use self::TokenType::*;
    if self.remaining() == 0 {
        // Past the end of the stream: produce an explicit EOF node.
        Ok(
            Expression::new(
                ExpressionNode::EOF,
                self.current_position()
            )
        )
    } else {
        let token_type = self.current_type().clone();
        let position = self.current_position();
        let expression = match token_type {
            // NOTE(review): assumes the lexer only emits Number lexemes that
            // parse as f64 — the unwrap panics otherwise.
            Number => Expression::new(
                ExpressionNode::Number(self.eat()?.parse::<f64>().unwrap()),
                position
            ),
            Str => Expression::new(
                ExpressionNode::Str(self.eat()?),
                position
            ),
            Bool => Expression::new(
                ExpressionNode::Bool(self.eat()? == "true"),
                position
            ),
            Identifier => Expression::new(
                ExpressionNode::Identifier(self.eat()?),
                position
            ),
            // Prefix operators: unary minus and logical `not`.
            Operator => match self.current_lexeme().as_str() {
                "-" => {
                    self.next()?;
                    Expression::new(
                        ExpressionNode::Neg(
                            Rc::new(self.parse_expression()?)
                        ),
                        self.span_from(position)
                    )
                },
                "not" => {
                    self.next()?;
                    Expression::new(
                        ExpressionNode::Not(
                            Rc::new(self.parse_expression()?)
                        ),
                        self.span_from(position)
                    )
                },
                ref op => return Err(
                    response!(
                        Wrong(format!("unexpected operator `{}`", op)),
                        self.source.file,
                        self.current_position()
                    )
                )
            },
            ref token_type => return Err(
                response!(
                    Wrong(format!("unexpected token `{}`", token_type)),
                    self.source.file,
                    self.current_position()
                )
            )
        };
        // Only try postfix forms while tokens remain.
        if self.remaining() > 0 {
            self.parse_postfix(expression)
        } else {
            Ok(expression)
        }
    }
}
fn parse_postfix(&mut self, expression: Expression) -> Result<Expression, ()> {
let backup_index = self.index;
if self.remaining() == 0 {
return Ok(expression)
}
match self.current_type() {
ref current => {
if let TokenType::Symbol = current {
if self.current_lexeme() == "(" {
self.next()?;
self.next_newline()?;;
let mut args = Vec::new();
if ![TokenType::Operator, TokenType::Keyword].contains(&self.current_type()) {
while !["\n", ")"].contains(&self.current_lexeme().as_str()) {
args.push(self.parse_expression()?);
if !["\n", ")"].contains(&self.current_lexeme().as_str()) && self.remaining() > 0 {
self.eat_lexeme(",")?;
self.next_newline()?;
}
}
}
self.next_newline()?;
self.eat_lexeme(")")?;
let position = expression.pos.clone();
return Ok(
Expression::new(
ExpressionNode::Call(
Rc::new(expression),
args,
),
self.span_from(position)
)
)
}
}
Ok(expression)
},
_ => Ok(expression)
}
}
/// Parses a chain of binary operators that starts after `left`, using a
/// two-stack (operands / operators-with-precedence) reduction scheme.
fn parse_binary(&mut self, left: Expression) -> Result<Expression, ()> {
    let left_position = left.pos.clone();
    let mut expression_stack = vec!(left);
    // Each operator_stack entry is an (Operator, precedence) pair.
    let mut operator_stack = vec!(Operator::from_str(&self.eat()?).unwrap());
    expression_stack.push(self.parse_atom()?);
    while operator_stack.len() > 0 {
        while self.current_type() == TokenType::Operator {
            let position = self.current_position();
            let (operator, precedence) = Operator::from_str(&self.eat()?).unwrap();
            // Incoming operator binds looser than the stack top: reduce the
            // top operand pair before pushing the new operator.
            if precedence < operator_stack.last().unwrap().1 {
                let right = expression_stack.pop().unwrap();
                let left = expression_stack.pop().unwrap();
                expression_stack.push(
                    Expression::new(
                        ExpressionNode::Binary(Rc::new(left), operator_stack.pop().unwrap().0, Rc::new(right)),
                        self.current_position(),
                    )
                );
                if self.remaining() > 0 {
                    expression_stack.push(self.parse_atom()?);
                    operator_stack.push((operator, precedence))
                } else {
                    // Operator with no right-hand operand.
                    return Err(
                        response!(
                            Wrong("reached EOF in operation"),
                            self.source.file,
                            position
                        )
                    )
                }
            } else {
                expression_stack.push(self.parse_atom()?);
                operator_stack.push((operator, precedence))
            }
        }
        // No more operator tokens: fold what remains on the stacks.
        let right = expression_stack.pop().unwrap();
        let left = expression_stack.pop().unwrap();
        expression_stack.push(
            Expression::new(
                ExpressionNode::Binary(Rc::new(left), operator_stack.pop().unwrap().0, Rc::new(right)),
                self.current_position(),
            )
        );
    }
    // Re-span the finished tree from the original left operand to here.
    let expression = expression_stack.pop().unwrap();
    Ok(
        Expression::new(
            expression.node,
            self.span_from(left_position)
        )
    )
}
/// Consumes exactly one newline token; errors on any other token.
/// At end of stream this is a no-op success.
fn new_line(&mut self) -> Result<(), ()> {
    if self.remaining() == 0 {
        return Ok(())
    }
    if self.current_lexeme() == "\n" {
        self.next()
    } else {
        Err(
            response!(
                Wrong(format!("expected new line found: `{}`", self.current_lexeme())),
                self.source.file,
                self.current_position()
            )
        )
    }
}
/// Skips over a run of consecutive newline tokens, stopping at the first
/// non-newline token or at end of stream.
fn next_newline(&mut self) -> Result<(), ()> {
    loop {
        if self.remaining() == 0 || self.current_lexeme() != "\n" {
            return Ok(())
        }
        self.next()?
    }
}
/// Indent level of the current token, taken from its slice start.
/// NOTE(review): assumes `slice.0` is 1-based (underflows if it is ever 0)
/// — confirm against the lexer's slice convention.
fn get_indent(&self) -> usize {
    self.current().slice.0 - 1
}
/// True when the cursor sits on a non-newline token indented less than the
/// body currently being parsed (i.e. the block has ended).
fn is_dedent(&self) -> bool {
    if self.current_lexeme() == "\n" {
        return false
    }
    self.get_indent() < self.indent
}
/// Advances the cursor by one token.
///
/// The bound is deliberately `<=`: the cursor may sit one past the last
/// token (an EOF position); `current()` clamps reads back to the final token.
///
/// Fix: removed leftover debugging that captured a `Backtrace` on EVERY
/// advance and printed it when the magic lexeme "fazoo" appeared — dead
/// diagnostic code and a per-call performance hit.
fn next(&mut self) -> Result<(), ()> {
    if self.index <= self.tokens.len() {
        self.index += 1;
        Ok(())
    } else {
        Err(
            response!(
                Wrong("moving outside token stack"),
                self.source.file,
                self.current_position()
            )
        )
    }
}
/// Number of tokens at or after the cursor (0 once the cursor is past the end).
fn remaining(&self) -> usize {
    self.tokens.len().saturating_sub(self.index)
}
/// Source position (line and slice) of the token under the cursor.
fn current_position(&self) -> Pos {
    let token = self.current();
    Pos(token.line.clone(), token.slice)
}
/// Builds a span that starts at `left_position` and ends at the current
/// token, keeping the starting line.
fn span_from(&self, left_position: Pos) -> Pos {
    let Pos(ref line, ref slice) = left_position;
    let Pos(_, ref slice2) = self.current_position();
    // Clamp the end column so the span never points past the line's text.
    Pos(line.clone(), (slice.0, if slice2.1 < line.1.len() { slice2.1 } else { line.1.len() } ))
}
/// Token under the cursor, clamped to the last token once the cursor has
/// moved past the end (acts as a sticky EOF token).
/// NOTE(review): panics if `tokens` is empty — assumes the lexer always
/// produces at least one token; confirm.
fn current(&self) -> Token {
    if self.index > self.tokens.len() - 1 {
        self.tokens[self.tokens.len() - 1].clone()
    } else {
        self.tokens[self.index].clone()
    }
}
/// Returns the current token's lexeme and advances past it.
fn eat(&mut self) -> Result<String, ()> {
    let eaten = self.current().lexeme;
    self.next().map(|_| eaten)
}
/// Consumes the current token iff its lexeme equals `lexeme`, returning the
/// consumed lexeme; errors without advancing otherwise.
fn eat_lexeme(&mut self, lexeme: &str) -> Result<String, ()> {
    if self.current_lexeme() != lexeme {
        return Err(
            response!(
                Wrong(format!("expected `{}` but found `{}`", lexeme, self.current_lexeme())),
                self.source.file,
                self.current_position()
            )
        )
    }
    let eaten = self.current().lexeme;
    self.next()?;
    Ok(eaten)
}
/// Consumes the current token iff it has the given type, returning its
/// lexeme; errors without advancing otherwise.
///
/// Fix: `current()` returns an owned `Token`, so the lexeme can be moved out
/// directly — the previous `.clone()` copied the String needlessly.
fn eat_type(&mut self, token_type: &TokenType) -> Result<String, ()> {
    if self.current_type() == *token_type {
        let lexeme = self.current().lexeme;
        self.next()?;
        Ok(lexeme)
    } else {
        Err(
            response!(
                Wrong(format!("expected `{}` but found `{}`", token_type, self.current_type())),
                self.source.file,
                self.current_position()
            )
        )
    }
}
/// Lexeme of the token under the cursor.
///
/// Fix: `current()` already hands back an owned `Token`; moving its lexeme
/// out avoids the extra String copy the old `.clone()` made.
fn current_lexeme(&self) -> String {
    self.current().lexeme
}
/// Type of the token under the cursor.
fn current_type(&self) -> TokenType {
    self.current().token_type
}
/// Errors unless the current token has the given type; consumes nothing.
/// NOTE(review): unlike every other error site in this file, this
/// `response!` passes no position argument — confirm the macro's
/// two-argument form still reports a useful location.
fn expect_type(&self, token_type: TokenType) -> Result<(), ()> {
    if self.current_type() == token_type {
        Ok(())
    } else {
        Err(
            response!(
                Wrong(format!("expected `{}` but found `{}`", token_type, self.current_type())),
                self.source.file
            )
        )
    }
}
/// Errors unless the current token's lexeme equals `lexeme`; consumes nothing.
/// NOTE(review): like `expect_type`, this `response!` omits the position
/// argument used elsewhere — confirm intended.
fn expect_lexeme(&self, lexeme: &str) -> Result<(), ()> {
    if self.current_lexeme() == lexeme {
        Ok(())
    } else {
        Err(
            response!(
                Wrong(format!("expected `{}` but found `{}`", lexeme, self.current_lexeme())),
                self.source.file
            )
        )
    }
}
} |
use chrono::NaiveDateTime;
/// Extension trait exposing a civil (timezone-less) date-time view of a
/// system time value.
pub trait SystemTimeExt {
    /// Borrowed naive date-time representation.
    /// NOTE(review): returning `&NaiveDateTime` forces implementors to store
    /// one — confirm this is intended rather than returning it by value.
    fn date_time(&self) -> &NaiveDateTime;
}
|
// Auto-generated register accessors (svd2rust): reader/writer aliases for
// each GICV_CTLR bit field. Regenerate from the SVD rather than hand-editing.
#[doc = "Register `GICV_CTLR` reader"]
pub type R = crate::R<GICV_CTLR_SPEC>;
#[doc = "Register `GICV_CTLR` writer"]
pub type W = crate::W<GICV_CTLR_SPEC>;
#[doc = "Field `ENABLEGRP0` reader - ENABLEGRP0"]
pub type ENABLEGRP0_R = crate::BitReader;
#[doc = "Field `ENABLEGRP0` writer - ENABLEGRP0"]
pub type ENABLEGRP0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ENABLEGRP1` reader - ENABLEGRP1"]
pub type ENABLEGRP1_R = crate::BitReader;
#[doc = "Field `ENABLEGRP1` writer - ENABLEGRP1"]
pub type ENABLEGRP1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ACKCTL` reader - ACKCTL"]
pub type ACKCTL_R = crate::BitReader;
#[doc = "Field `ACKCTL` writer - ACKCTL"]
pub type ACKCTL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FIQEN` reader - FIQEN"]
pub type FIQEN_R = crate::BitReader;
#[doc = "Field `FIQEN` writer - FIQEN"]
pub type FIQEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CBPR` reader - CBPR"]
pub type CBPR_R = crate::BitReader;
#[doc = "Field `CBPR` writer - CBPR"]
pub type CBPR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EOIMODE` reader - EOIMODE"]
pub type EOIMODE_R = crate::BitReader;
#[doc = "Field `EOIMODE` writer - EOIMODE"]
pub type EOIMODE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Generated field getters: each shifts the raw register value to the field's
// bit offset and masks out a single bit.
impl R {
    #[doc = "Bit 0 - ENABLEGRP0"]
    #[inline(always)]
    pub fn enablegrp0(&self) -> ENABLEGRP0_R {
        ENABLEGRP0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - ENABLEGRP1"]
    #[inline(always)]
    pub fn enablegrp1(&self) -> ENABLEGRP1_R {
        ENABLEGRP1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - ACKCTL"]
    #[inline(always)]
    pub fn ackctl(&self) -> ACKCTL_R {
        ACKCTL_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - FIQEN"]
    #[inline(always)]
    pub fn fiqen(&self) -> FIQEN_R {
        FIQEN_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - CBPR"]
    #[inline(always)]
    pub fn cbpr(&self) -> CBPR_R {
        CBPR_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 9 - EOIMODE"]
    #[inline(always)]
    pub fn eoimode(&self) -> EOIMODE_R {
        EOIMODE_R::new(((self.bits >> 9) & 1) != 0)
    }
}
// Generated field writers: each returns a proxy targeting the field's bit
// offset (the const generic parameter).
impl W {
    #[doc = "Bit 0 - ENABLEGRP0"]
    #[inline(always)]
    #[must_use]
    pub fn enablegrp0(&mut self) -> ENABLEGRP0_W<GICV_CTLR_SPEC, 0> {
        ENABLEGRP0_W::new(self)
    }
    #[doc = "Bit 1 - ENABLEGRP1"]
    #[inline(always)]
    #[must_use]
    pub fn enablegrp1(&mut self) -> ENABLEGRP1_W<GICV_CTLR_SPEC, 1> {
        ENABLEGRP1_W::new(self)
    }
    #[doc = "Bit 2 - ACKCTL"]
    #[inline(always)]
    #[must_use]
    pub fn ackctl(&mut self) -> ACKCTL_W<GICV_CTLR_SPEC, 2> {
        ACKCTL_W::new(self)
    }
    #[doc = "Bit 3 - FIQEN"]
    #[inline(always)]
    #[must_use]
    pub fn fiqen(&mut self) -> FIQEN_W<GICV_CTLR_SPEC, 3> {
        FIQEN_W::new(self)
    }
    #[doc = "Bit 4 - CBPR"]
    #[inline(always)]
    #[must_use]
    pub fn cbpr(&mut self) -> CBPR_W<GICV_CTLR_SPEC, 4> {
        CBPR_W::new(self)
    }
    #[doc = "Bit 9 - EOIMODE"]
    #[inline(always)]
    #[must_use]
    pub fn eoimode(&mut self) -> EOIMODE_W<GICV_CTLR_SPEC, 9> {
        EOIMODE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY (caller contract): the written value must be valid for this
        // register; no additional invariants are checked here.
        self.bits = bits;
        self
    }
}
#[doc = "GICV virtual machine control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicv_ctlr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gicv_ctlr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GICV_CTLR_SPEC;
// 32-bit register word.
impl crate::RegisterSpec for GICV_CTLR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`gicv_ctlr::R`](R) reader structure"]
impl crate::Readable for GICV_CTLR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gicv_ctlr::W`](W) writer structure"]
impl crate::Writable for GICV_CTLR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GICV_CTLR to value 0"]
impl crate::Resettable for GICV_CTLR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use super::{Indicator, MovingAverage};
use crate::economy::Monetary;
/// Exponential moving average over a fixed compile-time `PERIOD`.
pub struct EMA<const PERIOD: usize> {
    ema: Monetary,  // current smoothed value (seeded by `initialize`)
    count: usize,   // number of samples folded in via `evaluate`
}
impl<const PERIOD: usize> Indicator for EMA<PERIOD> {
    type Output = Option<Monetary>;

    /// Seeds the average with `value`; no samples are counted yet.
    fn initialize(value: Monetary) -> Self {
        let count = 0;
        EMA { ema: value, count }
    }

    /// Folds `value` into the average using the smoothing factor
    /// `alpha = 2 / (PERIOD + 1)`. Returns `None` until at least `PERIOD`
    /// samples have been evaluated, then the current average.
    fn evaluate(&mut self, value: Monetary) -> Self::Output {
        self.count += 1;
        let alpha = 2.0 / (1.0 + PERIOD as f64);
        self.ema *= 1.0 - alpha;
        self.ema += value * alpha;
        if self.count < PERIOD {
            None
        } else {
            Some(self.ema)
        }
    }
}
// Marker impl: EMA qualifies as a moving average; the trait adds no methods here.
impl<const PERIOD: usize> MovingAverage for EMA<PERIOD> {}
/// Warm-up returns `None` for the first PERIOD-1 samples, then the 9-sample
/// EMA of 0..=8 lands between 4.6 and 4.7.
///
/// Fix: the test body is fully synchronous, so a plain `#[test]` suffices —
/// the previous `#[tokio::test] async fn` spun up an async runtime for nothing.
#[test]
fn test_ema() {
    let mut ema = EMA::<9>::initialize(0.0);
    for i in 0..8 {
        assert_eq!(ema.evaluate(i as f64), None);
    }
    let result = ema.evaluate(8.0).unwrap();
    assert!(result > 4.6 && result < 4.7);
}
|
// Auto-generated register accessors (svd2rust): reader/writer aliases for
// each 8-bit colour component of L2DCCR. Regenerate from the SVD rather than
// hand-editing.
#[doc = "Register `L2DCCR` reader"]
pub type R = crate::R<L2DCCR_SPEC>;
#[doc = "Register `L2DCCR` writer"]
pub type W = crate::W<L2DCCR_SPEC>;
#[doc = "Field `DCBLUE` reader - Default Color Blue"]
pub type DCBLUE_R = crate::FieldReader;
#[doc = "Field `DCBLUE` writer - Default Color Blue"]
pub type DCBLUE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `DCGREEN` reader - Default Color Green"]
pub type DCGREEN_R = crate::FieldReader;
#[doc = "Field `DCGREEN` writer - Default Color Green"]
pub type DCGREEN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `DCRED` reader - Default Color Red"]
pub type DCRED_R = crate::FieldReader;
#[doc = "Field `DCRED` writer - Default Color Red"]
pub type DCRED_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `DCALPHA` reader - Default Color Alpha"]
pub type DCALPHA_R = crate::FieldReader;
#[doc = "Field `DCALPHA` writer - Default Color Alpha"]
pub type DCALPHA_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
// Generated field getters: each shifts the raw register value to the field's
// byte lane and masks out 8 bits (BGRA layout, blue in the low byte).
impl R {
    #[doc = "Bits 0:7 - Default Color Blue"]
    #[inline(always)]
    pub fn dcblue(&self) -> DCBLUE_R {
        DCBLUE_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - Default Color Green"]
    #[inline(always)]
    pub fn dcgreen(&self) -> DCGREEN_R {
        DCGREEN_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - Default Color Red"]
    #[inline(always)]
    pub fn dcred(&self) -> DCRED_R {
        DCRED_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - Default Color Alpha"]
    #[inline(always)]
    pub fn dcalpha(&self) -> DCALPHA_R {
        DCALPHA_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
// Generated field writers: each returns a proxy targeting the field's bit
// offset (the const generic parameter).
impl W {
    #[doc = "Bits 0:7 - Default Color Blue"]
    #[inline(always)]
    #[must_use]
    pub fn dcblue(&mut self) -> DCBLUE_W<L2DCCR_SPEC, 0> {
        DCBLUE_W::new(self)
    }
    #[doc = "Bits 8:15 - Default Color Green"]
    #[inline(always)]
    #[must_use]
    pub fn dcgreen(&mut self) -> DCGREEN_W<L2DCCR_SPEC, 8> {
        DCGREEN_W::new(self)
    }
    #[doc = "Bits 16:23 - Default Color Red"]
    #[inline(always)]
    #[must_use]
    pub fn dcred(&mut self) -> DCRED_W<L2DCCR_SPEC, 16> {
        DCRED_W::new(self)
    }
    #[doc = "Bits 24:31 - Default Color Alpha"]
    #[inline(always)]
    #[must_use]
    pub fn dcalpha(&mut self) -> DCALPHA_W<L2DCCR_SPEC, 24> {
        DCALPHA_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY (caller contract): the written value must be valid for this
        // register; no additional invariants are checked here.
        self.bits = bits;
        self
    }
}
#[doc = "LTDC Layer Default Color Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`l2dccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`l2dccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct L2DCCR_SPEC;
// 32-bit register word.
impl crate::RegisterSpec for L2DCCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`l2dccr::R`](R) reader structure"]
impl crate::Readable for L2DCCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`l2dccr::W`](W) writer structure"]
impl crate::Writable for L2DCCR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets L2DCCR to value 0"]
impl crate::Resettable for L2DCCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![allow(non_snake_case)]
struct Solution;
/// https://leetcode.com/problems/number-of-1-bits/
impl Solution {
    /// Returns the number of set bits in `n` (its Hamming weight).
    ///
    /// Improvement: replaces the manual shift-and-mask loop with the standard
    /// library's `u32::count_ones`, which lowers to a single popcount
    /// instruction on targets that have one. Behavior is identical for all
    /// inputs (result is always in 0..=32, so the `as i32` cast is lossless).
    pub fn hammingWeight(n: u32) -> i32 {
        n.count_ones() as i32
    }
}
#[cfg(test)]
mod test {
    use super::*;

    /// Checks hammingWeight against a table of known bit patterns.
    #[test]
    fn test() {
        let cases: [(u32, i32); 4] = [
            (0b00000000000000000000000000000000, 0),
            (0b00000000000000000000000000001011, 3),
            (0b00000000000000000000000010000000, 1),
            (0b11111111111111111111111111111101, 31),
        ];
        for (n, expected) in cases {
            assert_eq!(Solution::hammingWeight(n), expected);
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.